diff --git a/.github/workflows/sonar-scan.yml b/.github/workflows/sonar-scan.yml index 21a4b50d5..e0f98f393 100644 --- a/.github/workflows/sonar-scan.yml +++ b/.github/workflows/sonar-scan.yml @@ -123,7 +123,7 @@ jobs: develop: name: Build Nightly Docker if Develop push - needs: [ build, test, version ] + needs: [ build, version ] runs-on: ubuntu-latest if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/develop' }} steps: @@ -232,7 +232,7 @@ jobs: stable: name: Build Stable Docker if Main push - needs: [ build, test ] + needs: [ build ] runs-on: ubuntu-latest if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} steps: diff --git a/.gitignore b/.gitignore index 9e470748b..078b6108c 100644 --- a/.gitignore +++ b/.gitignore @@ -530,3 +530,4 @@ API.Tests/TestResults/ UI/Web/.vscode/settings.json /API.Tests/Services/Test Data/ArchiveService/CoverImages/output/* UI/Web/.angular/ +BenchmarkDotNet.Artifacts \ No newline at end of file diff --git a/API.Benchmark/API.Benchmark.csproj b/API.Benchmark/API.Benchmark.csproj index 31af4f2c6..11ef151a2 100644 --- a/API.Benchmark/API.Benchmark.csproj +++ b/API.Benchmark/API.Benchmark.csproj @@ -10,15 +10,21 @@ - - + + - + Always - + + + + + Data + Always + diff --git a/API.Benchmark/ArchiveSerivceBenchmark.cs b/API.Benchmark/ArchiveSerivceBenchmark.cs deleted file mode 100644 index c60a4271f..000000000 --- a/API.Benchmark/ArchiveSerivceBenchmark.cs +++ /dev/null @@ -1,8 +0,0 @@ -namespace API.Benchmark -{ - public class ArchiveSerivceBenchmark - { - // Benchmark to test default GetNumberOfPages from archive - // vs a new method where I try to open the archive and return said stream - } -} diff --git a/API.Benchmark/ArchiveServiceBenchmark.cs b/API.Benchmark/ArchiveServiceBenchmark.cs new file mode 100644 index 000000000..d8418ee26 --- /dev/null +++ b/API.Benchmark/ArchiveServiceBenchmark.cs @@ -0,0 +1,54 @@ +using System; +using System.IO.Abstractions; +using 
Microsoft.Extensions.Logging.Abstractions; +using API.Services; +using BenchmarkDotNet.Attributes; +using BenchmarkDotNet.Order; + +namespace API.Benchmark; + +[StopOnFirstError] +[MemoryDiagnoser] +[RankColumn] +[Orderer(SummaryOrderPolicy.FastestToSlowest)] +[SimpleJob(launchCount: 1, warmupCount: 5, targetCount: 20)] +public class ArchiveServiceBenchmark +{ + private readonly ArchiveService _archiveService; + private readonly IDirectoryService _directoryService; + private readonly IImageService _imageService; + + public ArchiveServiceBenchmark() + { + _directoryService = new DirectoryService(null, new FileSystem()); + _imageService = new ImageService(null, _directoryService); + _archiveService = new ArchiveService(new NullLogger(), _directoryService, _imageService); + } + + [Benchmark(Baseline = true)] + public void TestGetComicInfo_baseline() + { + if (_archiveService.GetComicInfo("Data/ComicInfo.zip") == null) { + throw new Exception("ComicInfo not found"); + } + } + + [Benchmark] + public void TestGetComicInfo_duplicate() + { + if (_archiveService.GetComicInfo("Data/ComicInfo_duplicateInfos.zip") == null) { + throw new Exception("ComicInfo not found"); + } + } + + [Benchmark] + public void TestGetComicInfo_outside_root() + { + if (_archiveService.GetComicInfo("Data/ComicInfo_outside_root.zip") == null) { + throw new Exception("ComicInfo not found"); + } + } + + // Benchmark to test default GetNumberOfPages from archive + // vs a new method where I try to open the archive and return said stream +} diff --git a/API.Benchmark/CleanTitleBenchmark.cs b/API.Benchmark/CleanTitleBenchmark.cs new file mode 100644 index 000000000..90310a9ef --- /dev/null +++ b/API.Benchmark/CleanTitleBenchmark.cs @@ -0,0 +1,26 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text.RegularExpressions; +using BenchmarkDotNet.Attributes; +using BenchmarkDotNet.Order; + +namespace API.Benchmark; + +[MemoryDiagnoser] +public static class 
CleanTitleBenchmarks +{ + private static IList _names; + + [GlobalSetup] + public static void LoadData() => _names = File.ReadAllLines("Data/Comics.txt"); + + [Benchmark] + public static void TestCleanTitle() + { + foreach (var name in _names) + { + Services.Tasks.Scanner.Parser.Parser.CleanTitle(name, true); + } + } +} diff --git a/API.Benchmark/Data/Comics.txt b/API.Benchmark/Data/Comics.txt new file mode 100644 index 000000000..05eb2d52f --- /dev/null +++ b/API.Benchmark/Data/Comics.txt @@ -0,0 +1,112 @@ +One-Star Squadron 02 (of 06) (2022) (digital) (Son of Ultron-Empire).cbz +Batman & the Monster Men 06 (2006) (Kryptonia-DCP).cbr +Hauteville House -07- Expedition Vanikoro.cbr +Fantastic Four v3 #020.cbz +Thunderbolts 053.cbr +Moon Knight 010 2007 Red Lion-DCP .cbr +New X-Men 037.cbr +X-Men - Deadly Genesis 02 (2006) (BigBlue-DCP).cbr +Incredible Hercules 128.cbr +JLA - Year One 03 of 12.cbr +Daredevil v2 082 (2006) (Reiu-DCP).cbr +069 - Iron Man v4 035 (2009) (Minutemen-ZonesDiva).cbr +2000AD prog 2285 (2022) (digital) (Minutemen-juvecube).cbz +Tanguy et Laverdure - Intégrale - T07.cbz +Excalibur 026 (2022) (Digital) (Zone-Empire).cbz +DC vs. 
Vampires - Killers 001 (2022) (Webrip) (The Last Kryptonian-DCP).cbz +By the Horns 003 (2021) (Digital) (Mephisto-Empire).cbz +Incredible Hulks 630 (2011) (Minutemen-Fiji).cbz +Red Robin 010 (2010) (Minutemen-OTT).cbr +Les Droits de lHomme - OneShot - Collectif.cbz +Tout Gaston - Intégrale.cbr +Good Night, Hem (2021) (Digital) (Dipole-Empire).cbz +Bunny Mask - The Hollow Inside 001 (2022) (Digital) (Mephisto-Empire).cbz +Les MYTHICS - T14 - Avarice.cbr +Fantastic Four Special 01 (2006) (Nascent-DCP).cbr +Sonjaversal 006 (2021) (5 covers) (digital) (The Seeker-Empire).cbz +The Flash 779 (2022) (Digital) (Zone-Empire).cbz +Supergirl and the Legion of Super-Heroes 020 (2006) (CamelotScans-DCP).cbr +Time Before Time 015 (2022) (Digital) (Zone-Empire).cbz +Union Jack 02 (2006) (Red Lion-DCP).cbr +Le Corps est un Vêtement que l'on quitte.pdf +Helmet of Fate - Black Alice 01 (2007) (Racerx-DCP).cbz +Villains United 003 [2005] (Team-DCP).cbr +Punisher 002.cbr +Grendel - Devil's Odyssey 008 (2021) (digital) (NeverAngel-Empire).cbz +Uncanny X-Force 05.1 (2011) (Minutemen-Megatonic).cbz +Orcs & Gobelins - T14 - Shaaka.cbr +Les grands personnages de l'histoire en bandes dessinées - T67 - Suffren - La Bataille de Gondelou.cbz +Batman Adventures 013 (Jorl - Dcp).cbr +Norse Mythology II 003 (2021) (digital) (Son of Ultron-Empire).cbz +Ghost Rider 012 (2007) (Team-DCP).cbr +Once & Future 021 (2021) (digital) (Son of Ultron-Empire).cbz +The Seven Deadly Sins #1_ Seven Deadly Her - Nakaba Suzuki.epub +Kimagure Orange Road Omnibus #5_ Vol. 
5 - Izumi Matsumoto.cbz +Booster Gold 36 2010 Minutemen-Oracle Saxon .cbr +New X-Men 023 (2006) (Reiu-DCP).cbr +World of Betty and Veronica Comics Digest 016 (2022) (Forsythe-DCP).cbz +Deadpool Team-Up 889 (2010) (noads) (LegionNever-CPS).cbr +Les bêtes de black city - T03 - le feu de la vengeance.cbr +The Brother of All Men 002 (2022) (digital) (Son of Ultron-Empire).cbz +DC Fifty-Two (52) Week One (2006) (Kryptonia-DCP).cbr +Heroes For Hire v2 09 (2007) (DarthScanner-DCP).cbr +Doom Patrol v4 012 [2005] (Bchry-DCP).cbr +Black Panther's Prey #1(Aieiebrazoff-DCP)-Repack.cbz +Hello Neighbor 02 - The Raven Brooks Disaster (2021) (Digital Rip) (Hourman-DCP).cbz +Grimm Spotlight - Cinderella vs. Zombies (2021) (digital) (The Seeker-Empire).cbz +Black's Myth 001 (2021) (digital) (Son of Ultron-Empire).cbz +Donjon Antipodes T02 +10001 Le Coffre aux Âmes.pdf +Ghost Rider 016 (2007) (Noads) (Team-DCP).cbr +JLA Classified 38 (2007) (Wolfrider-DCP).cbr +Olive 003 - On the Trail of the Nerpa (2021) (digital) (Mr Norrell-Empire).cbz +Avengers v3 #054.cbz +Doctor Strange - The Oath 01 (2006) (Kryptonia-DCP).cbr +Red Robin 006 2010 Minutemen-DTermined.cbr +056 - She-Hulk v2 032 (2008) (2 covers) (Minutemen-ReZone).cbr +DC Fifty-Two (52) Week 030 (2007) (Kryptonia-DCP).cbr +Detective Comics 1055 (2022) (Webrip) (The Last Kryptonian-DCP).cbz +Spider-Man vs. Vampires 01 2010 Minutemen-DTs .cbz +Grim 003 (2022) (digital) (Son of Ultron-Empire).cbz +Wastelanders - Star-Lord 001 (2022) (Digital) (Zone-Empire).cbz +Superman [2003-38] Adventures of Superman 621.cbr +Elektra - Black, White & Blood 001 (2022) (Digital) (Zone-Empire).cbz +Félix #15 - Heroic Album -1950- Le Tueur Fantome.cbz +Ms. 
Marvel v2 09 (2006) (Team-DCP).cbr +Stray Dogs - Dog Days 002 (2022) (digital) (Son of Ultron-Empire).cbz +My Date With Monsters 002 (2021) (Digital) (Mephisto-Empire).cbz +Friendly Neighborhood Spider-Man 02 (2006) (Variant Cvr) (Wildcarde1-DCP).cbr +Acriboréa -T03- Des millions de soleils.cbr +X-Men: Phoenix - Endsong 05 (of 5) [2005] (Team-DCP).cbr +Usagi Yojimbo - Lone Goat and Kid 006 (2022) (digital) (Son of Ultron-Empire).cbz +Robyn Hood Annual - The Swarm (2021) (digital) (The Seeker-Empire).cbz +Azrael #025.cbr +Nita Hawes' Nightmare Blog 002 (2021) (Digital) (Zone-Empire).cbz +Dark Avengers-Uncanny X-Men - Utopia 001.cbr +Naughty List 004 (2022) (digital) (Son of Ultron-Empire).cbz +Atalante - La Légende-04-L'Envol Des Boréades.cbz +Warlord of Mars 02 (6 covers).cbr +Action Comics 857 (2007) (CamelotScans-DCP).cbr +War For Earth - 3 002 (2022) (Webrip) (The Last Kryptonian-DCP).cbz +Oracle - T04 - Le Malformé.cbz +Battle Angel Alita #9_ Vol. 9 - Yukito Kishiro.epub +Les aventuriers de l'intermonde - T01 - Mission Athènes.cbz +Captain_America_and_The_Secret_Avengers_(2011)_(Minutemen-DTermined).cbr +She-Hulk 002 (2022) (Digital) (Zone-Empire).cbz +infinity inc 01 (2007) (racerx-dcp).cbz +Wonder Girl 004 (2021) (digital) (Son of Ultron-Empire).cbz +SEULS - T07 - Les Terres Basses.cbr +Out of Body 003 (2021) (digital) (Son of Ultron-Empire).cbz +Power Girl 09.cbr +Thor 614 (2 covers) (2010) (noads) (Archangel & FP-CPS).cbr +Iron Man 011 (2021) (Digital) (Zone-Empire).cbz +Ms. 
Marvel - Beyond the Limit 002 (2022) (Digital) (Zone-Empire).cbz +Ultimate X-Men #038.cbr +Excalibur 022 (2021) (Digital) (Zone-Empire).cbz +New Avengers 025 (2006) (Fixed) (Team-DCP).cbr +T06.2 - Topkapi.pdf +Thor Corps 2 of 4.cbr +Shang-Chi - Brothers & Sisters Infinity Comic 003 (2021) (Digital-Mobile) (Infinity-Empire) (WebP).cbz +X-Men To Serve And Protect 01 of 04 2010 .cbr +08A - Blue Beetle 020.cbz +The Joker Presents - A Puzzlebox Director's Cut 013 (2021) (digital) (Son of Ultron-Empire).cbz +Alice Matheson - T01 - Jour Z.cbz diff --git a/API.Benchmark/ParserBenchmarks.cs b/API.Benchmark/ParserBenchmarks.cs index 63adc6985..d7706a3f4 100644 --- a/API.Benchmark/ParserBenchmarks.cs +++ b/API.Benchmark/ParserBenchmarks.cs @@ -5,75 +5,74 @@ using System.Text.RegularExpressions; using BenchmarkDotNet.Attributes; using BenchmarkDotNet.Order; -namespace API.Benchmark +namespace API.Benchmark; + +[MemoryDiagnoser] +[Orderer(SummaryOrderPolicy.FastestToSlowest)] +[RankColumn] +public class ParserBenchmarks { - [MemoryDiagnoser] - [Orderer(SummaryOrderPolicy.FastestToSlowest)] - [RankColumn] - public class ParserBenchmarks + private readonly IList _names; + + private static readonly Regex NormalizeRegex = new Regex(@"[^a-zA-Z0-9]", + RegexOptions.IgnoreCase | RegexOptions.Compiled, + TimeSpan.FromMilliseconds(300)); + + private static readonly Regex IsEpub = new Regex(@"\.epub", + RegexOptions.IgnoreCase | RegexOptions.Compiled, + TimeSpan.FromMilliseconds(300)); + + public ParserBenchmarks() { - private readonly IList _names; - - private static readonly Regex NormalizeRegex = new Regex(@"[^a-zA-Z0-9]", - RegexOptions.IgnoreCase | RegexOptions.Compiled, - TimeSpan.FromMilliseconds(300)); - - private static readonly Regex IsEpub = new Regex(@"\.epub", - RegexOptions.IgnoreCase | RegexOptions.Compiled, - TimeSpan.FromMilliseconds(300)); - - public ParserBenchmarks() - { - // Read all series from SeriesNamesForNormalization.txt - _names = 
File.ReadAllLines("Data/SeriesNamesForNormalization.txt"); - Console.WriteLine($"Performing benchmark on {_names.Count} series"); - } - - private static string Normalize(string name) - { - // ReSharper disable once UnusedVariable - var ret = NormalizeRegex.Replace(name, string.Empty).ToLower(); - var normalized = NormalizeRegex.Replace(name, string.Empty).ToLower(); - return string.IsNullOrEmpty(normalized) ? name : normalized; - } - - - - [Benchmark] - public void TestNormalizeName() - { - foreach (var name in _names) - { - Normalize(name); - } - } - - - [Benchmark] - public void TestIsEpub() - { - foreach (var name in _names) - { - if ((name).ToLower() == ".epub") - { - /* No Operation */ - } - } - } - - [Benchmark] - public void TestIsEpub_New() - { - foreach (var name in _names) - { - - if (Path.GetExtension(name).Equals(".epub", StringComparison.InvariantCultureIgnoreCase)) - { - /* No Operation */ - } - } - } - - + // Read all series from SeriesNamesForNormalization.txt + _names = File.ReadAllLines("Data/SeriesNamesForNormalization.txt"); + Console.WriteLine($"Performing benchmark on {_names.Count} series"); } + + private static string Normalize(string name) + { + // ReSharper disable once UnusedVariable + var ret = NormalizeRegex.Replace(name, string.Empty).ToLower(); + var normalized = NormalizeRegex.Replace(name, string.Empty).ToLower(); + return string.IsNullOrEmpty(normalized) ? 
name : normalized; + } + + + + [Benchmark] + public void TestNormalizeName() + { + foreach (var name in _names) + { + Normalize(name); + } + } + + + [Benchmark] + public void TestIsEpub() + { + foreach (var name in _names) + { + if ((name).ToLower() == ".epub") + { + /* No Operation */ + } + } + } + + [Benchmark] + public void TestIsEpub_New() + { + foreach (var name in _names) + { + + if (Path.GetExtension(name).Equals(".epub", StringComparison.InvariantCultureIgnoreCase)) + { + /* No Operation */ + } + } + } + + } diff --git a/API.Benchmark/Program.cs b/API.Benchmark/Program.cs index 4a659a1b8..76ed97c70 100644 --- a/API.Benchmark/Program.cs +++ b/API.Benchmark/Program.cs @@ -1,22 +1,14 @@ using BenchmarkDotNet.Running; -namespace API.Benchmark -{ - /// - /// To build this, cd into API.Benchmark directory and run - /// dotnet build -c Release - /// then copy the outputted dll - /// dotnet copied_string\API.Benchmark.dll - /// - public static class Program - { - private static void Main(string[] args) - { - //BenchmarkRunner.Run(); - //BenchmarkRunner.Run(); - //BenchmarkRunner.Run(); - BenchmarkRunner.Run(); +namespace API.Benchmark; - } - } +/// +/// To build this, cd into API.Benchmark directory and run +/// dotnet build -c Release +/// then copy the outputted dll +/// dotnet copied_string\API.Benchmark.dll +/// +public static class Program +{ + private static void Main(string[] args) => BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args); } diff --git a/API.Benchmark/TestBenchmark.cs b/API.Benchmark/TestBenchmark.cs index c5d2d18e1..0b4880690 100644 --- a/API.Benchmark/TestBenchmark.cs +++ b/API.Benchmark/TestBenchmark.cs @@ -6,61 +6,60 @@ using API.Extensions; using BenchmarkDotNet.Attributes; using BenchmarkDotNet.Order; -namespace API.Benchmark +namespace API.Benchmark; + +/// +/// This is used as a scratchpad for testing +/// +[MemoryDiagnoser] +[Orderer(SummaryOrderPolicy.FastestToSlowest)] +[RankColumn] +public class TestBenchmark { - /// 
- /// This is used as a scratchpad for testing - /// - [MemoryDiagnoser] - [Orderer(SummaryOrderPolicy.FastestToSlowest)] - [RankColumn] - public class TestBenchmark + private static IEnumerable GenerateVolumes(int max) { - private static IEnumerable GenerateVolumes(int max) + var random = new Random(); + var maxIterations = random.Next(max) + 1; + var list = new List(); + for (var i = 0; i < maxIterations; i++) { - var random = new Random(); - var maxIterations = random.Next(max) + 1; - var list = new List(); - for (var i = 0; i < maxIterations; i++) + list.Add(new VolumeDto() { - list.Add(new VolumeDto() - { - Number = random.Next(10) > 5 ? 1 : 0, - Chapters = GenerateChapters() - }); - } - - return list; - } - - private static List GenerateChapters() - { - var list = new List(); - for (var i = 1; i < 40; i++) - { - list.Add(new ChapterDto() - { - Range = i + string.Empty - }); - } - - return list; - } - - private static void SortSpecialChapters(IEnumerable volumes) - { - foreach (var v in volumes.Where(vDto => vDto.Number == 0)) - { - v.Chapters = v.Chapters.OrderByNatural(x => x.Range).ToList(); - } - } - - [Benchmark] - public void TestSortSpecialChapters() - { - var volumes = GenerateVolumes(10); - SortSpecialChapters(volumes); + Number = random.Next(10) > 5 ? 
1 : 0, + Chapters = GenerateChapters() + }); } + return list; } + + private static List GenerateChapters() + { + var list = new List(); + for (var i = 1; i < 40; i++) + { + list.Add(new ChapterDto() + { + Range = i + string.Empty + }); + } + + return list; + } + + private static void SortSpecialChapters(IEnumerable volumes) + { + foreach (var v in volumes.Where(vDto => vDto.Number == 0)) + { + v.Chapters = v.Chapters.OrderByNatural(x => x.Range).ToList(); + } + } + + [Benchmark] + public void TestSortSpecialChapters() + { + var volumes = GenerateVolumes(10); + SortSpecialChapters(volumes); + } + } diff --git a/API.Tests/API.Tests.csproj b/API.Tests/API.Tests.csproj index fba1b24f8..6380fc95f 100644 --- a/API.Tests/API.Tests.csproj +++ b/API.Tests/API.Tests.csproj @@ -7,10 +7,10 @@ - - + + - + runtime; build; native; contentfiles; analyzers; buildtransitive diff --git a/API.Tests/BasicTest.cs b/API.Tests/BasicTest.cs new file mode 100644 index 000000000..fb2f2bbf0 --- /dev/null +++ b/API.Tests/BasicTest.cs @@ -0,0 +1,118 @@ +using System.Collections.Generic; +using System.Data.Common; +using System.IO.Abstractions.TestingHelpers; +using System.Linq; +using System.Threading.Tasks; +using API.Data; +using API.Entities; +using API.Entities.Enums; +using API.Helpers; +using API.Services; +using AutoMapper; +using Microsoft.Data.Sqlite; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.Extensions.Logging; +using NSubstitute; + +namespace API.Tests; + +public abstract class BasicTest +{ + private readonly DbConnection _connection; + protected readonly DataContext _context; + protected readonly IUnitOfWork _unitOfWork; + + + protected const string CacheDirectory = "C:/kavita/config/cache/"; + protected const string CoverImageDirectory = "C:/kavita/config/covers/"; + protected const string BackupDirectory = "C:/kavita/config/backups/"; + protected const string LogDirectory = "C:/kavita/config/logs/"; + protected 
const string BookmarkDirectory = "C:/kavita/config/bookmarks/"; + protected const string TempDirectory = "C:/kavita/config/temp/"; + + protected BasicTest() + { + var contextOptions = new DbContextOptionsBuilder() + .UseSqlite(CreateInMemoryDatabase()) + .Options; + _connection = RelationalOptionsExtension.Extract(contextOptions).Connection; + + _context = new DataContext(contextOptions); + Task.Run(SeedDb).GetAwaiter().GetResult(); + + var config = new MapperConfiguration(cfg => cfg.AddProfile()); + var mapper = config.CreateMapper(); + + _unitOfWork = new UnitOfWork(_context, mapper, null); + } + + private static DbConnection CreateInMemoryDatabase() + { + var connection = new SqliteConnection("Filename=:memory:"); + + connection.Open(); + + return connection; + } + + private async Task SeedDb() + { + await _context.Database.MigrateAsync(); + var filesystem = CreateFileSystem(); + + await Seed.SeedSettings(_context, new DirectoryService(Substitute.For>(), filesystem)); + + var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync(); + setting.Value = CacheDirectory; + + setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync(); + setting.Value = BackupDirectory; + + setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync(); + setting.Value = BookmarkDirectory; + + setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync(); + setting.Value = "10"; + + _context.ServerSetting.Update(setting); + + _context.Library.Add(new Library() + { + Name = "Manga", + Folders = new List() + { + new FolderPath() + { + Path = "C:/data/" + } + } + }); + return await _context.SaveChangesAsync() > 0; + } + + protected async Task ResetDb() + { + _context.Series.RemoveRange(_context.Series.ToList()); + _context.Users.RemoveRange(_context.Users.ToList()); + 
_context.AppUserBookmark.RemoveRange(_context.AppUserBookmark.ToList()); + + await _context.SaveChangesAsync(); + } + + protected static MockFileSystem CreateFileSystem() + { + var fileSystem = new MockFileSystem(); + fileSystem.Directory.SetCurrentDirectory("C:/kavita/"); + fileSystem.AddDirectory("C:/kavita/config/"); + fileSystem.AddDirectory(CacheDirectory); + fileSystem.AddDirectory(CoverImageDirectory); + fileSystem.AddDirectory(BackupDirectory); + fileSystem.AddDirectory(BookmarkDirectory); + fileSystem.AddDirectory(LogDirectory); + fileSystem.AddDirectory(TempDirectory); + fileSystem.AddDirectory("C:/data/"); + + return fileSystem; + } +} diff --git a/API.Tests/Comparers/ChapterSortComparerTest.cs b/API.Tests/Comparers/ChapterSortComparerTest.cs index 11fecf2c2..220be052d 100644 --- a/API.Tests/Comparers/ChapterSortComparerTest.cs +++ b/API.Tests/Comparers/ChapterSortComparerTest.cs @@ -2,18 +2,17 @@ using API.Comparators; using Xunit; -namespace API.Tests.Comparers +namespace API.Tests.Comparers; + +public class ChapterSortComparerTest { - public class ChapterSortComparerTest + [Theory] + [InlineData(new[] {1, 2, 0}, new[] {1, 2, 0})] + [InlineData(new[] {3, 1, 2}, new[] {1, 2, 3})] + [InlineData(new[] {1, 0, 0}, new[] {1, 0, 0})] + public void ChapterSortTest(int[] input, int[] expected) { - [Theory] - [InlineData(new[] {1, 2, 0}, new[] {1, 2, 0})] - [InlineData(new[] {3, 1, 2}, new[] {1, 2, 3})] - [InlineData(new[] {1, 0, 0}, new[] {1, 0, 0})] - public void ChapterSortTest(int[] input, int[] expected) - { - Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparer()).ToArray()); - } - + Assert.Equal(expected, input.OrderBy(f => f, new ChapterSortComparer()).ToArray()); } -} \ No newline at end of file + +} diff --git a/API.Tests/Comparers/StringLogicalComparerTest.cs b/API.Tests/Comparers/StringLogicalComparerTest.cs index 3d13e43ac..13f88243d 100644 --- a/API.Tests/Comparers/StringLogicalComparerTest.cs +++ 
b/API.Tests/Comparers/StringLogicalComparerTest.cs @@ -2,33 +2,32 @@ using API.Comparators; using Xunit; -namespace API.Tests.Comparers -{ - public class StringLogicalComparerTest - { - [Theory] - [InlineData( - new[] {"x1.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg"}, - new[] {"x1.jpg", "x3.jpg", "x4.jpg", "x10.jpg", "x11.jpg"} - )] - [InlineData( - new[] {"a.jpg", "aaa.jpg", "1.jpg", }, - new[] {"1.jpg", "a.jpg", "aaa.jpg"} - )] - [InlineData( - new[] {"a.jpg", "aaa.jpg", "1.jpg", "!cover.png"}, - new[] {"!cover.png", "1.jpg", "a.jpg", "aaa.jpg"} - )] - public void StringComparer(string[] input, string[] expected) - { - Array.Sort(input, StringLogicalComparer.Compare); +namespace API.Tests.Comparers; - var i = 0; - foreach (var s in input) - { - Assert.Equal(s, expected[i]); - i++; - } +public class StringLogicalComparerTest +{ + [Theory] + [InlineData( + new[] {"x1.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg"}, + new[] {"x1.jpg", "x3.jpg", "x4.jpg", "x10.jpg", "x11.jpg"} + )] + [InlineData( + new[] {"a.jpg", "aaa.jpg", "1.jpg", }, + new[] {"1.jpg", "a.jpg", "aaa.jpg"} + )] + [InlineData( + new[] {"a.jpg", "aaa.jpg", "1.jpg", "!cover.png"}, + new[] {"!cover.png", "1.jpg", "a.jpg", "aaa.jpg"} + )] + public void StringComparer(string[] input, string[] expected) + { + Array.Sort(input, StringLogicalComparer.Compare); + + var i = 0; + foreach (var s in input) + { + Assert.Equal(s, expected[i]); + i++; } } } diff --git a/API.Tests/Converters/CronConverterTests.cs b/API.Tests/Converters/CronConverterTests.cs index 813d82426..4d26edef7 100644 --- a/API.Tests/Converters/CronConverterTests.cs +++ b/API.Tests/Converters/CronConverterTests.cs @@ -1,19 +1,18 @@ using API.Helpers.Converters; using Xunit; -namespace API.Tests.Converters +namespace API.Tests.Converters; + +public class CronConverterTests { - public class CronConverterTests + [Theory] + [InlineData("daily", "0 0 * * *")] + [InlineData("disabled", "0 0 31 2 *")] + [InlineData("weekly", "0 0 * * 1")] + 
[InlineData("", "0 0 31 2 *")] + [InlineData("sdfgdf", "")] + public void ConvertTest(string input, string expected) { - [Theory] - [InlineData("daily", "0 0 * * *")] - [InlineData("disabled", "0 0 31 2 *")] - [InlineData("weekly", "0 0 * * 1")] - [InlineData("", "0 0 31 2 *")] - [InlineData("sdfgdf", "")] - public void ConvertTest(string input, string expected) - { - Assert.Equal(expected, CronConverter.ConvertToCronNotation(input)); - } + Assert.Equal(expected, CronConverter.ConvertToCronNotation(input)); } } diff --git a/API.Tests/Entities/ComicInfoTests.cs b/API.Tests/Entities/ComicInfoTests.cs index 325299cf8..ea8b0187d 100644 --- a/API.Tests/Entities/ComicInfoTests.cs +++ b/API.Tests/Entities/ComicInfoTests.cs @@ -35,4 +35,62 @@ public class ComicInfoTests Assert.Equal(AgeRating.RatingPending, ComicInfo.ConvertAgeRatingToEnum("rating pending")); } #endregion + + + #region CalculatedCount + + [Fact] + public void CalculatedCount_ReturnsVolumeCount() + { + var ci = new ComicInfo() + { + Number = "5", + Volume = "10", + Count = 10 + }; + + Assert.Equal(5, ci.CalculatedCount()); + } + + [Fact] + public void CalculatedCount_ReturnsNoCountWhenCountNotSet() + { + var ci = new ComicInfo() + { + Number = "5", + Volume = "10", + Count = 0 + }; + + Assert.Equal(5, ci.CalculatedCount()); + } + + [Fact] + public void CalculatedCount_ReturnsNumberCount() + { + var ci = new ComicInfo() + { + Number = "5", + Volume = "", + Count = 10 + }; + + Assert.Equal(5, ci.CalculatedCount()); + } + + [Fact] + public void CalculatedCount_ReturnsNumberCount_OnlyWholeNumber() + { + var ci = new ComicInfo() + { + Number = "5.7", + Volume = "", + Count = 10 + }; + + Assert.Equal(5, ci.CalculatedCount()); + } + + + #endregion } diff --git a/API.Tests/Entities/SeriesTest.cs b/API.Tests/Entities/SeriesTest.cs index 70897b49f..0b49bd3dd 100644 --- a/API.Tests/Entities/SeriesTest.cs +++ b/API.Tests/Entities/SeriesTest.cs @@ -1,27 +1,26 @@ using API.Data; using Xunit; -namespace API.Tests.Entities 
+namespace API.Tests.Entities; + +/// +/// Tests for +/// +public class SeriesTest { - /// - /// Tests for - /// - public class SeriesTest + [Theory] + [InlineData("Darker than Black")] + public void CreateSeries(string name) { - [Theory] - [InlineData("Darker than Black")] - public void CreateSeries(string name) - { - var key = API.Services.Tasks.Scanner.Parser.Parser.Normalize(name); - var series = DbFactory.Series(name); - Assert.Equal(0, series.Id); - Assert.Equal(0, series.Pages); - Assert.Equal(name, series.Name); - Assert.Null(series.CoverImage); - Assert.Equal(name, series.LocalizedName); - Assert.Equal(name, series.SortName); - Assert.Equal(name, series.OriginalName); - Assert.Equal(key, series.NormalizedName); - } + var key = API.Services.Tasks.Scanner.Parser.Parser.Normalize(name); + var series = DbFactory.Series(name); + Assert.Equal(0, series.Id); + Assert.Equal(0, series.Pages); + Assert.Equal(name, series.Name); + Assert.Null(series.CoverImage); + Assert.Equal(name, series.LocalizedName); + Assert.Equal(name, series.SortName); + Assert.Equal(name, series.OriginalName); + Assert.Equal(key, series.NormalizedName); } -} \ No newline at end of file +} diff --git a/API.Tests/Extensions/ChapterListExtensionsTests.cs b/API.Tests/Extensions/ChapterListExtensionsTests.cs index a1beddf09..f6ea62408 100644 --- a/API.Tests/Extensions/ChapterListExtensionsTests.cs +++ b/API.Tests/Extensions/ChapterListExtensionsTests.cs @@ -1,3 +1,4 @@ +using System; using System.Collections.Generic; using System.Linq; using API.Entities; @@ -6,140 +7,180 @@ using API.Extensions; using API.Parser; using Xunit; -namespace API.Tests.Extensions +namespace API.Tests.Extensions; + +public class ChapterListExtensionsTests { - public class ChapterListExtensionsTests + private static Chapter CreateChapter(string range, string number, MangaFile file, bool isSpecial) { - private static Chapter CreateChapter(string range, string number, MangaFile file, bool isSpecial) + return new Chapter() 
{ - return new Chapter() - { - Range = range, - Number = number, - Files = new List() {file}, - IsSpecial = isSpecial - }; - } + Range = range, + Number = number, + Files = new List() {file}, + IsSpecial = isSpecial + }; + } - private static MangaFile CreateFile(string file, MangaFormat format) + private static MangaFile CreateFile(string file, MangaFormat format) + { + return new MangaFile() { - return new MangaFile() - { - FilePath = file, - Format = format - }; - } - - [Fact] - public void GetAnyChapterByRange_Test_ShouldBeNull() - { - var info = new ParserInfo() - { - Chapters = "0", - Edition = "", - Format = MangaFormat.Archive, - FullFilePath = "/manga/darker than black.cbz", - Filename = "darker than black.cbz", - IsSpecial = false, - Series = "darker than black", - Title = "darker than black", - Volumes = "0" - }; - - var chapterList = new List() - { - CreateChapter("darker than black - Some special", "0", CreateFile("/manga/darker than black - special.cbz", MangaFormat.Archive), true) - }; - - var actualChapter = chapterList.GetChapterByRange(info); - - Assert.NotEqual(chapterList[0], actualChapter); - - } - - [Fact] - public void GetAnyChapterByRange_Test_ShouldBeNotNull() - { - var info = new ParserInfo() - { - Chapters = "0", - Edition = "", - Format = MangaFormat.Archive, - FullFilePath = "/manga/darker than black.cbz", - Filename = "darker than black.cbz", - IsSpecial = true, - Series = "darker than black", - Title = "darker than black", - Volumes = "0" - }; - - var chapterList = new List() - { - CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true) - }; - - var actualChapter = chapterList.GetChapterByRange(info); - - Assert.Equal(chapterList[0], actualChapter); - } - - [Fact] - public void GetChapterByRange_On_Duplicate_Files_Test_Should_Not_Error() - { - var info = new ParserInfo() - { - Chapters = "0", - Edition = "", - Format = MangaFormat.Archive, - FullFilePath = "/manga/detective comics 
#001.cbz", - Filename = "detective comics #001.cbz", - IsSpecial = true, - Series = "detective comics", - Title = "detective comics", - Volumes = "0" - }; - - var chapterList = new List() - { - CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true), - CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true) - }; - - var actualChapter = chapterList.GetChapterByRange(info); - - Assert.Equal(chapterList[0], actualChapter); - - } - - #region GetFirstChapterWithFiles + FilePath = file, + Format = format + }; + } [Fact] - public void GetFirstChapterWithFiles_ShouldReturnAllChapters() + public void GetAnyChapterByRange_Test_ShouldBeNull() + { + var info = new ParserInfo() { - var chapterList = new List() - { - CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true), - CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false), - }; + Chapters = "0", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + Filename = "darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "darker than black", + Volumes = "0" + }; - Assert.Equal(chapterList.First(), chapterList.GetFirstChapterWithFiles()); - } - - [Fact] - public void GetFirstChapterWithFiles_ShouldReturnSecondChapter() + var chapterList = new List() { - var chapterList = new List() - { - CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true), - CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false), - }; + CreateChapter("darker than black - Some special", "0", CreateFile("/manga/darker than black - special.cbz", MangaFormat.Archive), true) + }; - chapterList.First().Files = new List(); + var actualChapter = 
chapterList.GetChapterByRange(info); - Assert.Equal(chapterList.Last(), chapterList.GetFirstChapterWithFiles()); - } + Assert.NotEqual(chapterList[0], actualChapter); - - #endregion } + + [Fact] + public void GetAnyChapterByRange_Test_ShouldBeNotNull() + { + var info = new ParserInfo() + { + Chapters = "0", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + Filename = "darker than black.cbz", + IsSpecial = true, + Series = "darker than black", + Title = "darker than black", + Volumes = "0" + }; + + var chapterList = new List() + { + CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true) + }; + + var actualChapter = chapterList.GetChapterByRange(info); + + Assert.Equal(chapterList[0], actualChapter); + } + + [Fact] + public void GetChapterByRange_On_Duplicate_Files_Test_Should_Not_Error() + { + var info = new ParserInfo() + { + Chapters = "0", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/detective comics #001.cbz", + Filename = "detective comics #001.cbz", + IsSpecial = true, + Series = "detective comics", + Title = "detective comics", + Volumes = "0" + }; + + var chapterList = new List() + { + CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true), + CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true) + }; + + var actualChapter = chapterList.GetChapterByRange(info); + + Assert.Equal(chapterList[0], actualChapter); + } + + #region GetFirstChapterWithFiles + + [Fact] + public void GetFirstChapterWithFiles_ShouldReturnAllChapters() + { + var chapterList = new List() + { + CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true), + CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false), + }; + + 
Assert.Equal(chapterList.First(), chapterList.GetFirstChapterWithFiles()); + } + + [Fact] + public void GetFirstChapterWithFiles_ShouldReturnSecondChapter() + { + var chapterList = new List() + { + CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true), + CreateChapter("darker than black", "1", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), false), + }; + + chapterList.First().Files = new List(); + + Assert.Equal(chapterList.Last(), chapterList.GetFirstChapterWithFiles()); + } + + + #endregion + + #region MinimumReleaseYear + + [Fact] + public void MinimumReleaseYear_ZeroIfNoChapters() + { + var chapterList = new List(); + + Assert.Equal(0, chapterList.MinimumReleaseYear()); + } + + [Fact] + public void MinimumReleaseYear_ZeroIfNoValidDates() + { + var chapterList = new List() + { + CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true), + CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true) + }; + + chapterList[0].ReleaseDate = new DateTime(10, 1, 1); + chapterList[1].ReleaseDate = DateTime.MinValue; + + Assert.Equal(0, chapterList.MinimumReleaseYear()); + } + + [Fact] + public void MinimumReleaseYear_MinValidReleaseYear() + { + var chapterList = new List() + { + CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true), + CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true) + }; + + chapterList[0].ReleaseDate = new DateTime(2002, 1, 1); + chapterList[1].ReleaseDate = new DateTime(2012, 2, 1); + + Assert.Equal(2002, chapterList.MinimumReleaseYear()); + } + + #endregion } diff --git a/API.Tests/Extensions/EnumerableExtensionsTests.cs b/API.Tests/Extensions/EnumerableExtensionsTests.cs index 0f04ac9d7..e115d45f3 100644 --- 
a/API.Tests/Extensions/EnumerableExtensionsTests.cs +++ b/API.Tests/Extensions/EnumerableExtensionsTests.cs @@ -1,4 +1,7 @@ -using System.Linq; +using System.Collections.Generic; +using System.Linq; +using API.Data.Misc; +using API.Entities.Enums; using API.Extensions; using Xunit; @@ -132,4 +135,33 @@ public class EnumerableExtensionsTests i++; } } + + [Theory] + [InlineData(true, 2)] + [InlineData(false, 1)] + public void RestrictAgainstAgeRestriction_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount) + { + var items = new List() + { + new RecentlyAddedSeries() + { + AgeRating = AgeRating.Teen, + }, + new RecentlyAddedSeries() + { + AgeRating = AgeRating.Unknown, + }, + new RecentlyAddedSeries() + { + AgeRating = AgeRating.X18Plus, + }, + }; + + var filtered = items.RestrictAgainstAgeRestriction(new AgeRestriction() + { + AgeRating = AgeRating.Teen, + IncludeUnknowns = includeUnknowns + }); + Assert.Equal(expectedCount, filtered.Count()); + } } diff --git a/API.Tests/Extensions/FileInfoExtensionsTests.cs b/API.Tests/Extensions/FileInfoExtensionsTests.cs index 5e17ecaeb..e708356a9 100644 --- a/API.Tests/Extensions/FileInfoExtensionsTests.cs +++ b/API.Tests/Extensions/FileInfoExtensionsTests.cs @@ -4,30 +4,29 @@ using System.IO; using API.Extensions; using Xunit; -namespace API.Tests.Extensions +namespace API.Tests.Extensions; + +public class FileInfoExtensionsTests { - public class FileInfoExtensionsTests + private static readonly string TestDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Extensions/Test Data/"); + + [Fact] + public void HasFileBeenModifiedSince_ShouldBeFalse() { - private static readonly string TestDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Extensions/Test Data/"); + var filepath = Path.Join(TestDirectory, "not modified.txt"); + var date = new FileInfo(filepath).LastWriteTime; + Assert.False(new FileInfo(filepath).HasFileBeenModifiedSince(date)); + File.ReadAllText(filepath); + 
Assert.False(new FileInfo(filepath).HasFileBeenModifiedSince(date)); + } - [Fact] - public void HasFileBeenModifiedSince_ShouldBeFalse() - { - var filepath = Path.Join(TestDirectory, "not modified.txt"); - var date = new FileInfo(filepath).LastWriteTime; - Assert.False(new FileInfo(filepath).HasFileBeenModifiedSince(date)); - File.ReadAllText(filepath); - Assert.False(new FileInfo(filepath).HasFileBeenModifiedSince(date)); - } - - [Fact] - public void HasFileBeenModifiedSince_ShouldBeTrue() - { - var filepath = Path.Join(TestDirectory, "modified on run.txt"); - var date = new FileInfo(filepath).LastWriteTime; - Assert.False(new FileInfo(filepath).HasFileBeenModifiedSince(date)); - File.AppendAllLines(filepath, new[] { DateTime.Now.ToString(CultureInfo.InvariantCulture) }); - Assert.True(new FileInfo(filepath).HasFileBeenModifiedSince(date)); - } + [Fact] + public void HasFileBeenModifiedSince_ShouldBeTrue() + { + var filepath = Path.Join(TestDirectory, "modified on run.txt"); + var date = new FileInfo(filepath).LastWriteTime; + Assert.False(new FileInfo(filepath).HasFileBeenModifiedSince(date)); + File.AppendAllLines(filepath, new[] { DateTime.Now.ToString(CultureInfo.InvariantCulture) }); + Assert.True(new FileInfo(filepath).HasFileBeenModifiedSince(date)); } } diff --git a/API.Tests/Extensions/ParserInfoListExtensionsTests.cs b/API.Tests/Extensions/ParserInfoListExtensionsTests.cs index ff20403b1..b6a5ca362 100644 --- a/API.Tests/Extensions/ParserInfoListExtensionsTests.cs +++ b/API.Tests/Extensions/ParserInfoListExtensionsTests.cs @@ -5,49 +5,49 @@ using API.Entities.Enums; using API.Extensions; using API.Parser; using API.Services; +using API.Services.Tasks.Scanner.Parser; using API.Tests.Helpers; using Microsoft.Extensions.Logging; using NSubstitute; using Xunit; -namespace API.Tests.Extensions +namespace API.Tests.Extensions; + +public class ParserInfoListExtensions { - public class ParserInfoListExtensions + private readonly IDefaultParser _defaultParser; + 
public ParserInfoListExtensions() { - private readonly IDefaultParser _defaultParser; - public ParserInfoListExtensions() + _defaultParser = + new DefaultParser(new DirectoryService(Substitute.For>(), + new MockFileSystem())); + } + + [Theory] + [InlineData(new[] {"1", "1", "3-5", "5", "8", "0", "0"}, new[] {"1", "3-5", "5", "8", "0"})] + public void DistinctVolumesTest(string[] volumeNumbers, string[] expectedNumbers) + { + var infos = volumeNumbers.Select(n => new ParserInfo() {Volumes = n}).ToList(); + Assert.Equal(expectedNumbers, infos.DistinctVolumes()); + } + + [Theory] + [InlineData(new[] {@"Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)] + [InlineData(new[] {@"Cynthia The Mission - c000-006 (v06-07) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)] + [InlineData(new[] {@"Cynthia The Mission v20 c12-20 [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, false)] + public void HasInfoTest(string[] inputInfos, string[] inputChapters, bool expectedHasInfo) + { + var infos = new List(); + foreach (var filename in inputInfos) { - _defaultParser = - new DefaultParser(new DirectoryService(Substitute.For>(), - new MockFileSystem())); + infos.Add(_defaultParser.Parse( + filename, + string.Empty)); } - [Theory] - [InlineData(new[] {"1", "1", "3-5", "5", "8", "0", "0"}, new[] {"1", "3-5", "5", "8", "0"})] - public void DistinctVolumesTest(string[] volumeNumbers, string[] expectedNumbers) - { - var infos = volumeNumbers.Select(n => new ParserInfo() {Volumes = n}).ToList(); - Assert.Equal(expectedNumbers, infos.DistinctVolumes()); - } + var files = inputChapters.Select(s => EntityFactory.CreateMangaFile(s, MangaFormat.Archive, 199)).ToList(); + var chapter = EntityFactory.CreateChapter("0-6", false, 
files); - [Theory] - [InlineData(new[] {@"Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)] - [InlineData(new[] {@"Cynthia The Mission - c000-006 (v06-07) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)] - [InlineData(new[] {@"Cynthia The Mission v20 c12-20 [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, false)] - public void HasInfoTest(string[] inputInfos, string[] inputChapters, bool expectedHasInfo) - { - var infos = new List(); - foreach (var filename in inputInfos) - { - infos.Add(_defaultParser.Parse( - filename, - string.Empty)); - } - - var files = inputChapters.Select(s => EntityFactory.CreateMangaFile(s, MangaFormat.Archive, 199)).ToList(); - var chapter = EntityFactory.CreateChapter("0-6", false, files); - - Assert.Equal(expectedHasInfo, infos.HasInfo(chapter)); - } + Assert.Equal(expectedHasInfo, infos.HasInfo(chapter)); } } diff --git a/API.Tests/Extensions/QueryableExtensionsTests.cs b/API.Tests/Extensions/QueryableExtensionsTests.cs new file mode 100644 index 000000000..ee1ada416 --- /dev/null +++ b/API.Tests/Extensions/QueryableExtensionsTests.cs @@ -0,0 +1,284 @@ +using System.Collections.Generic; +using System.Linq; +using API.Data.Misc; +using API.Entities; +using API.Entities.Enums; +using API.Entities.Metadata; +using API.Extensions; +using Xunit; + +namespace API.Tests.Extensions; + +public class QueryableExtensionsTests +{ + [Theory] + [InlineData(true, 2)] + [InlineData(false, 1)] + public void RestrictAgainstAgeRestriction_Series_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount) + { + var items = new List() + { + new Series() + { + Metadata = new SeriesMetadata() + { + AgeRating = AgeRating.Teen, + } + }, + new Series() + { + Metadata = new 
SeriesMetadata() + { + AgeRating = AgeRating.Unknown, + } + }, + new Series() + { + Metadata = new SeriesMetadata() + { + AgeRating = AgeRating.X18Plus, + } + }, + }; + + var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction() + { + AgeRating = AgeRating.Teen, + IncludeUnknowns = includeUnknowns + }); + Assert.Equal(expectedCount, filtered.Count()); + } + + [Theory] + [InlineData(true, 2)] + [InlineData(false, 1)] + public void RestrictAgainstAgeRestriction_CollectionTag_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount) + { + var items = new List() + { + new CollectionTag() + { + SeriesMetadatas = new List() + { + new SeriesMetadata() + { + AgeRating = AgeRating.Teen, + } + } + }, + new CollectionTag() + { + SeriesMetadatas = new List() + { + new SeriesMetadata() + { + AgeRating = AgeRating.Unknown, + }, + new SeriesMetadata() + { + AgeRating = AgeRating.Teen, + } + } + }, + new CollectionTag() + { + SeriesMetadatas = new List() + { + new SeriesMetadata() + { + AgeRating = AgeRating.X18Plus, + } + } + }, + }; + + var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction() + { + AgeRating = AgeRating.Teen, + IncludeUnknowns = includeUnknowns + }); + Assert.Equal(expectedCount, filtered.Count()); + } + + [Theory] + [InlineData(true, 2)] + [InlineData(false, 1)] + public void RestrictAgainstAgeRestriction_Genre_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount) + { + var items = new List() + { + new Genre() + { + SeriesMetadatas = new List() + { + new SeriesMetadata() + { + AgeRating = AgeRating.Teen, + } + } + }, + new Genre() + { + SeriesMetadatas = new List() + { + new SeriesMetadata() + { + AgeRating = AgeRating.Unknown, + }, + new SeriesMetadata() + { + AgeRating = AgeRating.Teen, + } + } + }, + new Genre() + { + SeriesMetadatas = new List() + { + new SeriesMetadata() + { + AgeRating = AgeRating.X18Plus, + } + } + }, + }; + + var filtered = 
items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction() + { + AgeRating = AgeRating.Teen, + IncludeUnknowns = includeUnknowns + }); + Assert.Equal(expectedCount, filtered.Count()); + } + + [Theory] + [InlineData(true, 2)] + [InlineData(false, 1)] + public void RestrictAgainstAgeRestriction_Tag_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount) + { + var items = new List() + { + new Tag() + { + SeriesMetadatas = new List() + { + new SeriesMetadata() + { + AgeRating = AgeRating.Teen, + } + } + }, + new Tag() + { + SeriesMetadatas = new List() + { + new SeriesMetadata() + { + AgeRating = AgeRating.Unknown, + }, + new SeriesMetadata() + { + AgeRating = AgeRating.Teen, + } + } + }, + new Tag() + { + SeriesMetadatas = new List() + { + new SeriesMetadata() + { + AgeRating = AgeRating.X18Plus, + } + } + }, + }; + + var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction() + { + AgeRating = AgeRating.Teen, + IncludeUnknowns = includeUnknowns + }); + Assert.Equal(expectedCount, filtered.Count()); + } + + [Theory] + [InlineData(true, 2)] + [InlineData(false, 1)] + public void RestrictAgainstAgeRestriction_Person_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount) + { + var items = new List() + { + new Person() + { + SeriesMetadatas = new List() + { + new SeriesMetadata() + { + AgeRating = AgeRating.Teen, + } + } + }, + new Person() + { + SeriesMetadatas = new List() + { + new SeriesMetadata() + { + AgeRating = AgeRating.Unknown, + }, + new SeriesMetadata() + { + AgeRating = AgeRating.Teen, + } + } + }, + new Person() + { + SeriesMetadatas = new List() + { + new SeriesMetadata() + { + AgeRating = AgeRating.X18Plus, + } + } + }, + }; + + var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction() + { + AgeRating = AgeRating.Teen, + IncludeUnknowns = includeUnknowns + }); + Assert.Equal(expectedCount, filtered.Count()); + } + + [Theory] + [InlineData(true, 2)] + 
[InlineData(false, 1)] + public void RestrictAgainstAgeRestriction_ReadingList_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount) + { + var items = new List() + { + new ReadingList() + { + AgeRating = AgeRating.Teen, + }, + new ReadingList() + { + AgeRating = AgeRating.Unknown, + }, + new ReadingList() + { + AgeRating = AgeRating.X18Plus + }, + }; + + var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction() + { + AgeRating = AgeRating.Teen, + IncludeUnknowns = includeUnknowns + }); + Assert.Equal(expectedCount, filtered.Count()); + } +} diff --git a/API.Tests/Extensions/SeriesExtensionsTests.cs b/API.Tests/Extensions/SeriesExtensionsTests.cs index b339b306d..f8dce8876 100644 --- a/API.Tests/Extensions/SeriesExtensionsTests.cs +++ b/API.Tests/Extensions/SeriesExtensionsTests.cs @@ -1,4 +1,6 @@ -using System.Linq; +using System.Collections.Generic; +using System.Linq; +using API.Comparators; using API.Entities; using API.Entities.Enums; using API.Entities.Metadata; @@ -7,86 +9,360 @@ using API.Parser; using API.Services.Tasks.Scanner; using Xunit; -namespace API.Tests.Extensions +namespace API.Tests.Extensions; + +public class SeriesExtensionsTests { - public class SeriesExtensionsTests + [Theory] + [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker than Black"}, true)] + [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker_than_Black"}, true)] + [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker then Black!"}, false)] + [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"Salem's Lot"}, true)] + [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salems lot"}, true)] + [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salem's lot"}, true)] + // Different normalizations pass as we check normalization 
against an on-the-fly calculation so we don't delete series just because we change how normalization works + [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot", "salems lot"}, new [] {"salem's lot"}, true)] + [InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, new [] {"Kanojo, Okarishimasu"}, true)] + public void NameInListTest(string[] seriesInput, string[] list, bool expected) { - [Theory] - [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker than Black"}, true)] - [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker_than_Black"}, true)] - [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker then Black!"}, false)] - [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"Salem's Lot"}, true)] - [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salems lot"}, true)] - [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salem's lot"}, true)] - // Different normalizations pass as we check normalization against an on-the-fly calculation so we don't delete series just because we change how normalization works - [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot", "salems lot"}, new [] {"salem's lot"}, true)] - [InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, new [] {"Kanojo, Okarishimasu"}, true)] - public void NameInListTest(string[] seriesInput, string[] list, bool expected) + var series = new Series() { - var series = new Series() - { - Name = seriesInput[0], - LocalizedName = seriesInput[1], - OriginalName = seriesInput[2], - NormalizedName = seriesInput.Length == 4 ? 
seriesInput[3] : API.Services.Tasks.Scanner.Parser.Parser.Normalize(seriesInput[0]), - Metadata = new SeriesMetadata() - }; + Name = seriesInput[0], + LocalizedName = seriesInput[1], + OriginalName = seriesInput[2], + NormalizedName = seriesInput.Length == 4 ? seriesInput[3] : API.Services.Tasks.Scanner.Parser.Parser.Normalize(seriesInput[0]), + Metadata = new SeriesMetadata() + }; - Assert.Equal(expected, series.NameInList(list)); - } + Assert.Equal(expected, series.NameInList(list)); + } - [Theory] - [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker than Black"}, MangaFormat.Archive, true)] - [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker_than_Black"}, MangaFormat.Archive, true)] - [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker then Black!"}, MangaFormat.Archive, false)] - [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"Salem's Lot"}, MangaFormat.Archive, true)] - [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salems lot"}, MangaFormat.Archive, true)] - [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salem's lot"}, MangaFormat.Archive, true)] - // Different normalizations pass as we check normalization against an on-the-fly calculation so we don't delete series just because we change how normalization works - [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot", "salems lot"}, new [] {"salem's lot"}, MangaFormat.Archive, true)] - [InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, new [] {"Kanojo, Okarishimasu"}, MangaFormat.Archive, true)] - public void NameInListParserInfoTest(string[] seriesInput, string[] list, MangaFormat format, bool expected) + [Theory] + [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker than 
Black"}, MangaFormat.Archive, true)] + [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker_than_Black"}, MangaFormat.Archive, true)] + [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker then Black!"}, MangaFormat.Archive, false)] + [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"Salem's Lot"}, MangaFormat.Archive, true)] + [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salems lot"}, MangaFormat.Archive, true)] + [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salem's lot"}, MangaFormat.Archive, true)] + // Different normalizations pass as we check normalization against an on-the-fly calculation so we don't delete series just because we change how normalization works + [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot", "salems lot"}, new [] {"salem's lot"}, MangaFormat.Archive, true)] + [InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, new [] {"Kanojo, Okarishimasu"}, MangaFormat.Archive, true)] + public void NameInListParserInfoTest(string[] seriesInput, string[] list, MangaFormat format, bool expected) + { + var series = new Series() { - var series = new Series() - { - Name = seriesInput[0], - LocalizedName = seriesInput[1], - OriginalName = seriesInput[2], - NormalizedName = seriesInput.Length == 4 ? seriesInput[3] : API.Services.Tasks.Scanner.Parser.Parser.Normalize(seriesInput[0]), - Metadata = new SeriesMetadata(), - }; + Name = seriesInput[0], + LocalizedName = seriesInput[1], + OriginalName = seriesInput[2], + NormalizedName = seriesInput.Length == 4 ? 
seriesInput[3] : API.Services.Tasks.Scanner.Parser.Parser.Normalize(seriesInput[0]), + Metadata = new SeriesMetadata(), + }; - var parserInfos = list.Select(s => new ParsedSeries() - { - Name = s, - NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize(s), - }).ToList(); - - // This doesn't do any checks against format - Assert.Equal(expected, series.NameInList(parserInfos)); - } - - - [Theory] - [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, "Darker than Black", true)] - [InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, "Kanojo, Okarishimasu", true)] - [InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, "Rent", false)] - public void NameInParserInfoTest(string[] seriesInput, string parserSeries, bool expected) + var parserInfos = list.Select(s => new ParsedSeries() { - var series = new Series() + Name = s, + NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize(s), + }).ToList(); + + // This doesn't do any checks against format + Assert.Equal(expected, series.NameInList(parserInfos)); + } + + + [Theory] + [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, "Darker than Black", true)] + [InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, "Kanojo, Okarishimasu", true)] + [InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, "Rent", false)] + public void NameInParserInfoTest(string[] seriesInput, string parserSeries, bool expected) + { + var series = new Series() + { + Name = seriesInput[0], + LocalizedName = seriesInput[1], + OriginalName = seriesInput[2], + NormalizedName = seriesInput.Length == 4 ? 
seriesInput[3] : API.Services.Tasks.Scanner.Parser.Parser.Normalize(seriesInput[0]), + Metadata = new SeriesMetadata() + }; + var info = new ParserInfo + { + Series = parserSeries + }; + + Assert.Equal(expected, series.NameInParserInfo(info)); + } + + [Fact] + public void GetCoverImage_MultipleSpecials_Comics() + { + var series = new Series() + { + Format = MangaFormat.Archive, + Volumes = new List() { - Name = seriesInput[0], - LocalizedName = seriesInput[1], - OriginalName = seriesInput[2], - NormalizedName = seriesInput.Length == 4 ? seriesInput[3] : API.Services.Tasks.Scanner.Parser.Parser.Normalize(seriesInput[0]), - Metadata = new SeriesMetadata() - }; - var info = new ParserInfo(); - info.Series = parserSeries; - - Assert.Equal(expected, series.NameInParserInfo(info)); - } + new Volume() + { + Number = 0, + Name = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume, + Chapters = new List() + { + new Chapter() + { + IsSpecial = true, + Number = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, + CoverImage = "Special 1", + }, + new Chapter() + { + IsSpecial = true, + Number = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, + CoverImage = "Special 2", + } + }, + } + } + }; + Assert.Equal("Special 1", series.GetCoverImage()); } + + [Fact] + public void GetCoverImage_MultipleSpecials_Books() + { + var series = new Series() + { + Format = MangaFormat.Epub, + Volumes = new List() + { + new Volume() + { + Number = 0, + Name = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume, + Chapters = new List() + { + new Chapter() + { + IsSpecial = true, + Number = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, + CoverImage = "Special 1", + }, + new Chapter() + { + IsSpecial = true, + Number = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, + CoverImage = "Special 2", + } + }, + } + } + }; + + Assert.Equal("Special 1", series.GetCoverImage()); + } + + [Fact] + public void GetCoverImage_JustChapters_Comics() + { + var series = new 
Series() + { + Format = MangaFormat.Archive, + Volumes = new List() + { + new Volume() + { + Number = 0, + Name = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume, + Chapters = new List() + { + new Chapter() + { + IsSpecial = false, + Number = "2.5", + CoverImage = "Special 1", + }, + new Chapter() + { + IsSpecial = false, + Number = "2", + CoverImage = "Special 2", + } + }, + } + } + }; + + foreach (var vol in series.Volumes) + { + vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number), ChapterSortComparerZeroFirst.Default)?.CoverImage; + } + + Assert.Equal("Special 2", series.GetCoverImage()); + } + + [Fact] + public void GetCoverImage_JustChaptersAndSpecials_Comics() + { + var series = new Series() + { + Format = MangaFormat.Archive, + Volumes = new List() + { + new Volume() + { + Number = 0, + Name = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume, + Chapters = new List() + { + new Chapter() + { + IsSpecial = false, + Number = "2.5", + CoverImage = "Special 1", + }, + new Chapter() + { + IsSpecial = false, + Number = "2", + CoverImage = "Special 2", + }, + new Chapter() + { + IsSpecial = true, + Number = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, + CoverImage = "Special 3", + } + }, + } + } + }; + + foreach (var vol in series.Volumes) + { + vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number), ChapterSortComparerZeroFirst.Default)?.CoverImage; + } + + Assert.Equal("Special 2", series.GetCoverImage()); + } + + [Fact] + public void GetCoverImage_VolumesChapters_Comics() + { + var series = new Series() + { + Format = MangaFormat.Archive, + Volumes = new List() + { + new Volume() + { + Number = 0, + Name = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume, + Chapters = new List() + { + new Chapter() + { + IsSpecial = false, + Number = "2.5", + CoverImage = "Special 1", + }, + new Chapter() + { + IsSpecial = false, + Number = "2", + CoverImage = "Special 2", + }, + new Chapter() + { + IsSpecial = true, + Number = 
API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, + CoverImage = "Special 3", + } + }, + }, + new Volume() + { + Number = 1, + Name = "1", + Chapters = new List() + { + new Chapter() + { + IsSpecial = false, + Number = "0", + CoverImage = "Volume 1", + }, + + }, + } + } + }; + + foreach (var vol in series.Volumes) + { + vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number), ChapterSortComparerZeroFirst.Default)?.CoverImage; + } + + Assert.Equal("Volume 1", series.GetCoverImage()); + } + + [Fact] + public void GetCoverImage_VolumesChaptersAndSpecials_Comics() + { + var series = new Series() + { + Format = MangaFormat.Archive, + Volumes = new List() + { + new Volume() + { + Number = 0, + Name = API.Services.Tasks.Scanner.Parser.Parser.DefaultVolume, + Chapters = new List() + { + new Chapter() + { + IsSpecial = false, + Number = "2.5", + CoverImage = "Special 1", + }, + new Chapter() + { + IsSpecial = false, + Number = "2", + CoverImage = "Special 2", + }, + new Chapter() + { + IsSpecial = true, + Number = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, + CoverImage = "Special 3", + } + }, + }, + new Volume() + { + Number = 1, + Name = "1", + Chapters = new List() + { + new Chapter() + { + IsSpecial = false, + Number = "0", + CoverImage = "Volume 1", + }, + + }, + } + } + }; + + foreach (var vol in series.Volumes) + { + vol.CoverImage = vol.Chapters.MinBy(x => double.Parse(x.Number), ChapterSortComparerZeroFirst.Default)?.CoverImage; + } + + Assert.Equal("Volume 1", series.GetCoverImage()); + } + + } diff --git a/API.Tests/Helpers/CacheHelperTests.cs b/API.Tests/Helpers/CacheHelperTests.cs index 723742bc6..d78ed1601 100644 --- a/API.Tests/Helpers/CacheHelperTests.cs +++ b/API.Tests/Helpers/CacheHelperTests.cs @@ -165,7 +165,7 @@ public class CacheHelperTests FilePath = TestCoverArchive, LastModified = filesystemFile.LastWriteTime.DateTime }; - Assert.True(cacheHelper.HasFileNotChangedSinceCreationOrLastScan(chapter, false, file)); + 
Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file)); } [Fact] @@ -195,7 +195,7 @@ public class CacheHelperTests FilePath = TestCoverArchive, LastModified = filesystemFile.LastWriteTime.DateTime }; - Assert.True(cacheHelper.HasFileNotChangedSinceCreationOrLastScan(chapter, false, file)); + Assert.True(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file)); } [Fact] @@ -225,15 +225,16 @@ public class CacheHelperTests FilePath = TestCoverArchive, LastModified = filesystemFile.LastWriteTime.DateTime }; - Assert.False(cacheHelper.HasFileNotChangedSinceCreationOrLastScan(chapter, true, file)); + Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, true, file)); } [Fact] - public void HasFileNotChangedSinceCreationOrLastScan_ModifiedSinceLastScan() + public void IsFileUnmodifiedSinceCreationOrLastScan_ModifiedSinceLastScan() { var filesystemFile = new MockFileData("") { - LastWriteTime = DateTimeOffset.Now + LastWriteTime = DateTimeOffset.Now, + CreationTime = DateTimeOffset.Now }; var fileSystem = new MockFileSystem(new Dictionary { @@ -246,8 +247,8 @@ public class CacheHelperTests var chapter = new Chapter() { - Created = filesystemFile.LastWriteTime.DateTime.Subtract(TimeSpan.FromMinutes(10)), - LastModified = filesystemFile.LastWriteTime.DateTime.Subtract(TimeSpan.FromMinutes(10)) + Created = DateTime.Now.Subtract(TimeSpan.FromMinutes(10)), + LastModified = DateTime.Now.Subtract(TimeSpan.FromMinutes(10)) }; var file = new MangaFile() @@ -255,7 +256,7 @@ public class CacheHelperTests FilePath = Path.Join(TestCoverImageDirectory, TestCoverArchive), LastModified = filesystemFile.LastWriteTime.DateTime }; - Assert.False(cacheHelper.HasFileNotChangedSinceCreationOrLastScan(chapter, false, file)); + Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file)); } [Fact] @@ -276,8 +277,8 @@ public class CacheHelperTests var chapter = new Chapter() { - Created = 
filesystemFile.LastWriteTime.DateTime.Subtract(TimeSpan.FromMinutes(10)), - LastModified = filesystemFile.LastWriteTime.DateTime + Created = DateTime.Now.Subtract(TimeSpan.FromMinutes(10)), + LastModified = DateTime.Now }; var file = new MangaFile() @@ -285,7 +286,7 @@ public class CacheHelperTests FilePath = Path.Join(TestCoverImageDirectory, TestCoverArchive), LastModified = filesystemFile.LastWriteTime.DateTime }; - Assert.False(cacheHelper.HasFileNotChangedSinceCreationOrLastScan(chapter, false, file)); + Assert.False(cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, file)); } } diff --git a/API.Tests/Helpers/EntityFactory.cs b/API.Tests/Helpers/EntityFactory.cs index 55d947cf5..2f46cc1f4 100644 --- a/API.Tests/Helpers/EntityFactory.cs +++ b/API.Tests/Helpers/EntityFactory.cs @@ -4,80 +4,79 @@ using API.Entities; using API.Entities.Enums; using API.Entities.Metadata; -namespace API.Tests.Helpers +namespace API.Tests.Helpers; + +/// +/// Used to help quickly create DB entities for Unit Testing +/// +public static class EntityFactory { - /// - /// Used to help quickly create DB entities for Unit Testing - /// - public static class EntityFactory + public static Series CreateSeries(string name) { - public static Series CreateSeries(string name) + return new Series() { - return new Series() - { - Name = name, - SortName = name, - LocalizedName = name, - NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize(name), - Volumes = new List(), - Metadata = new SeriesMetadata() - }; - } + Name = name, + SortName = name, + LocalizedName = name, + NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize(name), + Volumes = new List(), + Metadata = new SeriesMetadata() + }; + } - public static Volume CreateVolume(string volumeNumber, List chapters = null) + public static Volume CreateVolume(string volumeNumber, List chapters = null) + { + var chaps = chapters ?? new List(); + var pages = chaps.Count > 0 ? 
chaps.Max(c => c.Pages) : 0; + return new Volume() { - var chaps = chapters ?? new List(); - var pages = chaps.Count > 0 ? chaps.Max(c => c.Pages) : 0; - return new Volume() - { - Name = volumeNumber, - Number = (int) API.Services.Tasks.Scanner.Parser.Parser.MinNumberFromRange(volumeNumber), - Pages = pages, - Chapters = chaps - }; - } + Name = volumeNumber, + Number = (int) API.Services.Tasks.Scanner.Parser.Parser.MinNumberFromRange(volumeNumber), + Pages = pages, + Chapters = chaps + }; + } - public static Chapter CreateChapter(string range, bool isSpecial, List files = null, int pageCount = 0) + public static Chapter CreateChapter(string range, bool isSpecial, List files = null, int pageCount = 0) + { + return new Chapter() { - return new Chapter() - { - IsSpecial = isSpecial, - Range = range, - Number = API.Services.Tasks.Scanner.Parser.Parser.MinNumberFromRange(range) + string.Empty, - Files = files ?? new List(), - Pages = pageCount, + IsSpecial = isSpecial, + Range = range, + Number = API.Services.Tasks.Scanner.Parser.Parser.MinNumberFromRange(range) + string.Empty, + Files = files ?? 
new List(), + Pages = pageCount, - }; - } + }; + } - public static MangaFile CreateMangaFile(string filename, MangaFormat format, int pages) + public static MangaFile CreateMangaFile(string filename, MangaFormat format, int pages) + { + return new MangaFile() { - return new MangaFile() - { - FilePath = filename, - Format = format, - Pages = pages - }; - } + FilePath = filename, + Format = format, + Pages = pages + }; + } - public static SeriesMetadata CreateSeriesMetadata(ICollection collectionTags) + public static SeriesMetadata CreateSeriesMetadata(ICollection collectionTags) + { + return new SeriesMetadata() { - return new SeriesMetadata() - { - CollectionTags = collectionTags - }; - } + CollectionTags = collectionTags + }; + } - public static CollectionTag CreateCollectionTag(int id, string title, string summary, bool promoted) + public static CollectionTag CreateCollectionTag(int id, string title, string summary, bool promoted) + { + return new CollectionTag() { - return new CollectionTag() - { - Id = id, - NormalizedTitle = API.Services.Tasks.Scanner.Parser.Parser.Normalize(title).ToUpper(), - Title = title, - Summary = summary, - Promoted = promoted - }; - } + Id = id, + NormalizedTitle = API.Services.Tasks.Scanner.Parser.Parser.Normalize(title).ToUpper(), + Title = title, + Summary = summary, + Promoted = promoted + }; } } diff --git a/API.Tests/Helpers/ParserInfoFactory.cs b/API.Tests/Helpers/ParserInfoFactory.cs index 4b4a8e22a..793b764b0 100644 --- a/API.Tests/Helpers/ParserInfoFactory.cs +++ b/API.Tests/Helpers/ParserInfoFactory.cs @@ -6,68 +6,67 @@ using API.Entities.Enums; using API.Parser; using API.Services.Tasks.Scanner; -namespace API.Tests.Helpers +namespace API.Tests.Helpers; + +public static class ParserInfoFactory { - public static class ParserInfoFactory + public static ParserInfo CreateParsedInfo(string series, string volumes, string chapters, string filename, bool isSpecial) { - public static ParserInfo CreateParsedInfo(string series, 
string volumes, string chapters, string filename, bool isSpecial) + return new ParserInfo() { - return new ParserInfo() - { - Chapters = chapters, - Edition = "", - Format = MangaFormat.Archive, - FullFilePath = Path.Join(@"/manga/", filename), - Filename = filename, - IsSpecial = isSpecial, - Title = Path.GetFileNameWithoutExtension(filename), - Series = series, - Volumes = volumes - }; - } + Chapters = chapters, + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = Path.Join(@"/manga/", filename), + Filename = filename, + IsSpecial = isSpecial, + Title = Path.GetFileNameWithoutExtension(filename), + Series = series, + Volumes = volumes + }; + } - public static void AddToParsedInfo(IDictionary> collectedSeries, ParserInfo info) + public static void AddToParsedInfo(IDictionary> collectedSeries, ParserInfo info) + { + var existingKey = collectedSeries.Keys.FirstOrDefault(ps => + ps.Format == info.Format && ps.NormalizedName == API.Services.Tasks.Scanner.Parser.Parser.Normalize(info.Series)); + existingKey ??= new ParsedSeries() { - var existingKey = collectedSeries.Keys.FirstOrDefault(ps => - ps.Format == info.Format && ps.NormalizedName == API.Services.Tasks.Scanner.Parser.Parser.Normalize(info.Series)); - existingKey ??= new ParsedSeries() + Format = info.Format, + Name = info.Series, + NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize(info.Series) + }; + if (collectedSeries.GetType() == typeof(ConcurrentDictionary<,>)) + { + ((ConcurrentDictionary>) collectedSeries).AddOrUpdate(existingKey, new List() {info}, (_, oldValue) => { - Format = info.Format, - Name = info.Series, - NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize(info.Series) - }; - if (collectedSeries.GetType() == typeof(ConcurrentDictionary<,>)) - { - ((ConcurrentDictionary>) collectedSeries).AddOrUpdate(existingKey, new List() {info}, (_, oldValue) => + oldValue ??= new List(); + if (!oldValue.Contains(info)) { - oldValue ??= new List(); - if 
(!oldValue.Contains(info)) - { - oldValue.Add(info); - } + oldValue.Add(info); + } - return oldValue; - }); + return oldValue; + }); + } + else + { + if (!collectedSeries.ContainsKey(existingKey)) + { + collectedSeries.Add(existingKey, new List() {info}); } else { - if (!collectedSeries.ContainsKey(existingKey)) + var list = collectedSeries[existingKey]; + if (!list.Contains(info)) { - collectedSeries.Add(existingKey, new List() {info}); - } - else - { - var list = collectedSeries[existingKey]; - if (!list.Contains(info)) - { - list.Add(info); - } - - collectedSeries[existingKey] = list; + list.Add(info); } + collectedSeries[existingKey] = list; } } + } } diff --git a/API.Tests/Helpers/TestCaseGenerator.cs b/API.Tests/Helpers/TestCaseGenerator.cs index 41b99e5e4..833da0502 100644 --- a/API.Tests/Helpers/TestCaseGenerator.cs +++ b/API.Tests/Helpers/TestCaseGenerator.cs @@ -1,53 +1,52 @@ using System.IO; -namespace API.Tests.Helpers +namespace API.Tests.Helpers; + +/// +/// Given a -testcase.txt file, will generate a folder with fake archive or book files. These files are just renamed txt files. +/// This currently is broken - you cannot create files from a unit test it seems +/// +public static class TestCaseGenerator { - /// - /// Given a -testcase.txt file, will generate a folder with fake archive or book files. These files are just renamed txt files. 
- /// This currently is broken - you cannot create files from a unit test it seems - /// - public static class TestCaseGenerator + public static string GenerateFiles(string directory, string fileToExpand) { - public static string GenerateFiles(string directory, string fileToExpand) + //var files = Directory.GetFiles(directory, fileToExpand); + var file = new FileInfo(fileToExpand); + if (!file.Exists && file.Name.EndsWith("-testcase.txt")) return string.Empty; + + var baseDirectory = TestCaseGenerator.CreateTestBase(fileToExpand, directory); + var filesToCreate = File.ReadLines(file.FullName); + foreach (var fileToCreate in filesToCreate) { - //var files = Directory.GetFiles(directory, fileToExpand); - var file = new FileInfo(fileToExpand); - if (!file.Exists && file.Name.EndsWith("-testcase.txt")) return string.Empty; - - var baseDirectory = TestCaseGenerator.CreateTestBase(fileToExpand, directory); - var filesToCreate = File.ReadLines(file.FullName); - foreach (var fileToCreate in filesToCreate) - { - // var folders = DirectoryService.GetFoldersTillRoot(directory, fileToCreate); - // foreach (var VARIABLE in COLLECTION) - // { - // - // } - File.Create(fileToCreate); - } - - - - - return baseDirectory; + // var folders = DirectoryService.GetFoldersTillRoot(directory, fileToCreate); + // foreach (var VARIABLE in COLLECTION) + // { + // + // } + File.Create(fileToCreate); } - /// - /// Creates and returns a new base directory for data creation for a given testcase - /// - /// - /// - /// - private static string CreateTestBase(string file, string rootDirectory) - { - var baseDir = file.Split("-testcase.txt")[0]; - var newDirectory = Path.Join(rootDirectory, baseDir); - if (!Directory.Exists(newDirectory)) - { - new DirectoryInfo(newDirectory).Create(); - } - return newDirectory; - } + + + return baseDirectory; } -} \ No newline at end of file + + /// + /// Creates and returns a new base directory for data creation for a given testcase + /// + /// + /// + /// + 
private static string CreateTestBase(string file, string rootDirectory) + { + var baseDir = file.Split("-testcase.txt")[0]; + var newDirectory = Path.Join(rootDirectory, baseDir); + if (!Directory.Exists(newDirectory)) + { + new DirectoryInfo(newDirectory).Create(); + } + + return newDirectory; + } +} diff --git a/API.Tests/Parser/BookParserTests.cs b/API.Tests/Parser/BookParserTests.cs index 23b9c6e63..003dbfecc 100644 --- a/API.Tests/Parser/BookParserTests.cs +++ b/API.Tests/Parser/BookParserTests.cs @@ -1,43 +1,42 @@ using Xunit; -namespace API.Tests.Parser +namespace API.Tests.Parser; + +public class BookParserTests { - public class BookParserTests + [Theory] + [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", "Gifting The Wonderful World With Blessings!")] + [InlineData("BBC Focus 00 The Science of Happiness 2nd Edition (2018)", "BBC Focus 00 The Science of Happiness 2nd Edition")] + [InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "Faust")] + public void ParseSeriesTest(string filename, string expected) { - [Theory] - [InlineData("Gifting The Wonderful World With Blessings! 
- 3 Side Stories [yuNS][Unknown]", "Gifting The Wonderful World With Blessings!")] - [InlineData("BBC Focus 00 The Science of Happiness 2nd Edition (2018)", "BBC Focus 00 The Science of Happiness 2nd Edition")] - [InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "Faust")] - public void ParseSeriesTest(string filename, string expected) - { - Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename)); - } - - [Theory] - [InlineData("Harrison, Kim - Dates from Hell - Hollows Vol 2.5.epub", "2.5")] - [InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "1")] - public void ParseVolumeTest(string filename, string expected) - { - Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename)); - } - - // [Theory] - // [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA", "@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA")] - // [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url('fonts/font.css')", "@font-face{font-family:'syyskuu_repaleinen';src:url('TEST/fonts/font.css')")] - // public void ReplaceFontSrcUrl(string input, string expected) - // { - // var apiBase = "TEST/"; - // var actual = API.Parser.Parser.FontSrcUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3"); - // Assert.Equal(expected, actual); - // } - // - // [Theory] - // [InlineData("@import url('font.css');", "@import url('TEST/font.css');")] - // public void ReplaceImportSrcUrl(string input, string expected) - // { - // var apiBase = "TEST/"; - // var actual = API.Parser.Parser.CssImportUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3"); - // Assert.Equal(expected, actual); - // } + Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename)); } + + [Theory] + [InlineData("Harrison, Kim - Dates from Hell - Hollows Vol 2.5.epub", "2.5")] + [InlineData("Faust - Volume 01 [Del Rey][Scans_Compressed]", "1")] + 
public void ParseVolumeTest(string filename, string expected) + { + Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename)); + } + + // [Theory] + // [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA", "@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA")] + // [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url('fonts/font.css')", "@font-face{font-family:'syyskuu_repaleinen';src:url('TEST/fonts/font.css')")] + // public void ReplaceFontSrcUrl(string input, string expected) + // { + // var apiBase = "TEST/"; + // var actual = API.Parser.Parser.FontSrcUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3"); + // Assert.Equal(expected, actual); + // } + // + // [Theory] + // [InlineData("@import url('font.css');", "@import url('TEST/font.css');")] + // public void ReplaceImportSrcUrl(string input, string expected) + // { + // var apiBase = "TEST/"; + // var actual = API.Parser.Parser.CssImportUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3"); + // Assert.Equal(expected, actual); + // } } diff --git a/API.Tests/Parser/ComicParserTests.cs b/API.Tests/Parser/ComicParserTests.cs index 74a2b8bb2..fa0448ff9 100644 --- a/API.Tests/Parser/ComicParserTests.cs +++ b/API.Tests/Parser/ComicParserTests.cs @@ -1,196 +1,209 @@ using System.IO.Abstractions.TestingHelpers; using API.Parser; using API.Services; +using API.Services.Tasks.Scanner.Parser; using Microsoft.Extensions.Logging; using NSubstitute; using Xunit; using Xunit.Abstractions; -namespace API.Tests.Parser +namespace API.Tests.Parser; + +public class ComicParserTests { - public class ComicParserTests + private readonly ITestOutputHelper _testOutputHelper; + private readonly DefaultParser _defaultParser; + + public ComicParserTests(ITestOutputHelper testOutputHelper) { - private readonly ITestOutputHelper _testOutputHelper; - private readonly DefaultParser _defaultParser; + _testOutputHelper 
= testOutputHelper; + _defaultParser = + new DefaultParser(new DirectoryService(Substitute.For>(), + new MockFileSystem())); + } - public ComicParserTests(ITestOutputHelper testOutputHelper) - { - _testOutputHelper = testOutputHelper; - _defaultParser = - new DefaultParser(new DirectoryService(Substitute.For>(), - new MockFileSystem())); - } + [Theory] + [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "Asterix the Gladiator")] + [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "The First Asterix Frieze")] + [InlineData("Batman & Catwoman - Trail of the Gun 01", "Batman & Catwoman - Trail of the Gun")] + [InlineData("Batman & Daredevil - King of New York", "Batman & Daredevil - King of New York")] + [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "Batman & Grendel")] + [InlineData("Batman & Robin the Teen Wonder #0", "Batman & Robin the Teen Wonder")] + [InlineData("Batman & Wildcat (1 of 3)", "Batman & Wildcat")] + [InlineData("Batman And Superman World's Finest #01", "Batman And Superman World's Finest")] + [InlineData("Babe 01", "Babe")] + [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "Scott Pilgrim")] + [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")] + [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005)", "Scott Pilgrim")] + [InlineData("Wolverine - Origins 003 (2006) (digital) (Minutemen-PhD)", "Wolverine - Origins")] + [InlineData("Invincible Vol 01 Family matters (2005) (Digital).cbr", "Invincible")] + [InlineData("Amazing Man Comics chapter 25", "Amazing Man Comics")] + [InlineData("Amazing Man Comics issue #25", "Amazing Man Comics")] + [InlineData("Teen Titans v1 038 (1972) (c2c).cbr", "Teen Titans")] + [InlineData("Batman Beyond 02 (of 6) (1999)", "Batman Beyond")] + [InlineData("Batman Beyond - Return of the Joker (2001)", "Batman Beyond - Return of the Joker")] + [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "Invincible")] + [InlineData("Batman Wayne Family Adventures - Ep. 001 - Moving In", "Batman Wayne Family Adventures")] + [InlineData("Saga 001 (2012) (Digital) (Empire-Zone).cbr", "Saga")] + [InlineData("spawn-123", "spawn")] + [InlineData("spawn-chapter-123", "spawn")] + [InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", "Spawn")] + [InlineData("Batman Beyond 04 (of 6) (1999)", "Batman Beyond")] + [InlineData("Batman Beyond 001 (2012)", "Batman Beyond")] + [InlineData("Batman Beyond 2.0 001 (2013)", "Batman Beyond 2.0")] + [InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "Batman - Catwoman")] + [InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "Chew")] + [InlineData("Chew Script Book (2011) (digital-Empire) SP04", "Chew Script Book")] + [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", "Batman - Detective Comics - Rebirth Deluxe Edition Book")] + [InlineData("Cyberpunk 2077 - Your Voice #01", "Cyberpunk 2077 - Your Voice")] + [InlineData("Cyberpunk 2077 #01", "Cyberpunk 2077")] + [InlineData("Cyberpunk 2077 - Trauma Team #04.cbz", "Cyberpunk 2077 - Trauma Team")] + [InlineData("Batgirl Vol.2000 #57 (December, 2004)", "Batgirl")] + [InlineData("Batgirl V2000 
#57", "Batgirl")] + [InlineData("Fables 021 (2004) (Digital) (Nahga-Empire)", "Fables")] + [InlineData("2000 AD 0366 [1984-04-28] (flopbie)", "2000 AD")] + [InlineData("Daredevil - v6 - 10 - (2019)", "Daredevil")] + [InlineData("Batman - The Man Who Laughs #1 (2005)", "Batman - The Man Who Laughs")] + [InlineData("Demon 012 (Sep 1973) c2c", "Demon")] + [InlineData("Dragon Age - Until We Sleep 01 (of 03)", "Dragon Age - Until We Sleep")] + [InlineData("Green Lantern v2 017 - The Spy-Eye that doomed Green Lantern v2", "Green Lantern")] + [InlineData("Green Lantern - Circle of Fire Special - Adam Strange (2000)", "Green Lantern - Circle of Fire - Adam Strange")] + [InlineData("Identity Crisis Extra - Rags Morales Sketches (2005)", "Identity Crisis - Rags Morales Sketches")] + [InlineData("Daredevil - t6 - 10 - (2019)", "Daredevil")] + [InlineData("Batgirl T2000 #57", "Batgirl")] + [InlineData("Teen Titans t1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")] + [InlineData("Conquistador_-Tome_2", "Conquistador")] + [InlineData("Max_l_explorateur-_Tome_0", "Max l explorateur")] + [InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "Chevaliers d'Héliopolis")] + [InlineData("Bd Fr-Aldebaran-Antares-t6", "Aldebaran-Antares")] + [InlineData("Tintin - T22 Vol 714 pour Sydney", "Tintin")] + [InlineData("Fables 2010 Vol. 
1 Legends in Exile", "Fables 2010")] + [InlineData("Kebab Том 1 Глава 1", "Kebab")] + [InlineData("Манга Глава 1", "Манга")] + public void ParseComicSeriesTest(string filename, string expected) + { + Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicSeries(filename)); + } - [Theory] - [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "Asterix the Gladiator")] - [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "The First Asterix Frieze")] - [InlineData("Batman & Catwoman - Trail of the Gun 01", "Batman & Catwoman - Trail of the Gun")] - [InlineData("Batman & Daredevil - King of New York", "Batman & Daredevil - King of New York")] - [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "Batman & Grendel")] - [InlineData("Batman & Robin the Teen Wonder #0", "Batman & Robin the Teen Wonder")] - [InlineData("Batman & Wildcat (1 of 3)", "Batman & Wildcat")] - [InlineData("Batman And Superman World's Finest #01", "Batman And Superman World's Finest")] - [InlineData("Babe 01", "Babe")] - [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "Scott Pilgrim")] - [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")] - [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005)", "Scott Pilgrim")] - [InlineData("Wolverine - Origins 003 (2006) (digital) (Minutemen-PhD)", "Wolverine - Origins")] - [InlineData("Invincible Vol 01 Family matters (2005) (Digital).cbr", "Invincible")] - [InlineData("Amazing Man Comics chapter 25", "Amazing Man Comics")] - [InlineData("Amazing Man Comics issue #25", "Amazing Man Comics")] - [InlineData("Teen Titans v1 038 (1972) (c2c).cbr", "Teen Titans")] - [InlineData("Batman Beyond 02 (of 6) (1999)", "Batman Beyond")] - [InlineData("Batman Beyond - Return of the Joker (2001)", "Batman Beyond - Return of the Joker")] - [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "Invincible")] - [InlineData("Batman Wayne Family Adventures - Ep. 001 - Moving In", "Batman Wayne Family Adventures")] - [InlineData("Saga 001 (2012) (Digital) (Empire-Zone).cbr", "Saga")] - [InlineData("spawn-123", "spawn")] - [InlineData("spawn-chapter-123", "spawn")] - [InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", "Spawn")] - [InlineData("Batman Beyond 04 (of 6) (1999)", "Batman Beyond")] - [InlineData("Batman Beyond 001 (2012)", "Batman Beyond")] - [InlineData("Batman Beyond 2.0 001 (2013)", "Batman Beyond 2.0")] - [InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "Batman - Catwoman")] - [InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "Chew")] - [InlineData("Chew Script Book (2011) (digital-Empire) SP04", "Chew Script Book")] - [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", "Batman - Detective Comics - Rebirth Deluxe Edition Book")] - [InlineData("Cyberpunk 2077 - Your Voice #01", "Cyberpunk 2077 - Your Voice")] - [InlineData("Cyberpunk 2077 #01", "Cyberpunk 2077")] - [InlineData("Cyberpunk 2077 - Trauma Team #04.cbz", "Cyberpunk 2077 - Trauma Team")] - [InlineData("Batgirl Vol.2000 #57 (December, 2004)", "Batgirl")] - [InlineData("Batgirl V2000 
#57", "Batgirl")] - [InlineData("Fables 021 (2004) (Digital) (Nahga-Empire)", "Fables")] - [InlineData("2000 AD 0366 [1984-04-28] (flopbie)", "2000 AD")] - [InlineData("Daredevil - v6 - 10 - (2019)", "Daredevil")] - [InlineData("Batman - The Man Who Laughs #1 (2005)", "Batman - The Man Who Laughs")] - [InlineData("Demon 012 (Sep 1973) c2c", "Demon")] - [InlineData("Dragon Age - Until We Sleep 01 (of 03)", "Dragon Age - Until We Sleep")] - [InlineData("Green Lantern v2 017 - The Spy-Eye that doomed Green Lantern v2", "Green Lantern")] - [InlineData("Green Lantern - Circle of Fire Special - Adam Strange (2000)", "Green Lantern - Circle of Fire - Adam Strange")] - [InlineData("Identity Crisis Extra - Rags Morales Sketches (2005)", "Identity Crisis - Rags Morales Sketches")] - [InlineData("Daredevil - t6 - 10 - (2019)", "Daredevil")] - [InlineData("Batgirl T2000 #57", "Batgirl")] - [InlineData("Teen Titans t1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")] - [InlineData("Conquistador_-Tome_2", "Conquistador")] - [InlineData("Max_l_explorateur-_Tome_0", "Max l explorateur")] - [InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "Chevaliers d'Héliopolis")] - [InlineData("Bd Fr-Aldebaran-Antares-t6", "Aldebaran-Antares")] - [InlineData("Tintin - T22 Vol 714 pour Sydney", "Tintin")] - [InlineData("Fables 2010 Vol. 
1 Legends in Exile", "Fables 2010")] - public void ParseComicSeriesTest(string filename, string expected) - { - Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicSeries(filename)); - } + [Theory] + [InlineData("01 Spider-Man & Wolverine 01.cbr", "0")] + [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")] + [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")] + [InlineData("Batman & Catwoman - Trail of the Gun 01", "0")] + [InlineData("Batman & Daredevil - King of New York", "0")] + [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "0")] + [InlineData("Batman & Robin the Teen Wonder #0", "0")] + [InlineData("Batman & Wildcat (1 of 3)", "0")] + [InlineData("Batman And Superman World's Finest #01", "0")] + [InlineData("Babe 01", "0")] + [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "0")] + [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] + [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005)", "0")] + [InlineData("Superman v1 024 (09-10 1943)", "1")] + [InlineData("Amazing Man Comics chapter 25", "0")] + [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "0")] + [InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", "0")] + [InlineData("spawn-123", "0")] + [InlineData("spawn-chapter-123", "0")] + [InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", "0")] + [InlineData("Batman Beyond 04 (of 6) (1999)", "0")] + [InlineData("Batman Beyond 001 (2012)", "0")] + [InlineData("Batman Beyond 2.0 001 (2013)", "0")] + [InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "0")] + [InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "1")] + [InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")] + [InlineData("Batgirl Vol.2000 #57 (December, 2004)", "2000")] + [InlineData("Batgirl V2000 #57", "2000")] + [InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "0")] + [InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", "0")] + [InlineData("2000 AD 0366 [1984-04-28] (flopbie)", "0")] + [InlineData("Daredevil - v6 - 10 - (2019)", "6")] + // Tome Tests + [InlineData("Daredevil - t6 - 10 - (2019)", "6")] + [InlineData("Batgirl T2000 #57", "2000")] + [InlineData("Teen Titans t1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] + [InlineData("Conquistador_Tome_2", "2")] + [InlineData("Max_l_explorateur-_Tome_0", "0")] + [InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "3")] + [InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "0")] + [InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "1")] + // Russian Tests + [InlineData("Kebab Том 1 Глава 3", "1")] + [InlineData("Манга Глава 2", "0")] + public void ParseComicVolumeTest(string filename, string expected) + { + Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicVolume(filename)); + } - [Theory] 
- [InlineData("01 Spider-Man & Wolverine 01.cbr", "0")] - [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")] - [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")] - [InlineData("Batman & Catwoman - Trail of the Gun 01", "0")] - [InlineData("Batman & Daredevil - King of New York", "0")] - [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "0")] - [InlineData("Batman & Robin the Teen Wonder #0", "0")] - [InlineData("Batman & Wildcat (1 of 3)", "0")] - [InlineData("Batman And Superman World's Finest #01", "0")] - [InlineData("Babe 01", "0")] - [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "0")] - [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] - [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "0")] - [InlineData("Superman v1 024 (09-10 1943)", "1")] - [InlineData("Amazing Man Comics chapter 25", "0")] - [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "0")] - [InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", "0")] - [InlineData("spawn-123", "0")] - [InlineData("spawn-chapter-123", "0")] - [InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", "0")] - [InlineData("Batman Beyond 04 (of 6) (1999)", "0")] - [InlineData("Batman Beyond 001 (2012)", "0")] - [InlineData("Batman Beyond 2.0 001 (2013)", "0")] - [InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "0")] - [InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "1")] - [InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")] - [InlineData("Batgirl Vol.2000 #57 (December, 2004)", "2000")] - [InlineData("Batgirl V2000 #57", "2000")] - [InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "0")] - [InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", "0")] - [InlineData("2000 AD 0366 [1984-04-28] (flopbie)", "0")] - [InlineData("Daredevil - v6 - 10 - 
(2019)", "6")] - // Tome Tests - [InlineData("Daredevil - t6 - 10 - (2019)", "6")] - [InlineData("Batgirl T2000 #57", "2000")] - [InlineData("Teen Titans t1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] - [InlineData("Conquistador_Tome_2", "2")] - [InlineData("Max_l_explorateur-_Tome_0", "0")] - [InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "3")] - [InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "0")] - [InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "1")] - public void ParseComicVolumeTest(string filename, string expected) - { - Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicVolume(filename)); - } - - [Theory] - [InlineData("01 Spider-Man & Wolverine 01.cbr", "1")] - [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")] - [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")] - [InlineData("Batman & Catwoman - Trail of the Gun 01", "1")] - [InlineData("Batman & Daredevil - King of New York", "0")] - [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")] - [InlineData("Batman & Robin the Teen Wonder #0", "0")] - [InlineData("Batman & Wildcat (1 of 3)", "1")] - [InlineData("Batman & Wildcat (2 of 3)", "2")] - [InlineData("Batman And Superman World's Finest #01", "1")] - [InlineData("Babe 01", "1")] - [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "1")] - [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] - [InlineData("Superman v1 024 (09-10 1943)", "24")] - [InlineData("Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr", "70.5")] - [InlineData("Amazing Man Comics chapter 25", "25")] - [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "33.5")] - [InlineData("Batman Wayne Family Adventures - Ep. 
014 - Moving In", "14")] - [InlineData("Saga 001 (2012) (Digital) (Empire-Zone)", "1")] - [InlineData("spawn-123", "123")] - [InlineData("spawn-chapter-123", "123")] - [InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", "62")] - [InlineData("Batman Beyond 04 (of 6) (1999)", "4")] - [InlineData("Invincible 052 (c2c) (2008) (Minutemen-TheCouple)", "52")] - [InlineData("Y - The Last Man #001", "1")] - [InlineData("Batman Beyond 001 (2012)", "1")] - [InlineData("Batman Beyond 2.0 001 (2013)", "1")] - [InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "1")] - [InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "0")] - [InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")] - [InlineData("Batgirl Vol.2000 #57 (December, 2004)", "57")] - [InlineData("Batgirl V2000 #57", "57")] - [InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "21")] - [InlineData("Cyberpunk 2077 - Trauma Team #04.cbz", "4")] - [InlineData("2000 AD 0366 [1984-04-28] (flopbie)", "366")] - [InlineData("Daredevil - v6 - 10 - (2019)", "10")] - [InlineData("Batman Beyond 2016 - Chapter 001.cbz", "1")] - [InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "1")] - [InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "0")] - public void ParseComicChapterTest(string filename, string expected) - { - Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicChapter(filename)); - } + [Theory] + [InlineData("01 Spider-Man & Wolverine 01.cbr", "1")] + [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")] + [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")] + [InlineData("Batman & Catwoman - Trail of the Gun 01", "1")] + [InlineData("Batman & Daredevil - King of New York", "0")] + [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")] + [InlineData("Batman & Robin the Teen Wonder #0", "0")] + [InlineData("Batman & Wildcat (1 of 
3)", "1")] + [InlineData("Batman & Wildcat (2 of 3)", "2")] + [InlineData("Batman And Superman World's Finest #01", "1")] + [InlineData("Babe 01", "1")] + [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "1")] + [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] + [InlineData("Superman v1 024 (09-10 1943)", "24")] + [InlineData("Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr", "70.5")] + [InlineData("Amazing Man Comics chapter 25", "25")] + [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "33.5")] + [InlineData("Batman Wayne Family Adventures - Ep. 014 - Moving In", "14")] + [InlineData("Saga 001 (2012) (Digital) (Empire-Zone)", "1")] + [InlineData("spawn-123", "123")] + [InlineData("spawn-chapter-123", "123")] + [InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", "62")] + [InlineData("Batman Beyond 04 (of 6) (1999)", "4")] + [InlineData("Invincible 052 (c2c) (2008) (Minutemen-TheCouple)", "52")] + [InlineData("Y - The Last Man #001", "1")] + [InlineData("Batman Beyond 001 (2012)", "1")] + [InlineData("Batman Beyond 2.0 001 (2013)", "1")] + [InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "1")] + [InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "0")] + [InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")] + [InlineData("Batgirl Vol.2000 #57 (December, 2004)", "57")] + [InlineData("Batgirl V2000 #57", "57")] + [InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "21")] + [InlineData("Cyberpunk 2077 - Trauma Team #04.cbz", "4")] + [InlineData("2000 AD 0366 [1984-04-28] (flopbie)", "366")] + [InlineData("Daredevil - v6 - 10 - (2019)", "10")] + [InlineData("Batman Beyond 2016 - Chapter 001.cbz", "1")] + [InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "1")] + [InlineData("Adventure Time TPB (2012)/Adventure Time v01 
(2012).cbz", "0")] + [InlineData("Kebab Том 1 Глава 3", "3")] + [InlineData("Манга Глава 2", "2")] + [InlineData("Манга 2 Глава", "2")] + [InlineData("Манга Том 1 2 Глава", "2")] + public void ParseComicChapterTest(string filename, string expected) + { + Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicChapter(filename)); + } - [Theory] - [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", true)] - [InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", true)] - [InlineData("Baldwin the Brave & Other Tales Special SP1.cbr", true)] - [InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", true)] - [InlineData("Boule et Bill - THS -Bill à disparu", true)] - [InlineData("Asterix - HS - Les 12 travaux d'Astérix", true)] - [InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", true)] - [InlineData("laughs", false)] - [InlineData("Annual Days of Summer", false)] - [InlineData("Adventure Time 2013 Annual #001 (2013)", true)] - [InlineData("Adventure Time 2013_Annual_#001 (2013)", true)] - [InlineData("Adventure Time 2013_-_Annual #001 (2013)", true)] - public void ParseComicSpecialTest(string input, bool expected) - { - Assert.Equal(expected, !string.IsNullOrEmpty(API.Services.Tasks.Scanner.Parser.Parser.ParseComicSpecial(input))); - } + [Theory] + [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", true)] + [InlineData("Zombie Tramp vs. 
Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", true)] + [InlineData("Baldwin the Brave & Other Tales Special SP1.cbr", true)] + [InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", true)] + [InlineData("Boule et Bill - THS -Bill à disparu", true)] + [InlineData("Asterix - HS - Les 12 travaux d'Astérix", true)] + [InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", true)] + [InlineData("laughs", false)] + [InlineData("Annual Days of Summer", false)] + [InlineData("Adventure Time 2013 Annual #001 (2013)", true)] + [InlineData("Adventure Time 2013_Annual_#001 (2013)", true)] + [InlineData("Adventure Time 2013_-_Annual #001 (2013)", true)] + [InlineData("G.I. Joe - A Real American Hero Yearbook 004 Reprint (2021)", false)] + [InlineData("Mazebook 001", false)] + [InlineData("X-23 One Shot (2010)", true)] + [InlineData("Casus Belli v1 Hors-Série 21 - Mousquetaires et Sorcellerie", true)] + public void IsComicSpecialTest(string input, bool expected) + { + Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsComicSpecial(input)); } } diff --git a/API.Tests/Parser/DefaultParserTests.cs b/API.Tests/Parser/DefaultParserTests.cs index f32838dd3..2640aa6c2 100644 --- a/API.Tests/Parser/DefaultParserTests.cs +++ b/API.Tests/Parser/DefaultParserTests.cs @@ -3,6 +3,7 @@ using System.IO.Abstractions.TestingHelpers; using API.Entities.Enums; using API.Parser; using API.Services; +using API.Services.Tasks.Scanner.Parser; using Microsoft.Extensions.Logging; using NSubstitute; using Xunit; @@ -77,6 +78,21 @@ public class DefaultParserTests Assert.Equal(expectedParseInfo, actual.Series); } + [Theory] + [InlineData("/manga/Btooom!/Specials/Art Book.cbz", "Btooom!")] + public void ParseFromFallbackFolders_ShouldUseExistingSeriesName_NewScanLoop(string inputFile, string expectedParseInfo) + { + const string rootDirectory = "/manga/"; + var fs = new MockFileSystem(); + fs.AddDirectory(rootDirectory); + fs.AddFile(inputFile, new 
MockFileData("")); + var ds = new DirectoryService(Substitute.For>(), fs); + var parser = new DefaultParser(ds); + var actual = parser.Parse(inputFile, rootDirectory); + _defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual); + Assert.Equal(expectedParseInfo, actual.Series); + } + #endregion @@ -87,6 +103,7 @@ public class DefaultParserTests { const string rootPath = @"E:/Manga/"; var expected = new Dictionary(); + var filepath = @"E:/Manga/Mujaki no Rakuen/Mujaki no Rakuen Vol12 ch76.cbz"; expected.Add(filepath, new ParserInfo { @@ -199,14 +216,6 @@ public class DefaultParserTests FullFilePath = filepath, IsSpecial = false }); - filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub"; - expected.Add(filepath, new ParserInfo - { - Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "", - Chapters = "0", Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub, - FullFilePath = filepath, IsSpecial = false - }); - // If an image is cover exclusively, ignore it filepath = @"E:\Manga\Seraph of the End\cover.png"; expected.Add(filepath, null); @@ -219,11 +228,12 @@ public class DefaultParserTests FullFilePath = filepath, IsSpecial = false }); + // Note: Fallback to folder will parse Monster #8 and get Monster filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 
001 [MangaPlus] [Digital] [amit34521]\13.jpg"; expected.Add(filepath, new ParserInfo { - Series = "Monster #8", Volumes = "0", Edition = "", - Chapters = "1", Filename = "13.jpg", Format = MangaFormat.Archive, + Series = "Monster", Volumes = "0", Edition = "", + Chapters = "1", Filename = "13.jpg", Format = MangaFormat.Image, FullFilePath = filepath, IsSpecial = false }); @@ -235,6 +245,29 @@ public class DefaultParserTests FullFilePath = filepath, IsSpecial = false }); + filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Vol19\ch186\Vol. 19 p106.gif"; + expected.Add(filepath, new ParserInfo + { + Series = "Just Images the second", Volumes = "19", Edition = "", + Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image, + FullFilePath = filepath, IsSpecial = false + }); + + filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Blank Folder\Vol19\ch186\Vol. 19 p106.gif"; + expected.Add(filepath, new ParserInfo + { + Series = "Just Images the second", Volumes = "19", Edition = "", + Chapters = "186", Filename = "Vol. 
19 p106.gif", Format = MangaFormat.Image, + FullFilePath = filepath, IsSpecial = false + }); + + filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub"; + expected.Add(filepath, new ParserInfo + { + Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "", + Chapters = "0", Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub, + FullFilePath = filepath, IsSpecial = false + }); foreach (var file in expected.Keys) { @@ -243,7 +276,7 @@ public class DefaultParserTests if (expectedInfo == null) { Assert.Null(actual); - return; + continue; } Assert.NotNull(actual); _testOutputHelper.WriteLine($"Validating {file}"); @@ -383,7 +416,7 @@ public class DefaultParserTests if (expectedInfo == null) { Assert.Null(actual); - return; + continue; } Assert.NotNull(actual); _testOutputHelper.WriteLine($"Validating {file}"); diff --git a/API.Tests/Parser/MangaParserTests.cs b/API.Tests/Parser/MangaParserTests.cs index 12e312661..89b1112f5 100644 --- a/API.Tests/Parser/MangaParserTests.cs +++ b/API.Tests/Parser/MangaParserTests.cs @@ -1,321 +1,334 @@ +using System.Runtime.InteropServices; using API.Entities.Enums; using Xunit; using Xunit.Abstractions; -namespace API.Tests.Parser +namespace API.Tests.Parser; + +public class MangaParserTests { - public class MangaParserTests + private readonly ITestOutputHelper _testOutputHelper; + + public MangaParserTests(ITestOutputHelper testOutputHelper) { - private readonly ITestOutputHelper _testOutputHelper; - - public MangaParserTests(ITestOutputHelper testOutputHelper) - { - _testOutputHelper = testOutputHelper; - } - - [Theory] - [InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")] - [InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 
008.png", "1")] - [InlineData("Historys Strongest Disciple Kenichi_v11_c90-98.zip", "11")] - [InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", "1")] - [InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", "1")] - [InlineData("Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA]", "1")] - [InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "16-17")] - [InlineData("Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", "1")] - [InlineData("v001", "1")] - [InlineData("Vol 1", "1")] - [InlineData("vol_356-1", "356")] // Mangapy syntax - [InlineData("No Volume", "0")] - [InlineData("U12 (Under 12) Vol. 0001 Ch. 0001 - Reiwa Scans (gb)", "1")] - [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip", "1")] - [InlineData("Tonikaku Cawaii [Volume 11].cbz", "11")] - [InlineData("[WS]_Ichiban_Ushiro_no_Daimaou_v02_ch10.zip", "2")] - [InlineData("[xPearse] Kyochuu Rettou Volume 1 [English] [Manga] [Volume Scans]", "1")] - [InlineData("Tower Of God S01 014 (CBT) (digital).cbz", "1")] - [InlineData("Tenjou_Tenge_v17_c100[MT].zip", "17")] - [InlineData("Shimoneta - Manmaru Hen - c001-006 (v01) [Various].zip", "1")] - [InlineData("Future Diary v02 (2009) (Digital) (Viz).cbz", "2")] - [InlineData("Mujaki no Rakuen Vol12 ch76", "12")] - [InlineData("Ichinensei_ni_Nacchattara_v02_ch11_[Taruby]_v1.3.zip", "2")] - [InlineData("Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", "1")] - [InlineData("Dorohedoro v11 (2013) (Digital) (LostNerevarine-Empire).cbz", "11")] - [InlineData("Dorohedoro v12 (2013) (Digital) (LostNerevarine-Empire).cbz", "12")] - [InlineData("Yumekui_Merry_v01_c01[Bakayarou-Kuu].rar", "1")] - [InlineData("Yumekui-Merry_DKThias_Chapter11v2.zip", "0")] - [InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1")] - [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "0")] - [InlineData("VanDread-v01-c001[MD].zip", "1")] - 
[InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch27_[VISCANS].zip", "4")] - [InlineData("Mob Psycho 100 v02 (2019) (Digital) (Shizu).cbz", "2")] - [InlineData("Kodomo no Jikan vol. 1.cbz", "1")] - [InlineData("Kodomo no Jikan vol. 10.cbz", "10")] - [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12 [Dametrans][v2]", "0")] - [InlineData("Vagabond_v03", "3")] - [InlineData("Mujaki No Rakune Volume 10.cbz", "10")] - [InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "0")] - [InlineData("Volume 12 - Janken Boy is Coming!.cbz", "12")] - [InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz", "20")] - [InlineData("Gantz.V26.cbz", "26")] - [InlineData("NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar", "4")] - [InlineData("[Hidoi]_Amaenaideyo_MS_vol01_chp02.rar", "1")] - [InlineData("NEEDLESS_Vol.4_-_Simeon_6_v2_[SugoiSugoi].rar", "4")] - [InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "1")] - [InlineData("Sword Art Online Vol 10 - Alicization Running [Yen Press] [LuCaZ] {r2}.epub", "10")] - [InlineData("Noblesse - Episode 406 (52 Pages).7z", "0")] - [InlineData("X-Men v1 #201 (September 2007).cbz", "1")] - [InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 034.5", "6")] - [InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03 Ch. 023.5 - Volume 3 Extras.cbz", "3")] - [InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03.5 Ch. 
023.5 - Volume 3 Extras.cbz", "3.5")] - [InlineData("幽游白书完全版 第03卷 天下", "3")] - [InlineData("阿衰online 第1册", "1")] - [InlineData("【TFO汉化&Petit汉化】迷你偶像漫画卷2第25话", "2")] - [InlineData("63권#200", "63")] - [InlineData("시즌34삽화2", "34")] - [InlineData("スライム倒して300年、知らないうちにレベルMAXになってました 1巻", "1")] - [InlineData("スライム倒して300年、知らないうちにレベルMAXになってました 1-3巻", "1-3")] - public void ParseVolumeTest(string filename, string expected) - { - Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename)); - } - - [Theory] - [InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "Killing Bites")] - [InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "My Girlfriend Is Shobitch")] - [InlineData("Historys Strongest Disciple Kenichi_v11_c90-98.zip", "Historys Strongest Disciple Kenichi")] - [InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", "B Gata H Kei")] - [InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", "BTOOOM!")] - [InlineData("Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA]", "Gokukoku no Brynhildr")] - [InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "Dance in the Vampire Bund")] - [InlineData("v001", "")] - [InlineData("U12 (Under 12) Vol. 0001 Ch. 0001 - Reiwa Scans (gb)", "U12")] - [InlineData("Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)", "Akame ga KILL! ZERO")] - [InlineData("APOSIMZ 017 (2018) (Digital) (danke-Empire).cbz", "APOSIMZ")] - [InlineData("Akiiro Bousou Biyori - 01.jpg", "Akiiro Bousou Biyori")] - [InlineData("Beelzebub_172_RHS.zip", "Beelzebub")] - [InlineData("Dr. STONE 136 (2020) (Digital) (LuCaZ).cbz", "Dr. 
STONE")] - [InlineData("Cynthia the Mission 29.rar", "Cynthia the Mission")] - [InlineData("Darling in the FranXX - Volume 01.cbz", "Darling in the FranXX")] - [InlineData("Darwin's Game - Volume 14 (F).cbz", "Darwin's Game")] - [InlineData("[BAA]_Darker_than_Black_c7.zip", "Darker than Black")] - [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 19 [Dametrans].zip", "Kedouin Makoto - Corpse Party Musume")] - [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 01", "Kedouin Makoto - Corpse Party Musume")] - [InlineData("[WS]_Ichiban_Ushiro_no_Daimaou_v02_ch10.zip", "Ichiban Ushiro no Daimaou")] - [InlineData("[xPearse] Kyochuu Rettou Volume 1 [English] [Manga] [Volume Scans]", "Kyochuu Rettou")] - [InlineData("Loose_Relation_Between_Wizard_and_Apprentice_c07[AN].zip", "Loose Relation Between Wizard and Apprentice")] - [InlineData("Tower Of God S01 014 (CBT) (digital).cbz", "Tower Of God")] - [InlineData("Tenjou_Tenge_c106[MT].zip", "Tenjou Tenge")] - [InlineData("Tenjou_Tenge_v17_c100[MT].zip", "Tenjou Tenge")] - [InlineData("Shimoneta - Manmaru Hen - c001-006 (v01) [Various].zip", "Shimoneta - Manmaru Hen")] - [InlineData("Future Diary v02 (2009) (Digital) (Viz).cbz", "Future Diary")] - [InlineData("Tonikaku Cawaii [Volume 11].cbz", "Tonikaku Cawaii")] - [InlineData("Mujaki no Rakuen Vol12 ch76", "Mujaki no Rakuen")] - [InlineData("Knights of Sidonia c000 (S2 LE BD Omake - BLAME!) 
[Habanero Scans]", "Knights of Sidonia")] - [InlineData("Vol 1.cbz", "")] - [InlineData("Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip", "Ichinensei ni Nacchattara")] - [InlineData("Chrno_Crusade_Dragon_Age_All_Stars[AS].zip", "")] - [InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip", "Ichiban Ushiro no Daimaou")] - [InlineData("Rent a Girlfriend v01.cbr", "Rent a Girlfriend")] - [InlineData("Yumekui_Merry_v01_c01[Bakayarou-Kuu].rar", "Yumekui Merry")] - [InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "Itoshi no Karin")] - [InlineData("Tonikaku Kawaii Vol-1 (Ch 01-08)", "Tonikaku Kawaii")] - [InlineData("Tonikaku Kawaii (Ch 59-67) (Ongoing)", "Tonikaku Kawaii")] - [InlineData("7thGARDEN v01 (2016) (Digital) (danke).cbz", "7thGARDEN")] - [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "Kedouin Makoto - Corpse Party Musume")] - [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 09", "Kedouin Makoto - Corpse Party Musume")] - [InlineData("Goblin Slayer Side Story - Year One 025.5", "Goblin Slayer Side Story - Year One")] - [InlineData("Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire)", "Goblin Slayer - Brand New Day")] - [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 01 [Dametrans][v2]", "Kedouin Makoto - Corpse Party Musume")] - [InlineData("Vagabond_v03", "Vagabond")] - [InlineData("[AN] Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 
1", "Mahoutsukai to Deshi no Futekisetsu na Kankei")] - [InlineData("Beelzebub_Side_Story_02_RHS.zip", "Beelzebub Side Story")] - [InlineData("[BAA]_Darker_than_Black_Omake-1.zip", "Darker than Black")] - [InlineData("Baketeriya ch01-05.zip", "Baketeriya")] - [InlineData("[PROzess]Kimi_ha_midara_na_Boku_no_Joou_-_Ch01", "Kimi ha midara na Boku no Joou")] - [InlineData("[SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar", "NEEDLESS")] - [InlineData("Fullmetal Alchemist chapters 101-108.cbz", "Fullmetal Alchemist")] - [InlineData("To Love Ru v09 Uncensored (Ch.071-079).cbz", "To Love Ru")] - [InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz", "One Piece - Digital Colored Comics")] - [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Chapter 01", "Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U")] - [InlineData("Vol03_ch15-22.rar", "")] - [InlineData("Love Hina - Special.cbz", "")] // This has to be a fallback case - [InlineData("Ani-Hina Art Collection.cbz", "")] // This has to be a fallback case - [InlineData("Magi - Ch.252-005.cbz", "Magi")] - [InlineData("Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1", "Umineko no Naku Koro ni")] - [InlineData("Kimetsu no Yaiba - Digital Colored Comics c162 Three Victorious Stars.cbz", "Kimetsu no Yaiba - Digital Colored Comics")] - [InlineData("[Hidoi]_Amaenaideyo_MS_vol01_chp02.rar", "Amaenaideyo MS")] - [InlineData("NEEDLESS_Vol.4_-_Simeon_6_v2_[SugoiSugoi].rar", "NEEDLESS")] - [InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "Okusama wa Shougakusei")] - [InlineData("VanDread-v01-c001[MD].zip", "VanDread")] - [InlineData("Momo The Blood Taker - Chapter 027 Violent Emotion.cbz", "Momo The Blood Taker")] - [InlineData("Kiss x Sis - Ch.15 - The Angst of a 15 Year Old Boy.cbz", "Kiss x Sis")] - [InlineData("Green Worldz - Chapter 112 Final Chapter (End).cbz", "Green 
Worldz")] - [InlineData("Noblesse - Episode 406 (52 Pages).7z", "Noblesse")] - [InlineData("X-Men v1 #201 (September 2007).cbz", "X-Men")] - [InlineData("Kodoja #001 (March 2016)", "Kodoja")] - [InlineData("Boku No Kokoro No Yabai Yatsu - Chapter 054 I Prayed At The Shrine (V0).cbz", "Boku No Kokoro No Yabai Yatsu")] - [InlineData("Kiss x Sis - Ch.36 - A Cold Home Visit.cbz", "Kiss x Sis")] - [InlineData("Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ)", "Seraph of the End - Vampire Reign")] - [InlineData("Grand Blue Dreaming - SP02 Extra (2019) (Digital) (danke-Empire).cbz", "Grand Blue Dreaming")] - [InlineData("Yuusha Ga Shinda! - Vol.tbd Chapter 27.001 V2 Infection ①.cbz", "Yuusha Ga Shinda!")] - [InlineData("Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", "Seraph of the End - Vampire Reign")] - [InlineData("Getsuyoubi no Tawawa - Ch. 001 - Ai-chan, Part 1", "Getsuyoubi no Tawawa")] - [InlineData("Please Go Home, Akutsu-San! - Chapter 038.5 - Volume Announcement.cbz", "Please Go Home, Akutsu-San!")] - [InlineData("Killing Bites - Vol 11 Chapter 050 Save Me, Nunupi!.cbz", "Killing Bites")] - [InlineData("Mad Chimera World - Volume 005 - Chapter 026.cbz", "Mad Chimera World")] - [InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 034.5", "Hentai Ouji to Warawanai Neko.")] - [InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03 Ch. 023.5 - Volume 3 Extras.cbz", "The 100 Girlfriends Who Really, Really, Really, Really, Really Love You")] - [InlineData("Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 1-10", "Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo")] - [InlineData("The Duke of Death and His Black Maid - Ch. 177 - The Ball (3).cbz", "The Duke of Death and His Black Maid")] - [InlineData("The Duke of Death and His Black Maid - Vol. 04 Ch. 054.5 - V4 Omake", "The Duke of Death and His Black Maid")] - [InlineData("Vol. 04 Ch. 
054.5", "")] - [InlineData("Great_Teacher_Onizuka_v16[TheSpectrum]", "Great Teacher Onizuka")] - [InlineData("[Renzokusei]_Kimi_wa_Midara_na_Boku_no_Joou_Ch5_Final_Chapter", "Kimi wa Midara na Boku no Joou")] - [InlineData("Battle Royale, v01 (2000) [TokyoPop] [Manga-Sketchbook]", "Battle Royale")] - [InlineData("Kaiju No. 8 036 (2021) (Digital)", "Kaiju No. 8")] - [InlineData("Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", "Seraph of the End - Vampire Reign")] - [InlineData("Love Hina - Volume 01 [Scans].pdf", "Love Hina")] - [InlineData("It's Witching Time! 001 (Digital) (Anonymous1234)", "It's Witching Time!")] - [InlineData("Zettai Karen Children v02 c003 - The Invisible Guardian (2) [JS Scans]", "Zettai Karen Children")] - [InlineData("My Charms Are Wasted on Kuroiwa Medaka - Ch. 37.5 - Volume Extras", "My Charms Are Wasted on Kuroiwa Medaka")] - [InlineData("Highschool of the Dead - Full Color Edition v02 [Uasaha] (Yen Press)", "Highschool of the Dead - Full Color Edition")] - [InlineData("諌山創] 進撃の巨人 第23巻", "諌山創] 進撃の巨人")] - [InlineData("(一般コミック) [奥浩哉] いぬやしき 第09巻", "いぬやしき")] - [InlineData("Highschool of the Dead - 02", "Highschool of the Dead")] - public void ParseSeriesTest(string filename, string expected) - { - Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename)); - } - - [Theory] - [InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")] - [InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "9")] - [InlineData("Historys Strongest Disciple Kenichi_v11_c90-98.zip", "90-98")] - [InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", "0")] - [InlineData("BTOOOM! 
v01 (2013) (Digital) (Shadowcat-Empire)", "0")] - [InlineData("Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA]", "1-8")] - [InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "0")] - [InlineData("c001", "1")] - [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.12.zip", "12")] - [InlineData("Adding volume 1 with File: Ana Satsujin Vol. 1 Ch. 5 - Manga Box (gb).cbz", "5")] - [InlineData("Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz", "18")] - [InlineData("Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip", "0-6")] - [InlineData("[WS]_Ichiban_Ushiro_no_Daimaou_v02_ch10.zip", "10")] - [InlineData("Loose_Relation_Between_Wizard_and_Apprentice_c07[AN].zip", "7")] - [InlineData("Tower Of God S01 014 (CBT) (digital).cbz", "14")] - [InlineData("Tenjou_Tenge_c106[MT].zip", "106")] - [InlineData("Tenjou_Tenge_v17_c100[MT].zip", "100")] - [InlineData("Shimoneta - Manmaru Hen - c001-006 (v01) [Various].zip", "1-6")] - [InlineData("Mujaki no Rakuen Vol12 ch76", "76")] - [InlineData("Beelzebub_01_[Noodles].zip", "1")] - [InlineData("Yumekui-Merry_DKThias_Chapter21.zip", "21")] - [InlineData("Yumekui_Merry_v01_c01[Bakayarou-Kuu].rar", "1")] - [InlineData("Yumekui-Merry_DKThias_Chapter11v2.zip", "11")] - [InlineData("Yumekui-Merry DKThiasScanlations Chapter51v2", "51")] - [InlineData("Yumekui-Merry_DKThiasScanlations&RenzokuseiScans_Chapter61", "61")] - [InlineData("Goblin Slayer Side Story - Year One 017.5", "17.5")] - [InlineData("Beelzebub_53[KSH].zip", "53")] - [InlineData("Black Bullet - v4 c20.5 [batoto]", "20.5")] - [InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1-6")] - [InlineData("APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", "40")] - [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "12")] - [InlineData("Vol 1", "0")] - [InlineData("VanDread-v01-c001[MD].zip", "1")] - [InlineData("Goblin Slayer Side Story - Year One 025.5", "25.5")] - [InlineData("Kedouin Makoto - Corpse Party 
Musume, Chapter 01", "1")] - [InlineData("To Love Ru v11 Uncensored (Ch.089-097+Omake)", "89-97")] - [InlineData("To Love Ru v18 Uncensored (Ch.153-162.5)", "153-162.5")] - [InlineData("[AN] Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1", "1")] - [InlineData("Beelzebub_Side_Story_02_RHS.zip", "2")] - [InlineData("[PROzess]Kimi_ha_midara_na_Boku_no_Joou_-_Ch01", "1")] - [InlineData("Fullmetal Alchemist chapters 101-108.cbz", "101-108")] - [InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "2")] - [InlineData("To Love Ru v09 Uncensored (Ch.071-079).cbz", "71-79")] - [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter.rar", "0")] - [InlineData("Beelzebub_153b_RHS.zip", "153.5")] - [InlineData("Beelzebub_150-153b_RHS.zip", "150-153.5")] - [InlineData("Transferred to another world magical swordsman v1.1", "1")] - [InlineData("Transferred to another world magical swordsman v1.2", "2")] - [InlineData("Kiss x Sis - Ch.15 - The Angst of a 15 Year Old Boy.cbz", "15")] - [InlineData("Kiss x Sis - Ch.12 - 1 , 2 , 3P!.cbz", "12")] - [InlineData("Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1", "1")] - [InlineData("Kiss x Sis - Ch.00 - Let's Start from 0.cbz", "0")] - [InlineData("[Hidoi]_Amaenaideyo_MS_vol01_chp02.rar", "2")] - [InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "3")] - [InlineData("Tomogui Kyoushitsu - Chapter 006 Game 005 - Fingernails On Right Hand (Part 002).cbz", "6")] - [InlineData("Noblesse - Episode 406 (52 Pages).7z", "406")] - [InlineData("X-Men v1 #201 (September 2007).cbz", "201")] - [InlineData("Kodoja #001 (March 2016)", "1")] - [InlineData("Noblesse - Episode 429 (74 Pages).7z", "429")] - [InlineData("Boku No Kokoro No Yabai Yatsu - Chapter 054 I Prayed At The Shrine (V0).cbz", "54")] - [InlineData("Ijousha No Ai - Vol.01 Chapter 029 8 Years Ago", "29")] - [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", 
"9")] - [InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 034.5", "34.5")] - [InlineData("Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 1-10", "1-10")] - [InlineData("Deku_&_Bakugo_-_Rising_v1_c1.1.cbz", "1.1")] - [InlineData("Chapter 63 - The Promise Made for 520 Cenz.cbr", "63")] - [InlineData("Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", "0")] - [InlineData("Kaiju No. 8 036 (2021) (Digital)", "36")] - [InlineData("Samurai Jack Vol. 01 - The threads of Time", "0")] - [InlineData("【TFO汉化&Petit汉化】迷你偶像漫画第25话", "25")] - [InlineData("이세계에서 고아원을 열었지만, 어째서인지 아무도 독립하려 하지 않는다 38-1화 ", "38")] - [InlineData("[ハレム]ナナとカオル ~高校生のSMごっこ~ 第10話", "10")] - public void ParseChaptersTest(string filename, string expected) - { - Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseChapter(filename)); - } - - - [Theory] - [InlineData("Tenjou Tenge Omnibus", "Omnibus")] - [InlineData("Tenjou Tenge {Full Contact Edition}", "")] - [InlineData("Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", "")] - [InlineData("Wotakoi - Love is Hard for Otaku Omnibus v01 (2018) (Digital) (danke-Empire)", "Omnibus")] - [InlineData("To Love Ru v01 Uncensored (Ch.001-007)", "Uncensored")] - [InlineData("Chobits Omnibus Edition v01 [Dark Horse]", "Omnibus Edition")] - [InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 
177 - 30 Million vs 81 Million.cbz", "")] - [InlineData("AKIRA - c003 (v01) [Full Color] [Darkhorse].cbz", "")] - [InlineData("Love Hina Omnibus v05 (2015) (Digital-HD) (Asgard-Empire).cbz", "Omnibus")] - public void ParseEditionTest(string input, string expected) - { - Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseEdition(input)); - } - [Theory] - [InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)] - [InlineData("Beelzebub_Omake_June_2012_RHS", true)] - [InlineData("Beelzebub_Side_Story_02_RHS.zip", false)] - [InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", true)] - [InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", true)] - [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", true)] - [InlineData("Ani-Hina Art Collection.cbz", true)] - [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", true)] - [InlineData("A Town Where You Live - Bonus Chapter.zip", true)] - [InlineData("Yuki Merry - 4-Komga Anthology", false)] - [InlineData("Beastars - SP01", false)] - [InlineData("Beastars SP01", false)] - [InlineData("The League of Extraordinary Gentlemen", false)] - [InlineData("The League of Extra-ordinary Gentlemen", false)] - public void ParseMangaSpecialTest(string input, bool expected) - { - Assert.Equal(expected, !string.IsNullOrEmpty(API.Services.Tasks.Scanner.Parser.Parser.ParseMangaSpecial(input))); - } - - [Theory] - [InlineData("image.png", MangaFormat.Image)] - [InlineData("image.cbz", MangaFormat.Archive)] - [InlineData("image.txt", MangaFormat.Unknown)] - public void ParseFormatTest(string inputFile, MangaFormat expected) - { - Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseFormat(inputFile)); - } - - [Theory] - [InlineData("Gifting The Wonderful World With Blessings! 
- 3 Side Stories [yuNS][Unknown].epub", "Side Stories")] - public void ParseSpecialTest(string inputFile, string expected) - { - Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseMangaSpecial(inputFile)); - } - - - + _testOutputHelper = testOutputHelper; } + + [Theory] + [InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")] + [InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "1")] + [InlineData("Historys Strongest Disciple Kenichi_v11_c90-98.zip", "11")] + [InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", "1")] + [InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", "1")] + [InlineData("Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA]", "1")] + [InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "16-17")] + [InlineData("Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", "1")] + [InlineData("v001", "1")] + [InlineData("Vol 1", "1")] + [InlineData("vol_356-1", "356")] // Mangapy syntax + [InlineData("No Volume", "0")] + [InlineData("U12 (Under 12) Vol. 0001 Ch. 
0001 - Reiwa Scans (gb)", "1")] + [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip", "1.1")] + [InlineData("Tonikaku Cawaii [Volume 11].cbz", "11")] + [InlineData("[WS]_Ichiban_Ushiro_no_Daimaou_v02_ch10.zip", "2")] + [InlineData("[xPearse] Kyochuu Rettou Volume 1 [English] [Manga] [Volume Scans]", "1")] + [InlineData("Tower Of God S01 014 (CBT) (digital).cbz", "1")] + [InlineData("Tenjou_Tenge_v17_c100[MT].zip", "17")] + [InlineData("Shimoneta - Manmaru Hen - c001-006 (v01) [Various].zip", "1")] + [InlineData("Future Diary v02 (2009) (Digital) (Viz).cbz", "2")] + [InlineData("Mujaki no Rakuen Vol12 ch76", "12")] + [InlineData("Ichinensei_ni_Nacchattara_v02_ch11_[Taruby]_v1.3.zip", "2")] + [InlineData("Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", "1")] + [InlineData("Dorohedoro v11 (2013) (Digital) (LostNerevarine-Empire).cbz", "11")] + [InlineData("Yumekui_Merry_v01_c01[Bakayarou-Kuu].rar", "1")] + [InlineData("Yumekui-Merry_DKThias_Chapter11v2.zip", "0")] + [InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1")] + [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "0")] + [InlineData("VanDread-v01-c001[MD].zip", "1")] + [InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch27_[VISCANS].zip", "4")] + [InlineData("Mob Psycho 100 v02 (2019) (Digital) (Shizu).cbz", "2")] + [InlineData("Kodomo no Jikan vol. 1.cbz", "1")] + [InlineData("Kodomo no Jikan vol. 10.cbz", "10")] + [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12 [Dametrans][v2]", "0")] + [InlineData("Vagabond_v03", "3")] + [InlineData("Mujaki No Rakune Volume 10.cbz", "10")] + [InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "0")] + [InlineData("Volume 12 - Janken Boy is Coming!.cbz", "12")] + [InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 
177 - 30 Million vs 81 Million.cbz", "20")] + [InlineData("Gantz.V26.cbz", "26")] + [InlineData("NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar", "4")] + [InlineData("[Hidoi]_Amaenaideyo_MS_vol01_chp02.rar", "1")] + [InlineData("NEEDLESS_Vol.4_-_Simeon_6_v2_[SugoiSugoi].rar", "4")] + [InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "1")] + [InlineData("Sword Art Online Vol 10 - Alicization Running [Yen Press] [LuCaZ] {r2}.epub", "10")] + [InlineData("Noblesse - Episode 406 (52 Pages).7z", "0")] + [InlineData("X-Men v1 #201 (September 2007).cbz", "1")] + [InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 034.5", "6")] + [InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03 Ch. 023.5 - Volume 3 Extras.cbz", "3")] + [InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03.5 Ch. 023.5 - Volume 3 Extras.cbz", "3.5")] + [InlineData("幽游白书完全版 第03卷 天下", "3")] + [InlineData("阿衰online 第1册", "1")] + [InlineData("【TFO汉化&Petit汉化】迷你偶像漫画卷2第25话", "2")] + [InlineData("スライム倒して300年、知らないうちにレベルMAXになってました 1巻", "1")] + [InlineData("スライム倒して300年、知らないうちにレベルMAXになってました 1-3巻", "1-3")] + [InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", "3.5")] + [InlineData("Kebab Том 1 Глава 3", "1")] + [InlineData("Манга Глава 2", "0")] + [InlineData("Манга Тома 1-4", "1-4")] + [InlineData("Манга Том 1-4", "1-4")] + [InlineData("조선왕조실톡 106화", "106")] + [InlineData("죽음 13회", "13")] + [InlineData("동의보감 13장", "13")] + [InlineData("몰?루 아카이브 7.5권", "7.5")] + [InlineData("63권#200", "63")] + [InlineData("시즌34삽화2", "34")] + public void ParseVolumeTest(string filename, string expected) + { + Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename)); + } + + [Theory] + [InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "Killing Bites")] + [InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 
008.png", "My Girlfriend Is Shobitch")] + [InlineData("Historys Strongest Disciple Kenichi_v11_c90-98.zip", "Historys Strongest Disciple Kenichi")] + [InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", "B Gata H Kei")] + [InlineData("BTOOOM! v01 (2013) (Digital) (Shadowcat-Empire)", "BTOOOM!")] + [InlineData("Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA]", "Gokukoku no Brynhildr")] + [InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "Dance in the Vampire Bund")] + [InlineData("v001", "")] + [InlineData("U12 (Under 12) Vol. 0001 Ch. 0001 - Reiwa Scans (gb)", "U12")] + [InlineData("Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)", "Akame ga KILL! ZERO")] + [InlineData("APOSIMZ 017 (2018) (Digital) (danke-Empire).cbz", "APOSIMZ")] + [InlineData("Akiiro Bousou Biyori - 01.jpg", "Akiiro Bousou Biyori")] + [InlineData("Beelzebub_172_RHS.zip", "Beelzebub")] + [InlineData("Dr. STONE 136 (2020) (Digital) (LuCaZ).cbz", "Dr. STONE")] + [InlineData("Cynthia the Mission 29.rar", "Cynthia the Mission")] + [InlineData("Darling in the FranXX - Volume 01.cbz", "Darling in the FranXX")] + [InlineData("Darwin's Game - Volume 14 (F).cbz", "Darwin's Game")] + [InlineData("[BAA]_Darker_than_Black_c7.zip", "Darker than Black")] + [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 19 [Dametrans].zip", "Kedouin Makoto - Corpse Party Musume")] + [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 01", "Kedouin Makoto - Corpse Party Musume")] + [InlineData("[WS]_Ichiban_Ushiro_no_Daimaou_v02_ch10.zip", "Ichiban Ushiro no Daimaou")] + [InlineData("[xPearse] Kyochuu Rettou Volume 1 [English] [Manga] [Volume Scans]", "Kyochuu Rettou")] + [InlineData("Loose_Relation_Between_Wizard_and_Apprentice_c07[AN].zip", "Loose Relation Between Wizard and Apprentice")] + [InlineData("Tower Of God S01 014 (CBT) (digital).cbz", "Tower Of God")] + [InlineData("Tenjou_Tenge_c106[MT].zip", "Tenjou Tenge")] + [InlineData("Tenjou_Tenge_v17_c100[MT].zip", "Tenjou 
Tenge")] + [InlineData("Shimoneta - Manmaru Hen - c001-006 (v01) [Various].zip", "Shimoneta - Manmaru Hen")] + [InlineData("Future Diary v02 (2009) (Digital) (Viz).cbz", "Future Diary")] + [InlineData("Tonikaku Cawaii [Volume 11].cbz", "Tonikaku Cawaii")] + [InlineData("Mujaki no Rakuen Vol12 ch76", "Mujaki no Rakuen")] + [InlineData("Knights of Sidonia c000 (S2 LE BD Omake - BLAME!) [Habanero Scans]", "Knights of Sidonia")] + [InlineData("Vol 1.cbz", "")] + [InlineData("Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip", "Ichinensei ni Nacchattara")] + [InlineData("Chrno_Crusade_Dragon_Age_All_Stars[AS].zip", "")] + [InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip", "Ichiban Ushiro no Daimaou")] + [InlineData("Rent a Girlfriend v01.cbr", "Rent a Girlfriend")] + [InlineData("Yumekui_Merry_v01_c01[Bakayarou-Kuu].rar", "Yumekui Merry")] + [InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "Itoshi no Karin")] + [InlineData("Tonikaku Kawaii Vol-1 (Ch 01-08)", "Tonikaku Kawaii")] + [InlineData("Tonikaku Kawaii (Ch 59-67) (Ongoing)", "Tonikaku Kawaii")] + [InlineData("7thGARDEN v01 (2016) (Digital) (danke).cbz", "7thGARDEN")] + [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "Kedouin Makoto - Corpse Party Musume")] + [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 09", "Kedouin Makoto - Corpse Party Musume")] + [InlineData("Goblin Slayer Side Story - Year One 025.5", "Goblin Slayer Side Story - Year One")] + [InlineData("Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire)", "Goblin Slayer - Brand New Day")] + [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 01 [Dametrans][v2]", "Kedouin Makoto - Corpse Party Musume")] + [InlineData("Vagabond_v03", "Vagabond")] + [InlineData("[AN] Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 
1", "Mahoutsukai to Deshi no Futekisetsu na Kankei")] + [InlineData("Beelzebub_Side_Story_02_RHS.zip", "Beelzebub Side Story")] + [InlineData("[BAA]_Darker_than_Black_Omake-1.zip", "Darker than Black")] + [InlineData("Baketeriya ch01-05.zip", "Baketeriya")] + [InlineData("[PROzess]Kimi_ha_midara_na_Boku_no_Joou_-_Ch01", "Kimi ha midara na Boku no Joou")] + [InlineData("[SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar", "NEEDLESS")] + [InlineData("Fullmetal Alchemist chapters 101-108.cbz", "Fullmetal Alchemist")] + [InlineData("To Love Ru v09 Uncensored (Ch.071-079).cbz", "To Love Ru")] + [InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz", "One Piece - Digital Colored Comics")] + [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Chapter 01", "Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U")] + [InlineData("Vol03_ch15-22.rar", "")] + [InlineData("Love Hina - Special.cbz", "")] // This has to be a fallback case + [InlineData("Ani-Hina Art Collection.cbz", "")] // This has to be a fallback case + [InlineData("Magi - Ch.252-005.cbz", "Magi")] + [InlineData("Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1", "Umineko no Naku Koro ni")] + [InlineData("Kimetsu no Yaiba - Digital Colored Comics c162 Three Victorious Stars.cbz", "Kimetsu no Yaiba - Digital Colored Comics")] + [InlineData("[Hidoi]_Amaenaideyo_MS_vol01_chp02.rar", "Amaenaideyo MS")] + [InlineData("NEEDLESS_Vol.4_-_Simeon_6_v2_[SugoiSugoi].rar", "NEEDLESS")] + [InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "Okusama wa Shougakusei")] + [InlineData("VanDread-v01-c001[MD].zip", "VanDread")] + [InlineData("Momo The Blood Taker - Chapter 027 Violent Emotion.cbz", "Momo The Blood Taker")] + [InlineData("Kiss x Sis - Ch.15 - The Angst of a 15 Year Old Boy.cbz", "Kiss x Sis")] + [InlineData("Green Worldz - Chapter 112 Final Chapter (End).cbz", "Green 
Worldz")] + [InlineData("Noblesse - Episode 406 (52 Pages).7z", "Noblesse")] + [InlineData("X-Men v1 #201 (September 2007).cbz", "X-Men")] + [InlineData("Kodoja #001 (March 2016)", "Kodoja")] + [InlineData("Boku No Kokoro No Yabai Yatsu - Chapter 054 I Prayed At The Shrine (V0).cbz", "Boku No Kokoro No Yabai Yatsu")] + [InlineData("Kiss x Sis - Ch.36 - A Cold Home Visit.cbz", "Kiss x Sis")] + [InlineData("Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ)", "Seraph of the End - Vampire Reign")] + [InlineData("Grand Blue Dreaming - SP02 Extra (2019) (Digital) (danke-Empire).cbz", "Grand Blue Dreaming")] + [InlineData("Yuusha Ga Shinda! - Vol.tbd Chapter 27.001 V2 Infection ①.cbz", "Yuusha Ga Shinda!")] + [InlineData("Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", "Seraph of the End - Vampire Reign")] + [InlineData("Getsuyoubi no Tawawa - Ch. 001 - Ai-chan, Part 1", "Getsuyoubi no Tawawa")] + [InlineData("Please Go Home, Akutsu-San! - Chapter 038.5 - Volume Announcement.cbz", "Please Go Home, Akutsu-San!")] + [InlineData("Killing Bites - Vol 11 Chapter 050 Save Me, Nunupi!.cbz", "Killing Bites")] + [InlineData("Mad Chimera World - Volume 005 - Chapter 026.cbz", "Mad Chimera World")] + [InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 034.5", "Hentai Ouji to Warawanai Neko.")] + [InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03 Ch. 023.5 - Volume 3 Extras.cbz", "The 100 Girlfriends Who Really, Really, Really, Really, Really Love You")] + [InlineData("Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 1-10", "Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo")] + [InlineData("The Duke of Death and His Black Maid - Ch. 177 - The Ball (3).cbz", "The Duke of Death and His Black Maid")] + [InlineData("The Duke of Death and His Black Maid - Vol. 04 Ch. 054.5 - V4 Omake", "The Duke of Death and His Black Maid")] + [InlineData("Vol. 04 Ch. 
054.5", "")] + [InlineData("Great_Teacher_Onizuka_v16[TheSpectrum]", "Great Teacher Onizuka")] + [InlineData("[Renzokusei]_Kimi_wa_Midara_na_Boku_no_Joou_Ch5_Final_Chapter", "Kimi wa Midara na Boku no Joou")] + [InlineData("Battle Royale, v01 (2000) [TokyoPop] [Manga-Sketchbook]", "Battle Royale")] + [InlineData("Kaiju No. 8 036 (2021) (Digital)", "Kaiju No. 8")] + [InlineData("Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", "Seraph of the End - Vampire Reign")] + [InlineData("Love Hina - Volume 01 [Scans].pdf", "Love Hina")] + [InlineData("It's Witching Time! 001 (Digital) (Anonymous1234)", "It's Witching Time!")] + [InlineData("Zettai Karen Children v02 c003 - The Invisible Guardian (2) [JS Scans]", "Zettai Karen Children")] + [InlineData("My Charms Are Wasted on Kuroiwa Medaka - Ch. 37.5 - Volume Extras", "My Charms Are Wasted on Kuroiwa Medaka")] + [InlineData("Highschool of the Dead - Full Color Edition v02 [Uasaha] (Yen Press)", "Highschool of the Dead - Full Color Edition")] + [InlineData("諌山創] 進撃の巨人 第23巻", "諌山創] 進撃の巨人")] + [InlineData("(一般コミック) [奥浩哉] いぬやしき 第09巻", "いぬやしき")] + [InlineData("Highschool of the Dead - 02", "Highschool of the Dead")] + [InlineData("Kebab Том 1 Глава 3", "Kebab")] + [InlineData("Манга Глава 2", "Манга")] + [InlineData("Манга Глава 2-2", "Манга")] + [InlineData("Манга Том 1 3-4 Глава", "Манга")] + [InlineData("Esquire 6권 2021년 10월호", "Esquire")] + public void ParseSeriesTest(string filename, string expected) + { + Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseSeries(filename)); + } + + [Theory] + [InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")] + [InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "9")] + [InlineData("Historys Strongest Disciple Kenichi_v11_c90-98.zip", "90-98")] + [InlineData("B_Gata_H_Kei_v01[SlowManga&OverloadScans]", "0")] + [InlineData("BTOOOM! 
v01 (2013) (Digital) (Shadowcat-Empire)", "0")] + [InlineData("Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA]", "1-8")] + [InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "0")] + [InlineData("c001", "1")] + [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.12.zip", "0")] + [InlineData("Adding volume 1 with File: Ana Satsujin Vol. 1 Ch. 5 - Manga Box (gb).cbz", "5")] + [InlineData("Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz", "18")] + [InlineData("Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip", "0-6")] + [InlineData("[WS]_Ichiban_Ushiro_no_Daimaou_v02_ch10.zip", "10")] + [InlineData("Loose_Relation_Between_Wizard_and_Apprentice_c07[AN].zip", "7")] + [InlineData("Tower Of God S01 014 (CBT) (digital).cbz", "14")] + [InlineData("Tenjou_Tenge_c106[MT].zip", "106")] + [InlineData("Tenjou_Tenge_v17_c100[MT].zip", "100")] + [InlineData("Shimoneta - Manmaru Hen - c001-006 (v01) [Various].zip", "1-6")] + [InlineData("Mujaki no Rakuen Vol12 ch76", "76")] + [InlineData("Beelzebub_01_[Noodles].zip", "1")] + [InlineData("Yumekui-Merry_DKThias_Chapter21.zip", "21")] + [InlineData("Yumekui_Merry_v01_c01[Bakayarou-Kuu].rar", "1")] + [InlineData("Yumekui-Merry_DKThias_Chapter11v2.zip", "11")] + [InlineData("Yumekui-Merry DKThiasScanlations Chapter51v2", "51")] + [InlineData("Yumekui-Merry_DKThiasScanlations&RenzokuseiScans_Chapter61", "61")] + [InlineData("Goblin Slayer Side Story - Year One 017.5", "17.5")] + [InlineData("Beelzebub_53[KSH].zip", "53")] + [InlineData("Black Bullet - v4 c20.5 [batoto]", "20.5")] + [InlineData("Itoshi no Karin - c001-006x1 (v01) [Renzokusei Scans]", "1-6")] + [InlineData("APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", "40")] + [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12", "12")] + [InlineData("Vol 1", "0")] + [InlineData("VanDread-v01-c001[MD].zip", "1")] + [InlineData("Goblin Slayer Side Story - Year One 025.5", "25.5")] + [InlineData("Kedouin Makoto - Corpse Party Musume, 
Chapter 01", "1")] + [InlineData("To Love Ru v11 Uncensored (Ch.089-097+Omake)", "89-97")] + [InlineData("To Love Ru v18 Uncensored (Ch.153-162.5)", "153-162.5")] + [InlineData("[AN] Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1", "1")] + [InlineData("Beelzebub_Side_Story_02_RHS.zip", "2")] + [InlineData("[PROzess]Kimi_ha_midara_na_Boku_no_Joou_-_Ch01", "1")] + [InlineData("Fullmetal Alchemist chapters 101-108.cbz", "101-108")] + [InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "2")] + [InlineData("To Love Ru v09 Uncensored (Ch.071-079).cbz", "71-79")] + [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter.rar", "0")] + [InlineData("Beelzebub_153b_RHS.zip", "153.5")] + [InlineData("Beelzebub_150-153b_RHS.zip", "150-153.5")] + [InlineData("Transferred to another world magical swordsman v1.1", "0")] + [InlineData("Kiss x Sis - Ch.15 - The Angst of a 15 Year Old Boy.cbz", "15")] + [InlineData("Kiss x Sis - Ch.12 - 1 , 2 , 3P!.cbz", "12")] + [InlineData("Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1", "1")] + [InlineData("Kiss x Sis - Ch.00 - Let's Start from 0.cbz", "0")] + [InlineData("[Hidoi]_Amaenaideyo_MS_vol01_chp02.rar", "2")] + [InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "3")] + [InlineData("Tomogui Kyoushitsu - Chapter 006 Game 005 - Fingernails On Right Hand (Part 002).cbz", "6")] + [InlineData("Noblesse - Episode 406 (52 Pages).7z", "406")] + [InlineData("X-Men v1 #201 (September 2007).cbz", "201")] + [InlineData("Kodoja #001 (March 2016)", "1")] + [InlineData("Noblesse - Episode 429 (74 Pages).7z", "429")] + [InlineData("Boku No Kokoro No Yabai Yatsu - Chapter 054 I Prayed At The Shrine (V0).cbz", "54")] + [InlineData("Ijousha No Ai - Vol.01 Chapter 029 8 Years Ago", "29")] + [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", "9")] + [InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 
034.5", "34.5")] + [InlineData("Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 1-10", "1-10")] + [InlineData("Deku_&_Bakugo_-_Rising_v1_c1.1.cbz", "1.1")] + [InlineData("Chapter 63 - The Promise Made for 520 Cenz.cbr", "63")] + [InlineData("Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", "0")] + [InlineData("Kaiju No. 8 036 (2021) (Digital)", "36")] + [InlineData("Samurai Jack Vol. 01 - The threads of Time", "0")] + [InlineData("【TFO汉化&Petit汉化】迷你偶像漫画第25话", "25")] + [InlineData("자유록 13회#2", "13")] + [InlineData("이세계에서 고아원을 열었지만, 어째서인지 아무도 독립하려 하지 않는다 38-1화 ", "38")] + [InlineData("[ハレム]ナナとカオル ~高校生のSMごっこ~ 第10話", "10")] + [InlineData("Dance in the Vampire Bund {Special Edition} v03.5 (2019) (Digital) (KG Manga)", "0")] + [InlineData("Kebab Том 1 Глава 3", "3")] + [InlineData("Манга Глава 2", "2")] + [InlineData("Манга 2 Глава", "2")] + [InlineData("Манга Том 1 2 Глава", "2")] + public void ParseChaptersTest(string filename, string expected) + { + Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseChapter(filename)); + } + + + [Theory] + [InlineData("Tenjou Tenge Omnibus", "Omnibus")] + [InlineData("Tenjou Tenge {Full Contact Edition}", "")] + [InlineData("Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", "")] + [InlineData("Wotakoi - Love is Hard for Otaku Omnibus v01 (2018) (Digital) (danke-Empire)", "Omnibus")] + [InlineData("To Love Ru v01 Uncensored (Ch.001-007)", "Uncensored")] + [InlineData("Chobits Omnibus Edition v01 [Dark Horse]", "Omnibus Edition")] + [InlineData("Chobits_Omnibus_Edition_v01_[Dark_Horse]", "Omnibus Edition")] + [InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 
177 - 30 Million vs 81 Million.cbz", "")] + [InlineData("AKIRA - c003 (v01) [Full Color] [Darkhorse].cbz", "")] + [InlineData("Love Hina Omnibus v05 (2015) (Digital-HD) (Asgard-Empire).cbz", "Omnibus")] + public void ParseEditionTest(string input, string expected) + { + Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseEdition(input)); + } + [Theory] + [InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)] + [InlineData("Beelzebub_Omake_June_2012_RHS", true)] + [InlineData("Beelzebub_Side_Story_02_RHS.zip", false)] + [InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", true)] + [InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", true)] + [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", true)] + [InlineData("Ani-Hina Art Collection.cbz", true)] + [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", true)] + [InlineData("A Town Where You Live - Bonus Chapter.zip", true)] + [InlineData("Yuki Merry - 4-Komga Anthology", false)] + [InlineData("Beastars - SP01", false)] + [InlineData("Beastars SP01", false)] + [InlineData("The League of Extraordinary Gentlemen", false)] + [InlineData("The League of Extra-ordinary Gentlemen", false)] + [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown].epub", true)] + [InlineData("Dr. 
Ramune - Mysterious Disease Specialist v01 (2020) (Digital) (danke-Empire).cbz", false)] + public void IsMangaSpecialTest(string input, bool expected) + { + Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.IsMangaSpecial(input)); + } + + [Theory] + [InlineData("image.png", MangaFormat.Image)] + [InlineData("image.cbz", MangaFormat.Archive)] + [InlineData("image.txt", MangaFormat.Unknown)] + public void ParseFormatTest(string inputFile, MangaFormat expected) + { + Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseFormat(inputFile)); + } + + } diff --git a/API.Tests/Parser/ParserInfoTests.cs b/API.Tests/Parser/ParserInfoTests.cs index 16906cf55..ee4881eff 100644 --- a/API.Tests/Parser/ParserInfoTests.cs +++ b/API.Tests/Parser/ParserInfoTests.cs @@ -2,109 +2,108 @@ using API.Parser; using Xunit; -namespace API.Tests.Parser +namespace API.Tests.Parser; + +public class ParserInfoTests { - public class ParserInfoTests + [Fact] + public void MergeFromTest() { - [Fact] - public void MergeFromTest() + var p1 = new ParserInfo() { - var p1 = new ParserInfo() - { - Chapters = "0", - Edition = "", - Format = MangaFormat.Archive, - FullFilePath = "/manga/darker than black.cbz", - IsSpecial = false, - Series = "darker than black", - Title = "darker than black", - Volumes = "0" - }; + Chapters = "0", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "darker than black", + Volumes = "0" + }; - var p2 = new ParserInfo() - { - Chapters = "1", - Edition = "", - Format = MangaFormat.Archive, - FullFilePath = "/manga/darker than black.cbz", - IsSpecial = false, - Series = "darker than black", - Title = "Darker Than Black", - Volumes = "0" - }; - - var expected = new ParserInfo() - { - Chapters = "1", - Edition = "", - Format = MangaFormat.Archive, - FullFilePath = "/manga/darker than black.cbz", - IsSpecial = false, - Series = "darker than 
black", - Title = "darker than black", - Volumes = "0" - }; - p1.Merge(p2); - - AssertSame(expected, p1); - - } - - [Fact] - public void MergeFromTest2() + var p2 = new ParserInfo() { - var p1 = new ParserInfo() - { - Chapters = "1", - Edition = "", - Format = MangaFormat.Archive, - FullFilePath = "/manga/darker than black.cbz", - IsSpecial = true, - Series = "darker than black", - Title = "darker than black", - Volumes = "0" - }; + Chapters = "1", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "Darker Than Black", + Volumes = "0" + }; - var p2 = new ParserInfo() - { - Chapters = "0", - Edition = "", - Format = MangaFormat.Archive, - FullFilePath = "/manga/darker than black.cbz", - IsSpecial = false, - Series = "darker than black", - Title = "Darker Than Black", - Volumes = "1" - }; - - var expected = new ParserInfo() - { - Chapters = "1", - Edition = "", - Format = MangaFormat.Archive, - FullFilePath = "/manga/darker than black.cbz", - IsSpecial = true, - Series = "darker than black", - Title = "darker than black", - Volumes = "1" - }; - p1.Merge(p2); - - AssertSame(expected, p1); - - } - - - private static void AssertSame(ParserInfo expected, ParserInfo actual) + var expected = new ParserInfo() { - Assert.Equal(expected.Chapters, actual.Chapters); - Assert.Equal(expected.Volumes, actual.Volumes); - Assert.Equal(expected.Edition, actual.Edition); - Assert.Equal(expected.Filename, actual.Filename); - Assert.Equal(expected.Format, actual.Format); - Assert.Equal(expected.Series, actual.Series); - Assert.Equal(expected.IsSpecial, actual.IsSpecial); - Assert.Equal(expected.FullFilePath, actual.FullFilePath); - } + Chapters = "1", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "darker than black", + Volumes = "0" + }; + p1.Merge(p2); + + 
AssertSame(expected, p1); + + } + + [Fact] + public void MergeFromTest2() + { + var p1 = new ParserInfo() + { + Chapters = "1", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = true, + Series = "darker than black", + Title = "darker than black", + Volumes = "0" + }; + + var p2 = new ParserInfo() + { + Chapters = "0", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "Darker Than Black", + Volumes = "1" + }; + + var expected = new ParserInfo() + { + Chapters = "1", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = true, + Series = "darker than black", + Title = "darker than black", + Volumes = "1" + }; + p1.Merge(p2); + + AssertSame(expected, p1); + + } + + + private static void AssertSame(ParserInfo expected, ParserInfo actual) + { + Assert.Equal(expected.Chapters, actual.Chapters); + Assert.Equal(expected.Volumes, actual.Volumes); + Assert.Equal(expected.Edition, actual.Edition); + Assert.Equal(expected.Filename, actual.Filename); + Assert.Equal(expected.Format, actual.Format); + Assert.Equal(expected.Series, actual.Series); + Assert.Equal(expected.IsSpecial, actual.IsSpecial); + Assert.Equal(expected.FullFilePath, actual.FullFilePath); } } diff --git a/API.Tests/Parser/ParserTest.cs b/API.Tests/Parser/ParserTest.cs index c1ef966c9..e2f06465b 100644 --- a/API.Tests/Parser/ParserTest.cs +++ b/API.Tests/Parser/ParserTest.cs @@ -2,233 +2,291 @@ using System.Linq; using Xunit; using static API.Services.Tasks.Scanner.Parser.Parser; -namespace API.Tests.Parser +namespace API.Tests.Parser; + +public class ParserTests { - public class ParserTests + [Theory] + [InlineData("Joe Shmo, Green Blue", "Joe Shmo, Green Blue")] + [InlineData("Shmo, Joe", "Shmo, Joe")] + [InlineData(" Joe Shmo ", "Joe Shmo")] + public void CleanAuthorTest(string input, 
string expected) { - [Theory] - [InlineData("Joe Shmo, Green Blue", "Joe Shmo, Green Blue")] - [InlineData("Shmo, Joe", "Shmo, Joe")] - [InlineData(" Joe Shmo ", "Joe Shmo")] - public void CleanAuthorTest(string input, string expected) - { - Assert.Equal(expected, CleanAuthor(input)); - } + Assert.Equal(expected, CleanAuthor(input)); + } - [Theory] - [InlineData("", "")] - [InlineData("DEAD Tube Prologue", "DEAD Tube Prologue")] - [InlineData("DEAD Tube Prologue SP01", "DEAD Tube Prologue")] - [InlineData("DEAD_Tube_Prologue SP01", "DEAD Tube Prologue")] - public void CleanSpecialTitleTest(string input, string expected) - { - Assert.Equal(expected, CleanSpecialTitle(input)); - } + [Theory] + [InlineData("", "")] + [InlineData("DEAD Tube Prologue", "DEAD Tube Prologue")] + [InlineData("DEAD Tube Prologue SP01", "DEAD Tube Prologue")] + [InlineData("DEAD_Tube_Prologue SP01", "DEAD Tube Prologue")] + public void CleanSpecialTitleTest(string input, string expected) + { + Assert.Equal(expected, CleanSpecialTitle(input)); + } - [Theory] - [InlineData("Beastars - SP01", true)] - [InlineData("Beastars SP01", true)] - [InlineData("Beastars Special 01", false)] - [InlineData("Beastars Extra 01", false)] - [InlineData("Batman Beyond - Return of the Joker (2001) SP01", true)] - public void HasSpecialTest(string input, bool expected) - { - Assert.Equal(expected, HasSpecialMarker(input)); - } + [Theory] + [InlineData("Beastars - SP01", true)] + [InlineData("Beastars SP01", true)] + [InlineData("Beastars Special 01", false)] + [InlineData("Beastars Extra 01", false)] + [InlineData("Batman Beyond - Return of the Joker (2001) SP01", true)] + public void HasSpecialTest(string input, bool expected) + { + Assert.Equal(expected, HasSpecialMarker(input)); + } - [Theory] - [InlineData("0001", "1")] - [InlineData("1", "1")] - [InlineData("0013", "13")] - public void RemoveLeadingZeroesTest(string input, string expected) - { - Assert.Equal(expected, RemoveLeadingZeroes(input)); - } + 
[Theory] + [InlineData("0001", "1")] + [InlineData("1", "1")] + [InlineData("0013", "13")] + public void RemoveLeadingZeroesTest(string input, string expected) + { + Assert.Equal(expected, RemoveLeadingZeroes(input)); + } - [Theory] - [InlineData("1", "001")] - [InlineData("10", "010")] - [InlineData("100", "100")] - public void PadZerosTest(string input, string expected) - { - Assert.Equal(expected, PadZeros(input)); - } + [Theory] + [InlineData("1", "001")] + [InlineData("10", "010")] + [InlineData("100", "100")] + public void PadZerosTest(string input, string expected) + { + Assert.Equal(expected, PadZeros(input)); + } - [Theory] - [InlineData("Hello_I_am_here", false, "Hello I am here")] - [InlineData("Hello_I_am_here ", false, "Hello I am here")] - [InlineData("[ReleaseGroup] The Title", false, "The Title")] - [InlineData("[ReleaseGroup]_The_Title", false, "The Title")] - [InlineData("-The Title", false, "The Title")] - [InlineData("- The Title", false, "The Title")] - [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", false, "Kasumi Otoko no Ko v1.1")] - [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 04 (2019) (digital) (Son of Ultron-Empire)", true, "Batman - Detective Comics - Rebirth Deluxe Edition")] - [InlineData("Something - Full Color Edition", false, "Something - Full Color Edition")] - public void CleanTitleTest(string input, bool isComic, string expected) - { - Assert.Equal(expected, CleanTitle(input, isComic)); - } + [Theory] + [InlineData("Hello_I_am_here", false, "Hello I am here")] + [InlineData("Hello_I_am_here ", false, "Hello I am here")] + [InlineData("[ReleaseGroup] The Title", false, "The Title")] + [InlineData("[ReleaseGroup]_The_Title", false, "The Title")] + [InlineData("-The Title", false, "The Title")] + [InlineData("- The Title", false, "The Title")] + [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", false, "Kasumi Otoko no Ko v1.1")] + [InlineData("Batman - Detective Comics - Rebirth 
Deluxe Edition Book 04 (2019) (digital) (Son of Ultron-Empire)", true, "Batman - Detective Comics - Rebirth Deluxe Edition")] + [InlineData("Something - Full Color Edition", false, "Something - Full Color Edition")] + [InlineData("Witchblade 089 (2005) (Bittertek-DCP) (Top Cow (Image Comics))", true, "Witchblade 089")] + [InlineData("(C99) Kami-sama Hiroimashita. (SSSS.GRIDMAN)", false, "Kami-sama Hiroimashita.")] + [InlineData("Dr. Ramune - Mysterious Disease Specialist v01 (2020) (Digital) (danke-Empire)", false, "Dr. Ramune - Mysterious Disease Specialist v01")] + [InlineData("Magic Knight Rayearth {Omnibus Edition}", false, "Magic Knight Rayearth {}")] + [InlineData("Magic Knight Rayearth {Omnibus Version}", false, "Magic Knight Rayearth { Version}")] + public void CleanTitleTest(string input, bool isComic, string expected) + { + Assert.Equal(expected, CleanTitle(input, isComic)); + } - [Theory] - [InlineData("src: url(fonts/AvenirNext-UltraLight.ttf)", true)] - [InlineData("src: url(ideal-sans-serif.woff)", true)] - [InlineData("src: local(\"Helvetica Neue Bold\")", true)] - [InlineData("src: url(\"/fonts/OpenSans-Regular-webfont.woff2\")", true)] - [InlineData("src: local(\"/fonts/OpenSans-Regular-webfont.woff2\")", true)] - [InlineData("src: url(data:application/x-font-woff", false)] - public void FontCssRewriteMatches(string input, bool expectedMatch) - { - Assert.Equal(expectedMatch, FontSrcUrlRegex.Matches(input).Count > 0); - } + [Theory] + [InlineData("src: url(fonts/AvenirNext-UltraLight.ttf)", true)] + [InlineData("src: url(ideal-sans-serif.woff)", true)] + [InlineData("src: local(\"Helvetica Neue Bold\")", true)] + [InlineData("src: url(\"/fonts/OpenSans-Regular-webfont.woff2\")", true)] + [InlineData("src: local(\"/fonts/OpenSans-Regular-webfont.woff2\")", true)] + [InlineData("src: url(data:application/x-font-woff", false)] + public void FontCssRewriteMatches(string input, bool expectedMatch) + { + Assert.Equal(expectedMatch, 
FontSrcUrlRegex.Matches(input).Count > 0); + } - [Theory] - [InlineData("src: url(fonts/AvenirNext-UltraLight.ttf)", new [] {"src: url(", "fonts/AvenirNext-UltraLight.ttf", ")"})] - [InlineData("src: url(ideal-sans-serif.woff)", new [] {"src: url(", "ideal-sans-serif.woff", ")"})] - [InlineData("src: local(\"Helvetica Neue Bold\")", new [] {"src: local(\"", "Helvetica Neue Bold", "\")"})] - [InlineData("src: url(\"/fonts/OpenSans-Regular-webfont.woff2\")", new [] {"src: url(\"", "/fonts/OpenSans-Regular-webfont.woff2", "\")"})] - [InlineData("src: local(\"/fonts/OpenSans-Regular-webfont.woff2\")", new [] {"src: local(\"", "/fonts/OpenSans-Regular-webfont.woff2", "\")"})] - public void FontCssCorrectlySeparates(string input, string[] expected) - { - Assert.Equal(expected, FontSrcUrlRegex.Match(input).Groups.Values.Select(g => g.Value).Where((_, i) => i > 0).ToArray()); - } + [Theory] + [InlineData("src: url(fonts/AvenirNext-UltraLight.ttf)", new [] {"src: url(", "fonts/AvenirNext-UltraLight.ttf", ")"})] + [InlineData("src: url(ideal-sans-serif.woff)", new [] {"src: url(", "ideal-sans-serif.woff", ")"})] + [InlineData("src: local(\"Helvetica Neue Bold\")", new [] {"src: local(\"", "Helvetica Neue Bold", "\")"})] + [InlineData("src: url(\"/fonts/OpenSans-Regular-webfont.woff2\")", new [] {"src: url(\"", "/fonts/OpenSans-Regular-webfont.woff2", "\")"})] + [InlineData("src: local(\"/fonts/OpenSans-Regular-webfont.woff2\")", new [] {"src: local(\"", "/fonts/OpenSans-Regular-webfont.woff2", "\")"})] + public void FontCssCorrectlySeparates(string input, string[] expected) + { + Assert.Equal(expected, FontSrcUrlRegex.Match(input).Groups.Values.Select(g => g.Value).Where((_, i) => i > 0).ToArray()); + } - [Theory] - [InlineData("test.cbz", true)] - [InlineData("test.cbr", true)] - [InlineData("test.zip", true)] - [InlineData("test.rar", true)] - [InlineData("test.rar.!qb", false)] - [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.jpg", false)] - public void 
IsArchiveTest(string input, bool expected) - { - Assert.Equal(expected, IsArchive(input)); - } + [Theory] + [InlineData("test.cbz", true)] + [InlineData("test.cbr", true)] + [InlineData("test.zip", true)] + [InlineData("test.rar", true)] + [InlineData("test.rar.!qb", false)] + [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.jpg", false)] + public void IsArchiveTest(string input, bool expected) + { + Assert.Equal(expected, IsArchive(input)); + } - [Theory] - [InlineData("test.epub", true)] - [InlineData("test.pdf", true)] - [InlineData("test.mobi", false)] - [InlineData("test.djvu", false)] - [InlineData("test.zip", false)] - [InlineData("test.rar", false)] - [InlineData("test.epub.!qb", false)] - [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.ebub", false)] - public void IsBookTest(string input, bool expected) - { - Assert.Equal(expected, IsBook(input)); - } + [Theory] + [InlineData("test.epub", true)] + [InlineData("test.pdf", true)] + [InlineData("test.mobi", false)] + [InlineData("test.djvu", false)] + [InlineData("test.zip", false)] + [InlineData("test.rar", false)] + [InlineData("test.epub.!qb", false)] + [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.ebub", false)] + public void IsBookTest(string input, bool expected) + { + Assert.Equal(expected, IsBook(input)); + } - [Theory] - [InlineData("test.epub", true)] - [InlineData("test.EPUB", true)] - [InlineData("test.mobi", false)] - [InlineData("test.epub.!qb", false)] - [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.ebub", false)] - public void IsEpubTest(string input, bool expected) - { - Assert.Equal(expected, IsEpub(input)); - } + [Theory] + [InlineData("test.epub", true)] + [InlineData("test.EPUB", true)] + [InlineData("test.mobi", false)] + [InlineData("test.epub.!qb", false)] + [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.ebub", false)] + public void IsEpubTest(string input, bool expected) + { + Assert.Equal(expected, IsEpub(input)); + } - [Theory] - [InlineData("12-14", 12)] - [InlineData("24", 24)] - 
[InlineData("18-04", 4)] - [InlineData("18-04.5", 4.5)] - [InlineData("40", 40)] - [InlineData("40a-040b", 0)] - [InlineData("40.1_a", 0)] - public void MinimumNumberFromRangeTest(string input, float expected) - { - Assert.Equal(expected, MinNumberFromRange(input)); - } + [Theory] + [InlineData("12-14", 12)] + [InlineData("24", 24)] + [InlineData("18-04", 4)] + [InlineData("18-04.5", 4.5)] + [InlineData("40", 40)] + [InlineData("40a-040b", 0)] + [InlineData("40.1_a", 0)] + [InlineData("3.5", 3.5)] + [InlineData("3.5-4.0", 3.5)] + [InlineData("asdfasdf", 0.0)] + public void MinimumNumberFromRangeTest(string input, float expected) + { + Assert.Equal(expected, MinNumberFromRange(input)); + } - [Theory] - [InlineData("12-14", 14)] - [InlineData("24", 24)] - [InlineData("18-04", 18)] - [InlineData("18-04.5", 18)] - [InlineData("40", 40)] - [InlineData("40a-040b", 0)] - [InlineData("40.1_a", 0)] - public void MaximumNumberFromRangeTest(string input, float expected) - { - Assert.Equal(expected, MaxNumberFromRange(input)); - } + [Theory] + [InlineData("12-14", 14)] + [InlineData("24", 24)] + [InlineData("18-04", 18)] + [InlineData("18-04.5", 18)] + [InlineData("40", 40)] + [InlineData("40a-040b", 0)] + [InlineData("40.1_a", 0)] + [InlineData("3.5", 3.5)] + [InlineData("3.5-4.0", 4.0)] + [InlineData("asdfasdf", 0.0)] + public void MaximumNumberFromRangeTest(string input, float expected) + { + Assert.Equal(expected, MaxNumberFromRange(input)); + } - [Theory] - [InlineData("Darker Than Black", "darkerthanblack")] - [InlineData("Darker Than Black - Something", "darkerthanblacksomething")] - [InlineData("Darker Than_Black", "darkerthanblack")] - [InlineData("Citrus", "citrus")] - [InlineData("Citrus+", "citrus+")] - [InlineData("Again!!!!", "again")] - [InlineData("카비타", "카비타")] - [InlineData("06", "06")] - [InlineData("", "")] - public void NormalizeTest(string input, string expected) - { - Assert.Equal(expected, Normalize(input)); - } + [Theory] + [InlineData("Darker Than 
Black", "darkerthanblack")] + [InlineData("Darker Than Black - Something", "darkerthanblacksomething")] + [InlineData("Darker Than_Black", "darkerthanblack")] + [InlineData("Citrus", "citrus")] + [InlineData("Citrus+", "citrus+")] + [InlineData("Again", "again")] + [InlineData("카비타", "카비타")] + [InlineData("06", "06")] + [InlineData("", "")] + public void NormalizeTest(string input, string expected) + { + Assert.Equal(expected, Normalize(input)); + } - [Theory] - [InlineData("test.jpg", true)] - [InlineData("test.jpeg", true)] - [InlineData("test.png", true)] - [InlineData(".test.jpg", false)] - [InlineData("!test.jpg", true)] - [InlineData("test.webp", true)] - [InlineData("test.gif", true)] - public void IsImageTest(string filename, bool expected) - { - Assert.Equal(expected, IsImage(filename)); - } + [Theory] + [InlineData("test.jpg", true)] + [InlineData("test.jpeg", true)] + [InlineData("test.png", true)] + [InlineData(".test.jpg", false)] + [InlineData("!test.jpg", true)] + [InlineData("test.webp", true)] + [InlineData("test.gif", true)] + public void IsImageTest(string filename, bool expected) + { + Assert.Equal(expected, IsImage(filename)); + } - [Theory] - [InlineData("Love Hina - Special.jpg", false)] - [InlineData("folder.jpg", true)] - [InlineData("DearS_v01_cover.jpg", true)] - [InlineData("DearS_v01_covers.jpg", false)] - [InlineData("!cover.jpg", true)] - [InlineData("cover.jpg", true)] - [InlineData("cover.png", true)] - [InlineData("ch1/cover.png", true)] - [InlineData("ch1/backcover.png", false)] - [InlineData("backcover.png", false)] - [InlineData("back_cover.png", false)] - public void IsCoverImageTest(string inputPath, bool expected) - { - Assert.Equal(expected, IsCoverImage(inputPath)); - } + [Theory] + [InlineData("Love Hina - Special.jpg", false)] + [InlineData("folder.jpg", true)] + [InlineData("DearS_v01_cover.jpg", true)] + [InlineData("DearS_v01_covers.jpg", false)] + [InlineData("!cover.jpg", true)] + [InlineData("cover.jpg", true)] + 
[InlineData("cover.png", true)] + [InlineData("ch1/cover.png", true)] + [InlineData("ch1/backcover.png", false)] + [InlineData("backcover.png", false)] + [InlineData("back_cover.png", false)] + public void IsCoverImageTest(string inputPath, bool expected) + { + Assert.Equal(expected, IsCoverImage(inputPath)); + } - [Theory] - [InlineData("__MACOSX/Love Hina - Special.jpg", true)] - [InlineData("TEST/Love Hina - Special.jpg", false)] - [InlineData("__macosx/Love Hina/", false)] - [InlineData("MACOSX/Love Hina/", false)] - [InlineData("._Love Hina/Love Hina/", true)] - [InlineData("@Recently-Snapshot/Love Hina/", true)] - [InlineData("@recycle/Love Hina/", true)] - [InlineData("E:/Test/__MACOSX/Love Hina/", true)] - public void HasBlacklistedFolderInPathTest(string inputPath, bool expected) - { - Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath)); - } + [Theory] + [InlineData("__MACOSX/Love Hina - Special.jpg", true)] + [InlineData("TEST/Love Hina - Special.jpg", false)] + [InlineData("__macosx/Love Hina/", false)] + [InlineData("MACOSX/Love Hina/", false)] + [InlineData("._Love Hina/Love Hina/", true)] + [InlineData("@Recently-Snapshot/Love Hina/", true)] + [InlineData("@recycle/Love Hina/", true)] + [InlineData("E:/Test/__MACOSX/Love Hina/", true)] + public void HasBlacklistedFolderInPathTest(string inputPath, bool expected) + { + Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath)); + } - [Theory] - [InlineData("/manga/1/1/1", "/manga/1/1/1")] - [InlineData("/manga/1/1/1.jpg", "/manga/1/1/1.jpg")] - [InlineData(@"/manga/1/1\1.jpg", @"/manga/1/1/1.jpg")] - [InlineData("/manga/1/1//1", "/manga/1/1/1")] - [InlineData("/manga/1\\1\\1", "/manga/1/1/1")] - [InlineData("C:/manga/1\\1\\1.jpg", "C:/manga/1/1/1.jpg")] - public void NormalizePathTest(string inputPath, string expected) - { - Assert.Equal(expected, NormalizePath(inputPath)); - } + [Theory] + [InlineData("/manga/1/1/1", "/manga/1/1/1")] + [InlineData("/manga/1/1/1.jpg", "/manga/1/1/1.jpg")] 
+ [InlineData(@"/manga/1/1\1.jpg", @"/manga/1/1/1.jpg")] + [InlineData("/manga/1/1//1", "/manga/1/1/1")] + [InlineData("/manga/1\\1\\1", "/manga/1/1/1")] + [InlineData("C:/manga/1\\1\\1.jpg", "C:/manga/1/1/1.jpg")] + public void NormalizePathTest(string inputPath, string expected) + { + Assert.Equal(expected, NormalizePath(inputPath)); + } + + [Theory] + [InlineData("The quick brown fox jumps over the lazy dog")] + [InlineData("(The quick brown fox jumps over the lazy dog)")] + [InlineData("()The quick brown fox jumps over the lazy dog")] + [InlineData("The ()quick brown fox jumps over the lazy dog")] + [InlineData("The (quick (brown)) fox jumps over the lazy dog")] + [InlineData("The (quick (brown) fox jumps over the lazy dog)")] + public void BalancedParenTestMatches(string input) + { + Assert.Matches($@"^{BalancedParen}$", input); + } + + [Theory] + [InlineData("(The quick brown fox jumps over the lazy dog")] + [InlineData("The quick brown fox jumps over the lazy dog)")] + [InlineData("The )(quick brown fox jumps over the lazy dog")] + [InlineData("The quick (brown)) fox jumps over the lazy dog")] + [InlineData("The quick (brown) fox jumps over the lazy dog)")] + [InlineData("(The ))(quick (brown) fox jumps over the lazy dog")] + public void BalancedParenTestDoesNotMatch(string input) + { + Assert.DoesNotMatch($@"^{BalancedParen}$", input); + } + + [Theory] + [InlineData("The quick brown fox jumps over the lazy dog")] + [InlineData("[The quick brown fox jumps over the lazy dog]")] + [InlineData("[]The quick brown fox jumps over the lazy dog")] + [InlineData("The []quick brown fox jumps over the lazy dog")] + [InlineData("The [quick [brown]] fox jumps over the lazy dog")] + [InlineData("The [quick [brown] fox jumps over the lazy dog]")] + public void BalancedBrackTestMatches(string input) + { + Assert.Matches($@"^{BalancedBrack}$", input); + } + + [Theory] + [InlineData("[The quick brown fox jumps over the lazy dog")] + [InlineData("The quick brown fox jumps over 
the lazy dog]")] + [InlineData("The ][quick brown fox jumps over the lazy dog")] + [InlineData("The quick [brown]] fox jumps over the lazy dog")] + [InlineData("The quick [brown] fox jumps over the lazy dog]")] + [InlineData("[The ]][quick [brown] fox jumps over the lazy dog")] + public void BalancedBrackTestDoesNotMatch(string input) + { + Assert.DoesNotMatch($@"^{BalancedBrack}$", input); } } diff --git a/API.Tests/Repository/SeriesRepositoryTests.cs b/API.Tests/Repository/SeriesRepositoryTests.cs index 65491d333..fe285641e 100644 --- a/API.Tests/Repository/SeriesRepositoryTests.cs +++ b/API.Tests/Repository/SeriesRepositoryTests.cs @@ -140,12 +140,12 @@ public class SeriesRepositoryTests [InlineData("Heion Sedai no Idaten-tachi", "", MangaFormat.Archive, "The Idaten Deities Know Only Peace")] // Matching on localized name in DB [InlineData("Heion Sedai no Idaten-tachi", "", MangaFormat.Pdf, null)] - public async Task GetFullSeriesByAnyName_Should(string seriesName, string localizedName, string? expected) + public async Task GetFullSeriesByAnyName_Should(string seriesName, MangaFormat format, string localizedName, string? 
expected) { var firstSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1); var series = await _unitOfWork.SeriesRepository.GetFullSeriesByAnyName(seriesName, localizedName, - 1, MangaFormat.Unknown); + 1, format); if (expected == null) { Assert.Null(series); @@ -157,4 +157,6 @@ public class SeriesRepositoryTests } } + + //public async Task } diff --git a/API.Tests/Services/ArchiveServiceTests.cs b/API.Tests/Services/ArchiveServiceTests.cs index 2521d17af..f399cb790 100644 --- a/API.Tests/Services/ArchiveServiceTests.cs +++ b/API.Tests/Services/ArchiveServiceTests.cs @@ -14,317 +14,347 @@ using NSubstitute.Extensions; using Xunit; using Xunit.Abstractions; -namespace API.Tests.Services +namespace API.Tests.Services; + +public class ArchiveServiceTests { - public class ArchiveServiceTests + private readonly ITestOutputHelper _testOutputHelper; + private readonly ArchiveService _archiveService; + private readonly ILogger _logger = Substitute.For>(); + private readonly ILogger _directoryServiceLogger = Substitute.For>(); + private readonly IDirectoryService _directoryService = new DirectoryService(Substitute.For>(), new FileSystem()); + + public ArchiveServiceTests(ITestOutputHelper testOutputHelper) { - private readonly ITestOutputHelper _testOutputHelper; - private readonly ArchiveService _archiveService; - private readonly ILogger _logger = Substitute.For>(); - private readonly ILogger _directoryServiceLogger = Substitute.For>(); - private readonly IDirectoryService _directoryService = new DirectoryService(Substitute.For>(), new FileSystem()); - - public ArchiveServiceTests(ITestOutputHelper testOutputHelper) - { - _testOutputHelper = testOutputHelper; - _archiveService = new ArchiveService(_logger, _directoryService, new ImageService(Substitute.For>(), _directoryService)); - } - - [Theory] - [InlineData("flat file.zip", false)] - [InlineData("file in folder in folder.zip", true)] - [InlineData("file in folder.zip", true)] - [InlineData("file in 
folder_alt.zip", true)] - public void ArchiveNeedsFlatteningTest(string archivePath, bool expected) - { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); - var file = Path.Join(testDirectory, archivePath); - using ZipArchive archive = ZipFile.OpenRead(file); - Assert.Equal(expected, _archiveService.ArchiveNeedsFlattening(archive)); - } - - [Theory] - [InlineData("non existent file.zip", false)] - [InlineData("winrar.rar", true)] - [InlineData("empty.zip", true)] - [InlineData("flat file.zip", true)] - [InlineData("file in folder in folder.zip", true)] - [InlineData("file in folder.zip", true)] - [InlineData("file in folder_alt.zip", true)] - public void IsValidArchiveTest(string archivePath, bool expected) - { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); - Assert.Equal(expected, _archiveService.IsValidArchive(Path.Join(testDirectory, archivePath))); - } - - [Theory] - [InlineData("non existent file.zip", 0)] - [InlineData("winrar.rar", 0)] - [InlineData("empty.zip", 0)] - [InlineData("flat file.zip", 1)] - [InlineData("file in folder in folder.zip", 1)] - [InlineData("file in folder.zip", 1)] - [InlineData("file in folder_alt.zip", 1)] - [InlineData("macos_none.zip", 0)] - [InlineData("macos_one.zip", 1)] - [InlineData("macos_native.zip", 21)] - [InlineData("macos_withdotunder_one.zip", 1)] - public void GetNumberOfPagesFromArchiveTest(string archivePath, int expected) - { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); - var sw = Stopwatch.StartNew(); - Assert.Equal(expected, _archiveService.GetNumberOfPagesFromArchive(Path.Join(testDirectory, archivePath))); - _testOutputHelper.WriteLine($"Processed Original in {sw.ElapsedMilliseconds} ms"); - } - - - - [Theory] - [InlineData("non existent file.zip", ArchiveLibrary.NotSupported)] - 
[InlineData("winrar.rar", ArchiveLibrary.SharpCompress)] - [InlineData("empty.zip", ArchiveLibrary.Default)] - [InlineData("flat file.zip", ArchiveLibrary.Default)] - [InlineData("file in folder in folder.zip", ArchiveLibrary.Default)] - [InlineData("file in folder.zip", ArchiveLibrary.Default)] - [InlineData("file in folder_alt.zip", ArchiveLibrary.Default)] - public void CanOpenArchive(string archivePath, ArchiveLibrary expected) - { - var sw = Stopwatch.StartNew(); - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); - - Assert.Equal(expected, _archiveService.CanOpen(Path.Join(testDirectory, archivePath))); - _testOutputHelper.WriteLine($"Processed Original in {sw.ElapsedMilliseconds} ms"); - } - - - [Theory] - [InlineData("non existent file.zip", 0)] - [InlineData("winrar.rar", 0)] - [InlineData("empty.zip", 0)] - [InlineData("flat file.zip", 1)] - [InlineData("file in folder in folder.zip", 1)] - [InlineData("file in folder.zip", 1)] - [InlineData("file in folder_alt.zip", 1)] - public void CanExtractArchive(string archivePath, int expectedFileCount) - { - - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); - var extractDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives/Extraction"); - - _directoryService.ClearAndDeleteDirectory(extractDirectory); - - var sw = Stopwatch.StartNew(); - _archiveService.ExtractArchive(Path.Join(testDirectory, archivePath), extractDirectory); - var di1 = new DirectoryInfo(extractDirectory); - Assert.Equal(expectedFileCount, di1.Exists ? 
_directoryService.GetFiles(extractDirectory, searchOption:SearchOption.AllDirectories).Count() : 0); - _testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms"); - - _directoryService.ClearAndDeleteDirectory(extractDirectory); - } - - - [Theory] - [InlineData(new [] {"folder.jpg"}, "folder.jpg")] - [InlineData(new [] {"vol1/"}, "")] - [InlineData(new [] {"folder.jpg", "vol1/folder.jpg"}, "folder.jpg")] - [InlineData(new [] {"cover.jpg", "vol1/folder.jpg"}, "cover.jpg")] - [InlineData(new [] {"__MACOSX/cover.jpg", "vol1/page 01.jpg"}, "")] - [InlineData(new [] {"Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c060 (v10) - p200 [Digital] [LuCaZ].jpg", "folder.jpg"}, "folder.jpg")] - public void FindFolderEntry(string[] files, string expected) - { - var foundFile = ArchiveService.FindFolderEntry(files); - Assert.Equal(expected, string.IsNullOrEmpty(foundFile) ? "" : foundFile); - } - - [Theory] - [InlineData(new [] {"folder.jpg"}, "folder.jpg")] - [InlineData(new [] {"vol1/"}, "")] - [InlineData(new [] {"folder.jpg", "vol1/folder.jpg"}, "folder.jpg")] - [InlineData(new [] {"cover.jpg", "vol1/folder.jpg"}, "cover.jpg")] - [InlineData(new [] {"page 2.jpg", "page 10.jpg"}, "page 2.jpg")] - [InlineData(new [] {"__MACOSX/cover.jpg", "vol1/page 01.jpg"}, "vol1/page 01.jpg")] - [InlineData(new [] {"Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c060 (v10) - p200 [Digital] [LuCaZ].jpg", "folder.jpg"}, "Akame ga KILL! 
ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg")] - [InlineData(new [] {"001.jpg", "001 - chapter 1/001.jpg"}, "001.jpg")] - [InlineData(new [] {"chapter 1/001.jpg", "chapter 2/002.jpg", "somefile.jpg"}, "somefile.jpg")] - public void FindFirstEntry(string[] files, string expected) - { - var foundFile = ArchiveService.FirstFileEntry(files, string.Empty); - Assert.Equal(expected, string.IsNullOrEmpty(foundFile) ? "" : foundFile); - } - - - [Theory] - [InlineData("v10.cbz", "v10.expected.png")] - [InlineData("v10 - with folder.cbz", "v10 - with folder.expected.png")] - [InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.png")] - [InlineData("macos_native.zip", "macos_native.png")] - [InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.png")] - [InlineData("sorting.zip", "sorting.expected.png")] - [InlineData("test.zip", "test.expected.jpg")] - public void GetCoverImage_Default_Test(string inputFile, string expectedOutputFile) - { - var ds = Substitute.For(_directoryServiceLogger, new FileSystem()); - var imageService = new ImageService(Substitute.For>(), ds); - var archiveService = Substitute.For(_logger, ds, imageService); - - var testDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages")); - var expectedBytes = Image.Thumbnail(Path.Join(testDirectory, expectedOutputFile), 320).WriteToBuffer(".png"); - - archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.Default); - - var outputDir = Path.Join(testDirectory, "output"); - _directoryService.ClearDirectory(outputDir); - _directoryService.ExistOrCreate(outputDir); - - var coverImagePath = archiveService.GetCoverImage(Path.Join(testDirectory, inputFile), - Path.GetFileNameWithoutExtension(inputFile) + "_output", outputDir); - var actual = File.ReadAllBytes(Path.Join(outputDir, coverImagePath)); - - - Assert.Equal(expectedBytes, actual); - 
_directoryService.ClearAndDeleteDirectory(outputDir); - } - - - [Theory] - [InlineData("v10.cbz", "v10.expected.png")] - [InlineData("v10 - with folder.cbz", "v10 - with folder.expected.png")] - [InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.png")] - [InlineData("macos_native.zip", "macos_native.png")] - [InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.png")] - [InlineData("sorting.zip", "sorting.expected.png")] - public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile) - { - var imageService = new ImageService(Substitute.For>(), _directoryService); - var archiveService = Substitute.For(_logger, - new DirectoryService(_directoryServiceLogger, new FileSystem()), imageService); - var testDirectory = API.Services.Tasks.Scanner.Parser.Parser.NormalizePath(Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"))); - - var outputDir = Path.Join(testDirectory, "output"); - _directoryService.ClearDirectory(outputDir); - _directoryService.ExistOrCreate(outputDir); - - archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.SharpCompress); - var coverOutputFile = archiveService.GetCoverImage(Path.Join(testDirectory, inputFile), - Path.GetFileNameWithoutExtension(inputFile), outputDir); - var actualBytes = File.ReadAllBytes(Path.Join(outputDir, coverOutputFile)); - var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile)); - Assert.Equal(expectedBytes, actualBytes); - - _directoryService.ClearAndDeleteDirectory(outputDir); - } - - [Theory] - [InlineData("Archives/macos_native.zip")] - [InlineData("Formats/One File with DB_Supported.zip")] - public void CanParseCoverImage(string inputFile) - { - var imageService = Substitute.For(); - imageService.WriteCoverThumbnail(Arg.Any(), Arg.Any(), Arg.Any()).Returns(x => "cover.jpg"); - var archiveService = new 
ArchiveService(_logger, _directoryService, imageService); - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/"); - var inputPath = Path.GetFullPath(Path.Join(testDirectory, inputFile)); - var outputPath = Path.Join(testDirectory, Path.GetFileNameWithoutExtension(inputFile) + "_output"); - new DirectoryInfo(outputPath).Create(); - var expectedImage = archiveService.GetCoverImage(inputPath, inputFile, outputPath); - Assert.Equal("cover.jpg", expectedImage); - new DirectoryInfo(outputPath).Delete(); - } - - #region ShouldHaveComicInfo - - [Fact] - public void ShouldHaveComicInfo() - { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos"); - var archive = Path.Join(testDirectory, "ComicInfo.zip"); - const string summaryInfo = "By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! 
Can Ryouta still come out on top when he's playing for his life!?"; - - var comicInfo = _archiveService.GetComicInfo(archive); - Assert.NotNull(comicInfo); - Assert.Equal(summaryInfo, comicInfo.Summary); - } - - [Fact] - public void ShouldHaveComicInfo_WithAuthors() - { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos"); - var archive = Path.Join(testDirectory, "ComicInfo_authors.zip"); - - var comicInfo = _archiveService.GetComicInfo(archive); - Assert.NotNull(comicInfo); - Assert.Equal("Junya Inoue", comicInfo.Writer); - } - - [Fact] - public void ShouldHaveComicInfo_TopLevelFileOnly() - { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos"); - var archive = Path.Join(testDirectory, "ComicInfo_duplicateInfos.zip"); - - var comicInfo = _archiveService.GetComicInfo(archive); - Assert.NotNull(comicInfo); - Assert.Equal("BTOOOM!", comicInfo.Series); - } - - #endregion - - #region CanParseComicInfo - - [Fact] - public void CanParseComicInfo() - { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos"); - var archive = Path.Join(testDirectory, "ComicInfo.zip"); - var actual = _archiveService.GetComicInfo(archive); - var expected = new ComicInfo() - { - Publisher = "Yen Press", - Genre = "Manga, Movies & TV", - Summary = - "By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? 
The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! Can Ryouta still come out on top when he's playing for his life!?", - PageCount = 194, - LanguageISO = "en", - Notes = "Scraped metadata from Comixology [CMXDB450184]", - Series = "BTOOOM!", - Title = "v01", - Web = "https://www.comixology.com/BTOOOM/digital-comic/450184" - }; - - Assert.NotStrictEqual(expected, actual); - } - - #endregion - - #region FindCoverImageFilename - - [Theory] - [InlineData(new string[] {}, "", null)] - [InlineData(new [] {"001.jpg", "002.jpg"}, "Test.zip", "001.jpg")] - [InlineData(new [] {"001.jpg", "!002.jpg"}, "Test.zip", "!002.jpg")] - [InlineData(new [] {"001.jpg", "!001.jpg"}, "Test.zip", "!001.jpg")] - [InlineData(new [] {"001.jpg", "cover.jpg"}, "Test.zip", "cover.jpg")] - [InlineData(new [] {"001.jpg", "Chapter 20/cover.jpg", "Chapter 21/0001.jpg"}, "Test.zip", "Chapter 20/cover.jpg")] - [InlineData(new [] {"._/001.jpg", "._/cover.jpg", "010.jpg"}, "Test.zip", "010.jpg")] - [InlineData(new [] {"001.txt", "002.txt", "a.jpg"}, "Test.zip", "a.jpg")] - public void FindCoverImageFilename(string[] filenames, string archiveName, string expected) - { - Assert.Equal(expected, ArchiveService.FindCoverImageFilename(archiveName, filenames)); - } - - - #endregion - - #region CreateZipForDownload - - //[Fact] - public void CreateZipForDownloadTest() - { - var fileSystem = new MockFileSystem(); - var ds = new DirectoryService(Substitute.For>(), fileSystem); - //_archiveService.CreateZipForDownload(new []{}, outputDirectory) - } - - #endregion + _testOutputHelper = testOutputHelper; + _archiveService = new ArchiveService(_logger, _directoryService, new ImageService(Substitute.For>(), _directoryService)); } + + [Theory] + [InlineData("flat file.zip", false)] + [InlineData("file in folder in folder.zip", true)] + [InlineData("file in folder.zip", true)] + [InlineData("file in folder_alt.zip", 
true)] + public void ArchiveNeedsFlatteningTest(string archivePath, bool expected) + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); + var file = Path.Join(testDirectory, archivePath); + using var archive = ZipFile.OpenRead(file); + Assert.Equal(expected, _archiveService.ArchiveNeedsFlattening(archive)); + } + + [Theory] + [InlineData("non existent file.zip", false)] + [InlineData("winrar.rar", true)] + [InlineData("empty.zip", true)] + [InlineData("flat file.zip", true)] + [InlineData("file in folder in folder.zip", true)] + [InlineData("file in folder.zip", true)] + [InlineData("file in folder_alt.zip", true)] + public void IsValidArchiveTest(string archivePath, bool expected) + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); + Assert.Equal(expected, _archiveService.IsValidArchive(Path.Join(testDirectory, archivePath))); + } + + [Theory] + [InlineData("non existent file.zip", 0)] + [InlineData("winrar.rar", 0)] + [InlineData("empty.zip", 0)] + [InlineData("flat file.zip", 1)] + [InlineData("file in folder in folder.zip", 1)] + [InlineData("file in folder.zip", 1)] + [InlineData("file in folder_alt.zip", 1)] + [InlineData("macos_none.zip", 0)] + [InlineData("macos_one.zip", 1)] + [InlineData("macos_native.zip", 21)] + [InlineData("macos_withdotunder_one.zip", 1)] + public void GetNumberOfPagesFromArchiveTest(string archivePath, int expected) + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); + var sw = Stopwatch.StartNew(); + Assert.Equal(expected, _archiveService.GetNumberOfPagesFromArchive(Path.Join(testDirectory, archivePath))); + _testOutputHelper.WriteLine($"Processed Original in {sw.ElapsedMilliseconds} ms"); + } + + + + [Theory] + [InlineData("non existent file.zip", ArchiveLibrary.NotSupported)] + [InlineData("winrar.rar", 
ArchiveLibrary.SharpCompress)] + [InlineData("empty.zip", ArchiveLibrary.Default)] + [InlineData("flat file.zip", ArchiveLibrary.Default)] + [InlineData("file in folder in folder.zip", ArchiveLibrary.Default)] + [InlineData("file in folder.zip", ArchiveLibrary.Default)] + [InlineData("file in folder_alt.zip", ArchiveLibrary.Default)] + public void CanOpenArchive(string archivePath, ArchiveLibrary expected) + { + var sw = Stopwatch.StartNew(); + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); + + Assert.Equal(expected, _archiveService.CanOpen(Path.Join(testDirectory, archivePath))); + _testOutputHelper.WriteLine($"Processed Original in {sw.ElapsedMilliseconds} ms"); + } + + + [Theory] + [InlineData("non existent file.zip", 0)] + [InlineData("winrar.rar", 0)] + [InlineData("empty.zip", 0)] + [InlineData("flat file.zip", 1)] + [InlineData("file in folder in folder.zip", 1)] + [InlineData("file in folder.zip", 1)] + [InlineData("file in folder_alt.zip", 1)] + public void CanExtractArchive(string archivePath, int expectedFileCount) + { + + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); + var extractDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives/Extraction"); + + _directoryService.ClearAndDeleteDirectory(extractDirectory); + + var sw = Stopwatch.StartNew(); + _archiveService.ExtractArchive(Path.Join(testDirectory, archivePath), extractDirectory); + var di1 = new DirectoryInfo(extractDirectory); + Assert.Equal(expectedFileCount, di1.Exists ? 
_directoryService.GetFiles(extractDirectory, searchOption:SearchOption.AllDirectories).Count() : 0); + _testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms"); + + _directoryService.ClearAndDeleteDirectory(extractDirectory); + } + + + [Theory] + [InlineData(new [] {"folder.jpg"}, "folder.jpg")] + [InlineData(new [] {"vol1/"}, "")] + [InlineData(new [] {"folder.jpg", "vol1/folder.jpg"}, "folder.jpg")] + [InlineData(new [] {"cover.jpg", "vol1/folder.jpg"}, "cover.jpg")] + [InlineData(new [] {"__MACOSX/cover.jpg", "vol1/page 01.jpg"}, "")] + [InlineData(new [] {"Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c060 (v10) - p200 [Digital] [LuCaZ].jpg", "folder.jpg"}, "folder.jpg")] + public void FindFolderEntry(string[] files, string expected) + { + var foundFile = ArchiveService.FindFolderEntry(files); + Assert.Equal(expected, string.IsNullOrEmpty(foundFile) ? "" : foundFile); + } + + [Theory] + [InlineData(new [] {"folder.jpg"}, "folder.jpg")] + [InlineData(new [] {"vol1/"}, "")] + [InlineData(new [] {"folder.jpg", "vol1/folder.jpg"}, "folder.jpg")] + [InlineData(new [] {"cover.jpg", "vol1/folder.jpg"}, "cover.jpg")] + [InlineData(new [] {"page 2.jpg", "page 10.jpg"}, "page 2.jpg")] + [InlineData(new [] {"__MACOSX/cover.jpg", "vol1/page 01.jpg"}, "vol1/page 01.jpg")] + [InlineData(new [] {"Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c060 (v10) - p200 [Digital] [LuCaZ].jpg", "folder.jpg"}, "Akame ga KILL! 
ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg")] + [InlineData(new [] {"001.jpg", "001 - chapter 1/001.jpg"}, "001.jpg")] + [InlineData(new [] {"chapter 1/001.jpg", "chapter 2/002.jpg", "somefile.jpg"}, "somefile.jpg")] + public void FindFirstEntry(string[] files, string expected) + { + var foundFile = ArchiveService.FirstFileEntry(files, string.Empty); + Assert.Equal(expected, string.IsNullOrEmpty(foundFile) ? "" : foundFile); + } + + + [Theory] + [InlineData("v10.cbz", "v10.expected.png")] + [InlineData("v10 - with folder.cbz", "v10 - with folder.expected.png")] + [InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.png")] + [InlineData("macos_native.zip", "macos_native.png")] + [InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.png")] + [InlineData("sorting.zip", "sorting.expected.png")] + [InlineData("test.zip", "test.expected.jpg")] + public void GetCoverImage_Default_Test(string inputFile, string expectedOutputFile) + { + var ds = Substitute.For(_directoryServiceLogger, new FileSystem()); + var imageService = new ImageService(Substitute.For>(), ds); + var archiveService = Substitute.For(_logger, ds, imageService); + + var testDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages")); + var expectedBytes = Image.Thumbnail(Path.Join(testDirectory, expectedOutputFile), 320).WriteToBuffer(".png"); + + archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.Default); + + var outputDir = Path.Join(testDirectory, "output"); + _directoryService.ClearDirectory(outputDir); + _directoryService.ExistOrCreate(outputDir); + + var coverImagePath = archiveService.GetCoverImage(Path.Join(testDirectory, inputFile), + Path.GetFileNameWithoutExtension(inputFile) + "_output", outputDir); + var actual = File.ReadAllBytes(Path.Join(outputDir, coverImagePath)); + + + Assert.Equal(expectedBytes, actual); + 
_directoryService.ClearAndDeleteDirectory(outputDir); + } + + + [Theory] + [InlineData("v10.cbz", "v10.expected.png")] + [InlineData("v10 - with folder.cbz", "v10 - with folder.expected.png")] + [InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.png")] + [InlineData("macos_native.zip", "macos_native.png")] + [InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.png")] + [InlineData("sorting.zip", "sorting.expected.png")] + public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile) + { + var imageService = new ImageService(Substitute.For>(), _directoryService); + var archiveService = Substitute.For(_logger, + new DirectoryService(_directoryServiceLogger, new FileSystem()), imageService); + var testDirectory = API.Services.Tasks.Scanner.Parser.Parser.NormalizePath(Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"))); + + var outputDir = Path.Join(testDirectory, "output"); + _directoryService.ClearDirectory(outputDir); + _directoryService.ExistOrCreate(outputDir); + + archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.SharpCompress); + var coverOutputFile = archiveService.GetCoverImage(Path.Join(testDirectory, inputFile), + Path.GetFileNameWithoutExtension(inputFile), outputDir); + var actualBytes = File.ReadAllBytes(Path.Join(outputDir, coverOutputFile)); + var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile)); + Assert.Equal(expectedBytes, actualBytes); + + _directoryService.ClearAndDeleteDirectory(outputDir); + } + + [Theory] + [InlineData("Archives/macos_native.zip")] + [InlineData("Formats/One File with DB_Supported.zip")] + public void CanParseCoverImage(string inputFile) + { + var imageService = Substitute.For(); + imageService.WriteCoverThumbnail(Arg.Any(), Arg.Any(), Arg.Any()).Returns(x => "cover.jpg"); + var archiveService = new 
ArchiveService(_logger, _directoryService, imageService); + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/"); + var inputPath = Path.GetFullPath(Path.Join(testDirectory, inputFile)); + var outputPath = Path.Join(testDirectory, Path.GetFileNameWithoutExtension(inputFile) + "_output"); + new DirectoryInfo(outputPath).Create(); + var expectedImage = archiveService.GetCoverImage(inputPath, inputFile, outputPath); + Assert.Equal("cover.jpg", expectedImage); + new DirectoryInfo(outputPath).Delete(); + } + + #region ShouldHaveComicInfo + + [Fact] + public void ShouldHaveComicInfo() + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos"); + var archive = Path.Join(testDirectory, "ComicInfo.zip"); + const string summaryInfo = "By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! 
Can Ryouta still come out on top when he's playing for his life!?"; + + var comicInfo = _archiveService.GetComicInfo(archive); + Assert.NotNull(comicInfo); + Assert.Equal(summaryInfo, comicInfo.Summary); + } + + [Fact] + public void ShouldHaveComicInfo_WithAuthors() + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos"); + var archive = Path.Join(testDirectory, "ComicInfo_authors.zip"); + + var comicInfo = _archiveService.GetComicInfo(archive); + Assert.NotNull(comicInfo); + Assert.Equal("Junya Inoue", comicInfo.Writer); + } + + [Theory] + [InlineData("ComicInfo_duplicateInfos.zip")] + [InlineData("ComicInfo_duplicateInfos_reversed.zip")] + [InlineData("ComicInfo_duplicateInfos.rar")] + public void ShouldHaveComicInfo_TopLevelFileOnly(string filename) + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos"); + var archive = Path.Join(testDirectory, filename); + + var comicInfo = _archiveService.GetComicInfo(archive); + Assert.NotNull(comicInfo); + Assert.Equal("BTOOOM!", comicInfo.Series); + } + + [Fact] + public void ShouldHaveComicInfo_OutsideRoot() + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos"); + var archive = Path.Join(testDirectory, "ComicInfo_outside_root.zip"); + + var comicInfo = _archiveService.GetComicInfo(archive); + Assert.NotNull(comicInfo); + Assert.Equal("BTOOOM! 
- Duplicate", comicInfo.Series); + } + + #endregion + + #region CanParseComicInfo + + [Fact] + public void CanParseComicInfo() + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos"); + var archive = Path.Join(testDirectory, "ComicInfo.zip"); + var comicInfo = _archiveService.GetComicInfo(archive); + + Assert.NotNull(comicInfo); + Assert.Equal("Yen Press", comicInfo.Publisher); + Assert.Equal("Manga, Movies & TV", comicInfo.Genre); + Assert.Equal("By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! 
Can Ryouta still come out on top when he's playing for his life!?", + comicInfo.Summary); + Assert.Equal(194, comicInfo.PageCount); + Assert.Equal("en", comicInfo.LanguageISO); + Assert.Equal("Scraped metadata from Comixology [CMXDB450184]", comicInfo.Notes); + Assert.Equal("BTOOOM!", comicInfo.Series); + Assert.Equal("v01", comicInfo.Title); + Assert.Equal("https://www.comixology.com/BTOOOM/digital-comic/450184", comicInfo.Web); + } + + #endregion + + #region CanParseComicInfo_DefaultNumberIsBlank + + [Fact] + public void CanParseComicInfo_DefaultNumberIsBlank() + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos"); + var archive = Path.Join(testDirectory, "ComicInfo2.zip"); + var comicInfo = _archiveService.GetComicInfo(archive); + + Assert.NotNull(comicInfo); + Assert.Equal("Hellboy", comicInfo.Series); + Assert.Equal("The Right Hand of Doom", comicInfo.Title); + Assert.Equal("", comicInfo.Number); + Assert.Equal(0, comicInfo.Count); + Assert.Equal("4", comicInfo.Volume); + } + + + #endregion + + #region FindCoverImageFilename + + [Theory] + [InlineData(new string[] {}, "", null)] + [InlineData(new [] {"001.jpg", "002.jpg"}, "Test.zip", "001.jpg")] + [InlineData(new [] {"001.jpg", "!002.jpg"}, "Test.zip", "!002.jpg")] + [InlineData(new [] {"001.jpg", "!001.jpg"}, "Test.zip", "!001.jpg")] + [InlineData(new [] {"001.jpg", "cover.jpg"}, "Test.zip", "cover.jpg")] + [InlineData(new [] {"001.jpg", "Chapter 20/cover.jpg", "Chapter 21/0001.jpg"}, "Test.zip", "Chapter 20/cover.jpg")] + [InlineData(new [] {"._/001.jpg", "._/cover.jpg", "010.jpg"}, "Test.zip", "010.jpg")] + [InlineData(new [] {"001.txt", "002.txt", "a.jpg"}, "Test.zip", "a.jpg")] + public void FindCoverImageFilename(string[] filenames, string archiveName, string expected) + { + Assert.Equal(expected, ArchiveService.FindCoverImageFilename(archiveName, filenames)); + } + + + #endregion + + #region CreateZipForDownload + + //[Fact] + 
public void CreateZipForDownloadTest() + { + var fileSystem = new MockFileSystem(); + var ds = new DirectoryService(Substitute.For>(), fileSystem); + //_archiveService.CreateZipForDownload(new []{}, outputDirectory) + } + + #endregion } diff --git a/API.Tests/Services/BackupServiceTests.cs b/API.Tests/Services/BackupServiceTests.cs index ad7f8b9f9..783e0b62d 100644 --- a/API.Tests/Services/BackupServiceTests.cs +++ b/API.Tests/Services/BackupServiceTests.cs @@ -135,17 +135,9 @@ public class BackupServiceTests filesystem.AddFile($"{LogDirectory}kavita1.log", new MockFileData("")); var ds = new DirectoryService(Substitute.For>(), filesystem); - var inMemorySettings = new Dictionary { - {"Logging:File:Path", "config/logs/kavita.log"}, - {"Logging:File:MaxRollingFiles", "0"}, - }; - IConfiguration configuration = new ConfigurationBuilder() - .AddInMemoryCollection(inMemorySettings) - .Build(); + var backupService = new BackupService(_logger, _unitOfWork, ds, _messageHub); - var backupService = new BackupService(_logger, _unitOfWork, ds, configuration, _messageHub); - - var backupLogFiles = backupService.GetLogFiles(0, LogDirectory).ToList(); + var backupLogFiles = backupService.GetLogFiles(false).ToList(); Assert.Single(backupLogFiles); Assert.Equal(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath($"{LogDirectory}kavita.log"), API.Services.Tasks.Scanner.Parser.Parser.NormalizePath(backupLogFiles.First())); } @@ -155,20 +147,12 @@ public class BackupServiceTests { var filesystem = CreateFileSystem(); filesystem.AddFile($"{LogDirectory}kavita.log", new MockFileData("")); - filesystem.AddFile($"{LogDirectory}kavita1.log", new MockFileData("")); + filesystem.AddFile($"{LogDirectory}kavita20200213.log", new MockFileData("")); var ds = new DirectoryService(Substitute.For>(), filesystem); - var inMemorySettings = new Dictionary { - {"Logging:File:Path", "config/logs/kavita.log"}, - {"Logging:File:MaxRollingFiles", "1"}, - }; - IConfiguration configuration = new 
ConfigurationBuilder() - .AddInMemoryCollection(inMemorySettings) - .Build(); + var backupService = new BackupService(_logger, _unitOfWork, ds, _messageHub); - var backupService = new BackupService(_logger, _unitOfWork, ds, configuration, _messageHub); - - var backupLogFiles = backupService.GetLogFiles(1, LogDirectory).Select(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath).ToList(); + var backupLogFiles = backupService.GetLogFiles().Select(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath).ToList(); Assert.NotEmpty(backupLogFiles.Where(file => file.Equals(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath($"{LogDirectory}kavita.log")) || file.Equals(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath($"{LogDirectory}kavita1.log")))); } diff --git a/API.Tests/Services/BookServiceTests.cs b/API.Tests/Services/BookServiceTests.cs index f8b726ac5..38a5da896 100644 --- a/API.Tests/Services/BookServiceTests.cs +++ b/API.Tests/Services/BookServiceTests.cs @@ -5,54 +5,53 @@ using Microsoft.Extensions.Logging; using NSubstitute; using Xunit; -namespace API.Tests.Services +namespace API.Tests.Services; + +public class BookServiceTests { - public class BookServiceTests + private readonly IBookService _bookService; + private readonly ILogger _logger = Substitute.For>(); + + public BookServiceTests() { - private readonly IBookService _bookService; - private readonly ILogger _logger = Substitute.For>(); - - public BookServiceTests() - { - var directoryService = new DirectoryService(Substitute.For>(), new FileSystem()); - _bookService = new BookService(_logger, directoryService, new ImageService(Substitute.For>(), directoryService)); - } - - [Theory] - [InlineData("The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub", 16)] - [InlineData("Non-existent file.epub", 0)] - [InlineData("Non an ebub.pdf", 0)] - [InlineData("test_ſ.pdf", 1)] // This is dependent on Docnet bug https://github.com/GowenGit/docnet/issues/80 - 
[InlineData("test.pdf", 1)] - public void GetNumberOfPagesTest(string filePath, int expectedPages) - { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService"); - Assert.Equal(expectedPages, _bookService.GetNumberOfPages(Path.Join(testDirectory, filePath))); - } - - [Fact] - public void ShouldHaveComicInfo() - { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService"); - var archive = Path.Join(testDirectory, "The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub"); - const string summaryInfo = "Book Description"; - - var comicInfo = _bookService.GetComicInfo(archive); - Assert.NotNull(comicInfo); - Assert.Equal(summaryInfo, comicInfo.Summary); - Assert.Equal("genre1, genre2", comicInfo.Genre); - } - - [Fact] - public void ShouldHaveComicInfo_WithAuthors() - { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService"); - var archive = Path.Join(testDirectory, "The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub"); - - var comicInfo = _bookService.GetComicInfo(archive); - Assert.NotNull(comicInfo); - Assert.Equal("Roger Starbuck,Junya Inoue", comicInfo.Writer); - } - + var directoryService = new DirectoryService(Substitute.For>(), new FileSystem()); + _bookService = new BookService(_logger, directoryService, new ImageService(Substitute.For>(), directoryService)); } + + [Theory] + [InlineData("The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub", 16)] + [InlineData("Non-existent file.epub", 0)] + [InlineData("Non an ebub.pdf", 0)] + [InlineData("test_ſ.pdf", 1)] // This is dependent on Docnet bug https://github.com/GowenGit/docnet/issues/80 + [InlineData("test.pdf", 1)] + public void GetNumberOfPagesTest(string filePath, int expectedPages) + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test 
Data/BookService"); + Assert.Equal(expectedPages, _bookService.GetNumberOfPages(Path.Join(testDirectory, filePath))); + } + + [Fact] + public void ShouldHaveComicInfo() + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService"); + var archive = Path.Join(testDirectory, "The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub"); + const string summaryInfo = "Book Description"; + + var comicInfo = _bookService.GetComicInfo(archive); + Assert.NotNull(comicInfo); + Assert.Equal(summaryInfo, comicInfo.Summary); + Assert.Equal("genre1, genre2", comicInfo.Genre); + } + + [Fact] + public void ShouldHaveComicInfo_WithAuthors() + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService"); + var archive = Path.Join(testDirectory, "The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub"); + + var comicInfo = _bookService.GetComicInfo(archive); + Assert.NotNull(comicInfo); + Assert.Equal("Roger Starbuck,Junya Inoue", comicInfo.Writer); + } + } diff --git a/API.Tests/Services/BookmarkServiceTests.cs b/API.Tests/Services/BookmarkServiceTests.cs index 88f0fc587..97c07a281 100644 --- a/API.Tests/Services/BookmarkServiceTests.cs +++ b/API.Tests/Services/BookmarkServiceTests.cs @@ -410,7 +410,7 @@ public class BookmarkServiceTests #region Misc [Fact] - public async Task ShouldNotDeleteBookmarkOnChapterDeletion() + public async Task ShouldNotDeleteBookmark_OnChapterDeletion() { var filesystem = CreateFileSystem(); filesystem.AddFile($"{CacheDirectory}1/0001.jpg", new MockFileData("123")); @@ -462,8 +462,6 @@ public class BookmarkServiceTests var ds = new DirectoryService(Substitute.For>(), filesystem); - var bookmarkService = Create(ds); - var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Bookmarks); var vol = await _unitOfWork.VolumeRepository.GetVolumeAsync(1); vol.Chapters = new List(); @@ -475,5 
+473,72 @@ public class BookmarkServiceTests Assert.NotNull(await _unitOfWork.UserRepository.GetBookmarkAsync(1)); } + + [Fact] + public async Task ShouldNotDeleteBookmark_OnVolumeDeletion() + { + var filesystem = CreateFileSystem(); + filesystem.AddFile($"{CacheDirectory}1/0001.jpg", new MockFileData("123")); + filesystem.AddFile($"{BookmarkDirectory}1/1/0001.jpg", new MockFileData("123")); + + // Delete all Series to reset state + await ResetDB(); + var series = new Series() + { + Name = "Test", + Library = new Library() + { + Name = "Test LIb", + Type = LibraryType.Manga, + }, + Volumes = new List() + { + new Volume() + { + Chapters = new List() + { + new Chapter() + { + + } + } + } + } + }; + + _context.Series.Add(series); + + + _context.AppUser.Add(new AppUser() + { + UserName = "Joe", + Bookmarks = new List() + { + new AppUserBookmark() + { + Page = 1, + ChapterId = 1, + FileName = $"1/1/0001.jpg", + SeriesId = 1, + VolumeId = 1 + } + } + }); + + await _context.SaveChangesAsync(); + + var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Bookmarks); + Assert.NotEmpty(user.Bookmarks); + + series.Volumes = new List(); + _unitOfWork.SeriesRepository.Update(series); + await _unitOfWork.CommitAsync(); + + + var ds = new DirectoryService(Substitute.For>(), filesystem); + Assert.Single(ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories)); + Assert.NotNull(await _unitOfWork.UserRepository.GetBookmarkAsync(1)); + } + #endregion } diff --git a/API.Tests/Services/CacheServiceTests.cs b/API.Tests/Services/CacheServiceTests.cs index a812e5bdd..e3be8dce5 100644 --- a/API.Tests/Services/CacheServiceTests.cs +++ b/API.Tests/Services/CacheServiceTests.cs @@ -20,501 +20,500 @@ using Microsoft.Extensions.Logging; using NSubstitute; using Xunit; -namespace API.Tests.Services +namespace API.Tests.Services; + +internal class MockReadingItemServiceForCacheService : IReadingItemService { - internal class 
MockReadingItemServiceForCacheService : IReadingItemService + private readonly DirectoryService _directoryService; + + public MockReadingItemServiceForCacheService(DirectoryService directoryService) { - private readonly DirectoryService _directoryService; - - public MockReadingItemServiceForCacheService(DirectoryService directoryService) - { - _directoryService = directoryService; - } - - public ComicInfo GetComicInfo(string filePath) - { - return null; - } - - public int GetNumberOfPages(string filePath, MangaFormat format) - { - return 1; - } - - public string GetCoverImage(string fileFilePath, string fileName, MangaFormat format) - { - return string.Empty; - } - - public void Extract(string fileFilePath, string targetDirectory, MangaFormat format, int imageCount = 1) - { - throw new System.NotImplementedException(); - } - - public ParserInfo Parse(string path, string rootPath, LibraryType type) - { - throw new System.NotImplementedException(); - } - - public ParserInfo ParseFile(string path, string rootPath, LibraryType type) - { - throw new System.NotImplementedException(); - } + _directoryService = directoryService; } - public class CacheServiceTests + + public ComicInfo GetComicInfo(string filePath) { - private readonly ILogger _logger = Substitute.For>(); - private readonly IUnitOfWork _unitOfWork; - private readonly IHubContext _messageHub = Substitute.For>(); - - private readonly DbConnection _connection; - private readonly DataContext _context; - - private const string CacheDirectory = "C:/kavita/config/cache/"; - private const string CoverImageDirectory = "C:/kavita/config/covers/"; - private const string BackupDirectory = "C:/kavita/config/backups/"; - private const string DataDirectory = "C:/data/"; - - public CacheServiceTests() - { - var contextOptions = new DbContextOptionsBuilder() - .UseSqlite(CreateInMemoryDatabase()) - .Options; - _connection = RelationalOptionsExtension.Extract(contextOptions).Connection; - - _context = new 
DataContext(contextOptions); - Task.Run(SeedDb).GetAwaiter().GetResult(); - - _unitOfWork = new UnitOfWork(_context, Substitute.For(), null); - } - - #region Setup - - private static DbConnection CreateInMemoryDatabase() - { - var connection = new SqliteConnection("Filename=:memory:"); - - connection.Open(); - - return connection; - } - - public void Dispose() => _connection.Dispose(); - - private async Task SeedDb() - { - await _context.Database.MigrateAsync(); - var filesystem = CreateFileSystem(); - - await Seed.SeedSettings(_context, new DirectoryService(Substitute.For>(), filesystem)); - - var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync(); - setting.Value = CacheDirectory; - - setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync(); - setting.Value = BackupDirectory; - - _context.ServerSetting.Update(setting); - - _context.Library.Add(new Library() - { - Name = "Manga", - Folders = new List() - { - new FolderPath() - { - Path = "C:/data/" - } - } - }); - return await _context.SaveChangesAsync() > 0; - } - - private async Task ResetDB() - { - _context.Series.RemoveRange(_context.Series.ToList()); - - await _context.SaveChangesAsync(); - } - - private static MockFileSystem CreateFileSystem() - { - var fileSystem = new MockFileSystem(); - fileSystem.Directory.SetCurrentDirectory("C:/kavita/"); - fileSystem.AddDirectory("C:/kavita/config/"); - fileSystem.AddDirectory(CacheDirectory); - fileSystem.AddDirectory(CoverImageDirectory); - fileSystem.AddDirectory(BackupDirectory); - fileSystem.AddDirectory(DataDirectory); - - return fileSystem; - } - - #endregion - - #region Ensure - - [Fact] - public async Task Ensure_DirectoryAlreadyExists_DontExtractAnything() - { - var filesystem = CreateFileSystem(); - filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData("")); - filesystem.AddDirectory($"{CacheDirectory}1/"); - var ds = new 
DirectoryService(Substitute.For>(), filesystem); - var cleanupService = new CacheService(_logger, _unitOfWork, ds, - new ReadingItemService(Substitute.For(), - Substitute.For(), Substitute.For(), ds), Substitute.For()); - - await ResetDB(); - var s = DbFactory.Series("Test"); - var v = DbFactory.Volume("1"); - var c = new Chapter() - { - Number = "1", - Files = new List() - { - new MangaFile() - { - Format = MangaFormat.Archive, - FilePath = $"{DataDirectory}Test v1.zip", - } - } - }; - v.Chapters.Add(c); - s.Volumes.Add(v); - s.LibraryId = 1; - _context.Series.Add(s); - - await _context.SaveChangesAsync(); - - await cleanupService.Ensure(1); - Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories)); - } - - // [Fact] - // public async Task Ensure_DirectoryAlreadyExists_ExtractsImages() - // { - // // TODO: Figure out a way to test this - // var filesystem = CreateFileSystem(); - // filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData("")); - // filesystem.AddDirectory($"{CacheDirectory}1/"); - // var ds = new DirectoryService(Substitute.For>(), filesystem); - // var archiveService = Substitute.For(); - // archiveService.ExtractArchive($"{DataDirectory}Test v1.zip", - // filesystem.Path.Join(CacheDirectory, "1")); - // var cleanupService = new CacheService(_logger, _unitOfWork, ds, - // new ReadingItemService(archiveService, Substitute.For(), Substitute.For(), ds)); - // - // await ResetDB(); - // var s = DbFactory.Series("Test"); - // var v = DbFactory.Volume("1"); - // var c = new Chapter() - // { - // Number = "1", - // Files = new List() - // { - // new MangaFile() - // { - // Format = MangaFormat.Archive, - // FilePath = $"{DataDirectory}Test v1.zip", - // } - // } - // }; - // v.Chapters.Add(c); - // s.Volumes.Add(v); - // s.LibraryId = 1; - // _context.Series.Add(s); - // - // await _context.SaveChangesAsync(); - // - // await cleanupService.Ensure(1); - // 
Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories)); - // } - - - #endregion - - #region CleanupChapters - - [Fact] - public void CleanupChapters_AllFilesShouldBeDeleted() - { - var filesystem = CreateFileSystem(); - filesystem.AddDirectory($"{CacheDirectory}1/"); - filesystem.AddFile($"{CacheDirectory}1/001.jpg", new MockFileData("")); - filesystem.AddFile($"{CacheDirectory}1/002.jpg", new MockFileData("")); - filesystem.AddFile($"{CacheDirectory}3/003.jpg", new MockFileData("")); - var ds = new DirectoryService(Substitute.For>(), filesystem); - var cleanupService = new CacheService(_logger, _unitOfWork, ds, - new ReadingItemService(Substitute.For(), - Substitute.For(), Substitute.For(), ds), Substitute.For()); - - cleanupService.CleanupChapters(new []{1, 3}); - Assert.Empty(ds.GetFiles(CacheDirectory, searchOption:SearchOption.AllDirectories)); - } - - - #endregion - - #region GetCachedEpubFile - - [Fact] - public void GetCachedEpubFile_ShouldReturnFirstEpub() - { - var filesystem = CreateFileSystem(); - filesystem.AddDirectory($"{CacheDirectory}1/"); - filesystem.AddFile($"{DataDirectory}1.epub", new MockFileData("")); - filesystem.AddFile($"{DataDirectory}2.epub", new MockFileData("")); - var ds = new DirectoryService(Substitute.For>(), filesystem); - var cs = new CacheService(_logger, _unitOfWork, ds, - new ReadingItemService(Substitute.For(), - Substitute.For(), Substitute.For(), ds), Substitute.For()); - - var c = new Chapter() - { - Files = new List() - { - new MangaFile() - { - FilePath = $"{DataDirectory}1.epub" - }, - new MangaFile() - { - FilePath = $"{DataDirectory}2.epub" - } - } - }; - cs.GetCachedFile(c); - Assert.Same($"{DataDirectory}1.epub", cs.GetCachedFile(c)); - } - - #endregion - - #region GetCachedPagePath - - [Fact] - public void GetCachedPagePath_ReturnNullIfNoFiles() - { - var filesystem = CreateFileSystem(); - filesystem.AddDirectory($"{CacheDirectory}1/"); - 
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData("")); - filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData("")); - - var c = new Chapter() - { - Id = 1, - Files = new List() - }; - - var fileIndex = 0; - foreach (var file in c.Files) - { - for (var i = 0; i < file.Pages - 1; i++) - { - filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData("")); - } - - fileIndex++; - } - - var ds = new DirectoryService(Substitute.For>(), filesystem); - var cs = new CacheService(_logger, _unitOfWork, ds, - new ReadingItemService(Substitute.For(), - Substitute.For(), Substitute.For(), ds), Substitute.For()); - - // Flatten to prepare for how GetFullPath expects - ds.Flatten($"{CacheDirectory}1/"); - - var path = cs.GetCachedPagePath(c, 11); - Assert.Equal(string.Empty, path); - } - - [Fact] - public void GetCachedPagePath_GetFileFromFirstFile() - { - var filesystem = CreateFileSystem(); - filesystem.AddDirectory($"{CacheDirectory}1/"); - filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData("")); - filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData("")); - - var c = new Chapter() - { - Id = 1, - Files = new List() - { - new MangaFile() - { - Id = 1, - FilePath = $"{DataDirectory}1.zip", - Pages = 10 - - }, - new MangaFile() - { - Id = 2, - FilePath = $"{DataDirectory}2.zip", - Pages = 5 - } - } - }; - - var fileIndex = 0; - foreach (var file in c.Files) - { - for (var i = 0; i < file.Pages; i++) - { - filesystem.AddFile($"{CacheDirectory}1/00{fileIndex}_00{i+1}.jpg", new MockFileData("")); - } - - fileIndex++; - } - - var ds = new DirectoryService(Substitute.For>(), filesystem); - var cs = new CacheService(_logger, _unitOfWork, ds, - new ReadingItemService(Substitute.For(), - Substitute.For(), Substitute.For(), ds), Substitute.For()); - - // Flatten to prepare for how GetFullPath expects - ds.Flatten($"{CacheDirectory}1/"); - - Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_001.jpg"), 
ds.FileSystem.Path.GetFullPath(cs.GetCachedPagePath(c, 0))); - - } - - - [Fact] - public void GetCachedPagePath_GetLastPageFromSingleFile() - { - var filesystem = CreateFileSystem(); - filesystem.AddDirectory($"{CacheDirectory}1/"); - filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData("")); - - var c = new Chapter() - { - Id = 1, - Files = new List() - { - new MangaFile() - { - Id = 1, - FilePath = $"{DataDirectory}1.zip", - Pages = 10 - - } - } - }; - c.Pages = c.Files.Sum(f => f.Pages); - - var fileIndex = 0; - foreach (var file in c.Files) - { - for (var i = 0; i < file.Pages; i++) - { - filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData("")); - } - - fileIndex++; - } - - var ds = new DirectoryService(Substitute.For>(), filesystem); - var cs = new CacheService(_logger, _unitOfWork, ds, - new ReadingItemService(Substitute.For(), - Substitute.For(), Substitute.For(), ds), Substitute.For()); - - // Flatten to prepare for how GetFullPath expects - ds.Flatten($"{CacheDirectory}1/"); - - // Remember that we start at 0, so this is the 10th file - var path = cs.GetCachedPagePath(c, c.Pages); - Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_0{c.Pages}.jpg"), ds.FileSystem.Path.GetFullPath(path)); - } - - [Fact] - public void GetCachedPagePath_GetFileFromSecondFile() - { - var filesystem = CreateFileSystem(); - filesystem.AddDirectory($"{CacheDirectory}1/"); - filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData("")); - filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData("")); - - var c = new Chapter() - { - Id = 1, - Files = new List() - { - new MangaFile() - { - Id = 1, - FilePath = $"{DataDirectory}1.zip", - Pages = 10 - - }, - new MangaFile() - { - Id = 2, - FilePath = $"{DataDirectory}2.zip", - Pages = 5 - } - } - }; - - var fileIndex = 0; - foreach (var file in c.Files) - { - for (var i = 0; i < file.Pages; i++) - { - filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new 
MockFileData("")); - } - - fileIndex++; - } - - var ds = new DirectoryService(Substitute.For>(), filesystem); - var cs = new CacheService(_logger, _unitOfWork, ds, - new ReadingItemService(Substitute.For(), - Substitute.For(), Substitute.For(), ds), Substitute.For()); - - // Flatten to prepare for how GetFullPath expects - ds.Flatten($"{CacheDirectory}1/"); - - // Remember that we start at 0, so this is the page + 1 file - var path = cs.GetCachedPagePath(c, 10); - Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/001_001.jpg"), ds.FileSystem.Path.GetFullPath(path)); - } - - #endregion - - #region ExtractChapterFiles - - // [Fact] - // public void ExtractChapterFiles_ShouldExtractOnlyImages() - // { - // const string testDirectory = "/manga/"; - // var fileSystem = new MockFileSystem(); - // for (var i = 0; i < 10; i++) - // { - // fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - // } - // - // fileSystem.AddDirectory(CacheDirectory); - // - // var ds = new DirectoryService(Substitute.For>(), fileSystem); - // var cs = new CacheService(_logger, _unitOfWork, ds, - // new MockReadingItemServiceForCacheService(ds)); - // - // - // cs.ExtractChapterFiles(CacheDirectory, new List() - // { - // new MangaFile() - // { - // ChapterId = 1, - // Format = MangaFormat.Archive, - // Pages = 2, - // FilePath = - // } - // }) - // } - - #endregion + return null; + } + + public int GetNumberOfPages(string filePath, MangaFormat format) + { + return 1; + } + + public string GetCoverImage(string fileFilePath, string fileName, MangaFormat format) + { + return string.Empty; + } + + public void Extract(string fileFilePath, string targetDirectory, MangaFormat format, int imageCount = 1) + { + throw new System.NotImplementedException(); + } + + public ParserInfo Parse(string path, string rootPath, LibraryType type) + { + throw new System.NotImplementedException(); + } + + public ParserInfo ParseFile(string path, string rootPath, LibraryType type) + 
{ + throw new System.NotImplementedException(); } } +public class CacheServiceTests +{ + private readonly ILogger _logger = Substitute.For>(); + private readonly IUnitOfWork _unitOfWork; + private readonly IHubContext _messageHub = Substitute.For>(); + + private readonly DbConnection _connection; + private readonly DataContext _context; + + private const string CacheDirectory = "C:/kavita/config/cache/"; + private const string CoverImageDirectory = "C:/kavita/config/covers/"; + private const string BackupDirectory = "C:/kavita/config/backups/"; + private const string DataDirectory = "C:/data/"; + + public CacheServiceTests() + { + var contextOptions = new DbContextOptionsBuilder() + .UseSqlite(CreateInMemoryDatabase()) + .Options; + _connection = RelationalOptionsExtension.Extract(contextOptions).Connection; + + _context = new DataContext(contextOptions); + Task.Run(SeedDb).GetAwaiter().GetResult(); + + _unitOfWork = new UnitOfWork(_context, Substitute.For(), null); + } + + #region Setup + + private static DbConnection CreateInMemoryDatabase() + { + var connection = new SqliteConnection("Filename=:memory:"); + + connection.Open(); + + return connection; + } + + public void Dispose() => _connection.Dispose(); + + private async Task SeedDb() + { + await _context.Database.MigrateAsync(); + var filesystem = CreateFileSystem(); + + await Seed.SeedSettings(_context, new DirectoryService(Substitute.For>(), filesystem)); + + var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync(); + setting.Value = CacheDirectory; + + setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync(); + setting.Value = BackupDirectory; + + _context.ServerSetting.Update(setting); + + _context.Library.Add(new Library() + { + Name = "Manga", + Folders = new List() + { + new FolderPath() + { + Path = "C:/data/" + } + } + }); + return await _context.SaveChangesAsync() > 0; + } + + private async Task 
ResetDB() + { + _context.Series.RemoveRange(_context.Series.ToList()); + + await _context.SaveChangesAsync(); + } + + private static MockFileSystem CreateFileSystem() + { + var fileSystem = new MockFileSystem(); + fileSystem.Directory.SetCurrentDirectory("C:/kavita/"); + fileSystem.AddDirectory("C:/kavita/config/"); + fileSystem.AddDirectory(CacheDirectory); + fileSystem.AddDirectory(CoverImageDirectory); + fileSystem.AddDirectory(BackupDirectory); + fileSystem.AddDirectory(DataDirectory); + + return fileSystem; + } + + #endregion + + #region Ensure + + [Fact] + public async Task Ensure_DirectoryAlreadyExists_DontExtractAnything() + { + var filesystem = CreateFileSystem(); + filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData("")); + filesystem.AddDirectory($"{CacheDirectory}1/"); + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), + Substitute.For(), Substitute.For(), ds), Substitute.For()); + + await ResetDB(); + var s = DbFactory.Series("Test"); + var v = DbFactory.Volume("1"); + var c = new Chapter() + { + Number = "1", + Files = new List() + { + new MangaFile() + { + Format = MangaFormat.Archive, + FilePath = $"{DataDirectory}Test v1.zip", + } + } + }; + v.Chapters.Add(c); + s.Volumes.Add(v); + s.LibraryId = 1; + _context.Series.Add(s); + + await _context.SaveChangesAsync(); + + await cleanupService.Ensure(1); + Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories)); + } + + // [Fact] + // public async Task Ensure_DirectoryAlreadyExists_ExtractsImages() + // { + // // TODO: Figure out a way to test this + // var filesystem = CreateFileSystem(); + // filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData("")); + // filesystem.AddDirectory($"{CacheDirectory}1/"); + // var ds = new DirectoryService(Substitute.For>(), filesystem); + // var archiveService = 
Substitute.For(); + // archiveService.ExtractArchive($"{DataDirectory}Test v1.zip", + // filesystem.Path.Join(CacheDirectory, "1")); + // var cleanupService = new CacheService(_logger, _unitOfWork, ds, + // new ReadingItemService(archiveService, Substitute.For(), Substitute.For(), ds)); + // + // await ResetDB(); + // var s = DbFactory.Series("Test"); + // var v = DbFactory.Volume("1"); + // var c = new Chapter() + // { + // Number = "1", + // Files = new List() + // { + // new MangaFile() + // { + // Format = MangaFormat.Archive, + // FilePath = $"{DataDirectory}Test v1.zip", + // } + // } + // }; + // v.Chapters.Add(c); + // s.Volumes.Add(v); + // s.LibraryId = 1; + // _context.Series.Add(s); + // + // await _context.SaveChangesAsync(); + // + // await cleanupService.Ensure(1); + // Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories)); + // } + + + #endregion + + #region CleanupChapters + + [Fact] + public void CleanupChapters_AllFilesShouldBeDeleted() + { + var filesystem = CreateFileSystem(); + filesystem.AddDirectory($"{CacheDirectory}1/"); + filesystem.AddFile($"{CacheDirectory}1/001.jpg", new MockFileData("")); + filesystem.AddFile($"{CacheDirectory}1/002.jpg", new MockFileData("")); + filesystem.AddFile($"{CacheDirectory}3/003.jpg", new MockFileData("")); + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), + Substitute.For(), Substitute.For(), ds), Substitute.For()); + + cleanupService.CleanupChapters(new []{1, 3}); + Assert.Empty(ds.GetFiles(CacheDirectory, searchOption:SearchOption.AllDirectories)); + } + + + #endregion + + #region GetCachedEpubFile + + [Fact] + public void GetCachedEpubFile_ShouldReturnFirstEpub() + { + var filesystem = CreateFileSystem(); + filesystem.AddDirectory($"{CacheDirectory}1/"); + filesystem.AddFile($"{DataDirectory}1.epub", new MockFileData("")); + 
filesystem.AddFile($"{DataDirectory}2.epub", new MockFileData("")); + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cs = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), + Substitute.For(), Substitute.For(), ds), Substitute.For()); + + var c = new Chapter() + { + Files = new List() + { + new MangaFile() + { + FilePath = $"{DataDirectory}1.epub" + }, + new MangaFile() + { + FilePath = $"{DataDirectory}2.epub" + } + } + }; + cs.GetCachedFile(c); + Assert.Same($"{DataDirectory}1.epub", cs.GetCachedFile(c)); + } + + #endregion + + #region GetCachedPagePath + + [Fact] + public void GetCachedPagePath_ReturnNullIfNoFiles() + { + var filesystem = CreateFileSystem(); + filesystem.AddDirectory($"{CacheDirectory}1/"); + filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData("")); + filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData("")); + + var c = new Chapter() + { + Id = 1, + Files = new List() + }; + + var fileIndex = 0; + foreach (var file in c.Files) + { + for (var i = 0; i < file.Pages - 1; i++) + { + filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData("")); + } + + fileIndex++; + } + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cs = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), + Substitute.For(), Substitute.For(), ds), Substitute.For()); + + // Flatten to prepare for how GetFullPath expects + ds.Flatten($"{CacheDirectory}1/"); + + var path = cs.GetCachedPagePath(c, 11); + Assert.Equal(string.Empty, path); + } + + [Fact] + public void GetCachedPagePath_GetFileFromFirstFile() + { + var filesystem = CreateFileSystem(); + filesystem.AddDirectory($"{CacheDirectory}1/"); + filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData("")); + filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData("")); + + var c = new Chapter() + { + Id = 1, + Files = new List() + { + new MangaFile() + { + Id = 1, + 
FilePath = $"{DataDirectory}1.zip", + Pages = 10 + + }, + new MangaFile() + { + Id = 2, + FilePath = $"{DataDirectory}2.zip", + Pages = 5 + } + } + }; + + var fileIndex = 0; + foreach (var file in c.Files) + { + for (var i = 0; i < file.Pages; i++) + { + filesystem.AddFile($"{CacheDirectory}1/00{fileIndex}_00{i+1}.jpg", new MockFileData("")); + } + + fileIndex++; + } + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cs = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), + Substitute.For(), Substitute.For(), ds), Substitute.For()); + + // Flatten to prepare for how GetFullPath expects + ds.Flatten($"{CacheDirectory}1/"); + + Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_001.jpg"), ds.FileSystem.Path.GetFullPath(cs.GetCachedPagePath(c, 0))); + + } + + + [Fact] + public void GetCachedPagePath_GetLastPageFromSingleFile() + { + var filesystem = CreateFileSystem(); + filesystem.AddDirectory($"{CacheDirectory}1/"); + filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData("")); + + var c = new Chapter() + { + Id = 1, + Files = new List() + { + new MangaFile() + { + Id = 1, + FilePath = $"{DataDirectory}1.zip", + Pages = 10 + + } + } + }; + c.Pages = c.Files.Sum(f => f.Pages); + + var fileIndex = 0; + foreach (var file in c.Files) + { + for (var i = 0; i < file.Pages; i++) + { + filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData("")); + } + + fileIndex++; + } + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cs = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), + Substitute.For(), Substitute.For(), ds), Substitute.For()); + + // Flatten to prepare for how GetFullPath expects + ds.Flatten($"{CacheDirectory}1/"); + + // Remember that we start at 0, so this is the 10th file + var path = cs.GetCachedPagePath(c, c.Pages); + 
Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_0{c.Pages}.jpg"), ds.FileSystem.Path.GetFullPath(path)); + } + + [Fact] + public void GetCachedPagePath_GetFileFromSecondFile() + { + var filesystem = CreateFileSystem(); + filesystem.AddDirectory($"{CacheDirectory}1/"); + filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData("")); + filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData("")); + + var c = new Chapter() + { + Id = 1, + Files = new List() + { + new MangaFile() + { + Id = 1, + FilePath = $"{DataDirectory}1.zip", + Pages = 10 + + }, + new MangaFile() + { + Id = 2, + FilePath = $"{DataDirectory}2.zip", + Pages = 5 + } + } + }; + + var fileIndex = 0; + foreach (var file in c.Files) + { + for (var i = 0; i < file.Pages; i++) + { + filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData("")); + } + + fileIndex++; + } + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cs = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), + Substitute.For(), Substitute.For(), ds), Substitute.For()); + + // Flatten to prepare for how GetFullPath expects + ds.Flatten($"{CacheDirectory}1/"); + + // Remember that we start at 0, so this is the page + 1 file + var path = cs.GetCachedPagePath(c, 10); + Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/001_001.jpg"), ds.FileSystem.Path.GetFullPath(path)); + } + + #endregion + + #region ExtractChapterFiles + + // [Fact] + // public void ExtractChapterFiles_ShouldExtractOnlyImages() + // { + // const string testDirectory = "/manga/"; + // var fileSystem = new MockFileSystem(); + // for (var i = 0; i < 10; i++) + // { + // fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + // } + // + // fileSystem.AddDirectory(CacheDirectory); + // + // var ds = new DirectoryService(Substitute.For>(), fileSystem); + // var cs = new CacheService(_logger, _unitOfWork, ds, + // new 
MockReadingItemServiceForCacheService(ds)); + // + // + // cs.ExtractChapterFiles(CacheDirectory, new List() + // { + // new MangaFile() + // { + // ChapterId = 1, + // Format = MangaFormat.Archive, + // Pages = 2, + // FilePath = + // } + // }) + // } + + #endregion +} diff --git a/API.Tests/Services/CleanupServiceTests.cs b/API.Tests/Services/CleanupServiceTests.cs index a0934a5ca..5c60baf4d 100644 --- a/API.Tests/Services/CleanupServiceTests.cs +++ b/API.Tests/Services/CleanupServiceTests.cs @@ -38,6 +38,7 @@ public class CleanupServiceTests private const string CacheDirectory = "C:/kavita/config/cache/"; private const string CoverImageDirectory = "C:/kavita/config/covers/"; private const string BackupDirectory = "C:/kavita/config/backups/"; + private const string LogDirectory = "C:/kavita/config/logs/"; private const string BookmarkDirectory = "C:/kavita/config/bookmarks/"; @@ -84,6 +85,9 @@ public class CleanupServiceTests setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync(); setting.Value = BookmarkDirectory; + setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync(); + setting.Value = "10"; + _context.ServerSetting.Update(setting); _context.Library.Add(new Library() @@ -347,7 +351,7 @@ public class CleanupServiceTests var ds = new DirectoryService(Substitute.For>(), filesystem); var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub, ds); - cleanupService.CleanupCacheDirectory(); + cleanupService.CleanupCacheAndTempDirectories(); Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories)); } @@ -361,7 +365,7 @@ public class CleanupServiceTests var ds = new DirectoryService(Substitute.For>(), filesystem); var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub, ds); - cleanupService.CleanupCacheDirectory(); + cleanupService.CleanupCacheAndTempDirectories(); Assert.Empty(ds.GetFiles(CacheDirectory, 
searchOption: SearchOption.AllDirectories)); } @@ -412,6 +416,59 @@ public class CleanupServiceTests #endregion + #region CleanupLogs + + [Fact] + public async Task CleanupLogs_LeaveOneFile_SinceAllAreExpired() + { + var filesystem = CreateFileSystem(); + foreach (var i in Enumerable.Range(1, 10)) + { + var day = API.Services.Tasks.Scanner.Parser.Parser.PadZeros($"{i}"); + filesystem.AddFile($"{LogDirectory}kavita202009{day}.log", new MockFileData("") + { + CreationTime = DateTimeOffset.Now.Subtract(TimeSpan.FromDays(31)) + }); + } + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub, + ds); + await cleanupService.CleanupLogs(); + Assert.Single(ds.GetFiles(LogDirectory, searchOption: SearchOption.AllDirectories)); + } + + [Fact] + public async Task CleanupLogs_LeaveLestExpired() + { + var filesystem = CreateFileSystem(); + foreach (var i in Enumerable.Range(1, 9)) + { + var day = API.Services.Tasks.Scanner.Parser.Parser.PadZeros($"{i}"); + filesystem.AddFile($"{LogDirectory}kavita202009{day}.log", new MockFileData("") + { + CreationTime = DateTimeOffset.Now.Subtract(TimeSpan.FromDays(31 - i)) + }); + } + filesystem.AddFile($"{LogDirectory}kavita20200910.log", new MockFileData("") + { + CreationTime = DateTimeOffset.Now.Subtract(TimeSpan.FromDays(31 - 10)) + }); + filesystem.AddFile($"{LogDirectory}kavita20200911.log", new MockFileData("") + { + CreationTime = DateTimeOffset.Now.Subtract(TimeSpan.FromDays(31 - 11)) + }); + + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub, + ds); + await cleanupService.CleanupLogs(); + Assert.True(filesystem.File.Exists($"{LogDirectory}kavita20200911.log")); + } + + #endregion + // #region CleanupBookmarks // // [Fact] diff --git a/API.Tests/Services/DeviceServiceTests.cs b/API.Tests/Services/DeviceServiceTests.cs new file mode 100644 index 
000000000..717f3e98b --- /dev/null +++ b/API.Tests/Services/DeviceServiceTests.cs @@ -0,0 +1,80 @@ +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using API.DTOs.Device; +using API.Entities; +using API.Entities.Enums.Device; +using API.Services; +using API.Services.Tasks; +using Microsoft.Extensions.Logging; +using NSubstitute; +using Xunit; + +namespace API.Tests.Services; + +public class DeviceServiceTests : BasicTest +{ + private readonly ILogger _logger = Substitute.For>(); + private readonly IDeviceService _deviceService; + + public DeviceServiceTests() : base() + { + _deviceService = new DeviceService(_unitOfWork, _logger, Substitute.For()); + } + + protected new Task ResetDb() + { + _context.Users.RemoveRange(_context.Users.ToList()); + return Task.CompletedTask; + } + + + + [Fact] + public async Task CreateDevice_Succeeds() + { + + var user = new AppUser() + { + UserName = "majora2007", + Devices = new List() + }; + + _context.Users.Add(user); + await _unitOfWork.CommitAsync(); + + var device = await _deviceService.Create(new CreateDeviceDto() + { + EmailAddress = "fake@kindle.com", + Name = "Test Kindle", + Platform = DevicePlatform.Kindle + }, user); + + Assert.NotNull(device); + + } + + [Fact] + public async Task CreateDevice_ThrowsErrorWhenEmailDoesntMatchRules() + { + + var user = new AppUser() + { + UserName = "majora2007", + Devices = new List() + }; + + _context.Users.Add(user); + await _unitOfWork.CommitAsync(); + + var device = await _deviceService.Create(new CreateDeviceDto() + { + EmailAddress = "fake@gmail.com", + Name = "Test Kindle", + Platform = DevicePlatform.Kindle + }, user); + + Assert.NotNull(device); + + } +} diff --git a/API.Tests/Services/DirectoryServiceTests.cs b/API.Tests/Services/DirectoryServiceTests.cs index b6ebf6722..254d851fa 100644 --- a/API.Tests/Services/DirectoryServiceTests.cs +++ b/API.Tests/Services/DirectoryServiceTests.cs @@ -10,942 +10,941 @@ using 
Microsoft.Extensions.Logging; using NSubstitute; using Xunit; -namespace API.Tests.Services +namespace API.Tests.Services; + +public class DirectoryServiceTests { + private readonly ILogger _logger = Substitute.For>(); - public class DirectoryServiceTests + + #region TraverseTreeParallelForEach + [Fact] + public void TraverseTreeParallelForEach_JustArchives_ShouldBe28() { - private readonly ILogger _logger = Substitute.For>(); - - - #region TraverseTreeParallelForEach - [Fact] - public void TraverseTreeParallelForEach_JustArchives_ShouldBe28() + var testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 28; i++) { - var testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 28; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var files = new List(); - var fileCount = ds.TraverseTreeParallelForEach(testDirectory, s => files.Add(s), - API.Services.Tasks.Scanner.Parser.Parser.ArchiveFileExtensions, _logger); - - Assert.Equal(28, fileCount); - Assert.Equal(28, files.Count); + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); } - [Fact] - public void TraverseTreeParallelForEach_LongDirectory_ShouldBe1() + fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = new List(); + var fileCount = ds.TraverseTreeParallelForEach(testDirectory, s => files.Add(s), + API.Services.Tasks.Scanner.Parser.Parser.ArchiveFileExtensions, _logger); + + Assert.Equal(28, fileCount); + Assert.Equal(28, files.Count); + } + + [Fact] + public void TraverseTreeParallelForEach_LongDirectory_ShouldBe1() + { + var fileSystem = new MockFileSystem(); + // Create a super long path + var testDirectory = "/manga/"; + for (var i = 0; 
i < 200; i++) { - var fileSystem = new MockFileSystem(); - // Create a super long path - var testDirectory = "/manga/"; - for (var i = 0; i < 200; i++) - { - testDirectory = fileSystem.FileSystem.Path.Join(testDirectory, "supercalifragilisticexpialidocious"); - } - - - fileSystem.AddFile(fileSystem.FileSystem.Path.Join(testDirectory, "file_29.jpg"), new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var files = new List(); - try - { - var fileCount = ds.TraverseTreeParallelForEach("/manga/", s => files.Add(s), - API.Services.Tasks.Scanner.Parser.Parser.ImageFileExtensions, _logger); - Assert.Equal(1, fileCount); - } - catch (Exception ex) - { - Assert.False(true); - } - - - Assert.Equal(1, files.Count); + testDirectory = fileSystem.FileSystem.Path.Join(testDirectory, "supercalifragilisticexpialidocious"); } + fileSystem.AddFile(fileSystem.FileSystem.Path.Join(testDirectory, "file_29.jpg"), new MockFileData("")); - [Fact] - public void TraverseTreeParallelForEach_DontCountExcludedDirectories_ShouldBe28() + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = new List(); + try { - var testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 28; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - fileSystem.AddFile($"{Path.Join(testDirectory, "@eaDir")}file_{29}.jpg", new MockFileData("")); - fileSystem.AddFile($"{Path.Join(testDirectory, ".DS_Store")}file_{30}.jpg", new MockFileData("")); - fileSystem.AddFile($"{Path.Join(testDirectory, ".qpkg")}file_{30}.jpg", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var files = new List(); - var fileCount = ds.TraverseTreeParallelForEach(testDirectory, s => files.Add(s), - API.Services.Tasks.Scanner.Parser.Parser.ArchiveFileExtensions, _logger); - - Assert.Equal(28, fileCount); - Assert.Equal(28, files.Count); + var fileCount = 
ds.TraverseTreeParallelForEach("/manga/", s => files.Add(s), + API.Services.Tasks.Scanner.Parser.Parser.ImageFileExtensions, _logger); + Assert.Equal(1, fileCount); } - #endregion - - #region GetFilesWithCertainExtensions - [Fact] - public void GetFilesWithCertainExtensions_ShouldBe10() + catch (Exception ex) { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var files = ds.GetFilesWithExtension(testDirectory, API.Services.Tasks.Scanner.Parser.Parser.ArchiveFileExtensions); - - Assert.Equal(10, files.Length); - Assert.All(files, s => fileSystem.Path.GetExtension(s).Equals(".zip")); - } - - [Fact] - public void GetFilesWithCertainExtensions_OnlyArchives() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - fileSystem.AddFile($"{testDirectory}file_{29}.rar", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var files = ds.GetFilesWithExtension(testDirectory, ".zip|.rar"); - - Assert.Equal(11, files.Length); - } - #endregion - - #region GetFiles - [Fact] - public void GetFiles_ArchiveOnly_ShouldBe10() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var files = ds.GetFiles(testDirectory, API.Services.Tasks.Scanner.Parser.Parser.ArchiveFileExtensions).ToList(); - - Assert.Equal(10, 
files.Count()); - Assert.All(files, s => fileSystem.Path.GetExtension(s).Equals(".zip")); - } - - [Fact] - public void GetFiles_All_ShouldBe11() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var files = ds.GetFiles(testDirectory).ToList(); - - Assert.Equal(11, files.Count()); - } - - [Fact] - public void GetFiles_All_MixedPathSeparators() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - fileSystem.AddFile($"/manga\\file_{29}.jpg", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var files = ds.GetFiles(testDirectory).ToList(); - - Assert.Equal(11, files.Count()); - } - - [Fact] - public void GetFiles_All_TopDirectoryOnly_ShouldBe10() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - fileSystem.AddFile($"{testDirectory}/SubDir/file_{29}.jpg", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var files = ds.GetFiles(testDirectory).ToList(); - - Assert.Equal(10, files.Count()); - } - - [Fact] - public void GetFiles_WithSubDirectories_ShouldCountOnlyTopLevel() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - fileSystem.AddFile($"{testDirectory}/SubDir/file_{29}.jpg", new MockFileData("")); - - var ds = new 
DirectoryService(Substitute.For>(), fileSystem); - var files = ds.GetFiles(testDirectory).ToList(); - - Assert.Equal(10, files.Count()); - } - - [Fact] - public void GetFiles_ShouldNotReturnFilesThatAreExcluded() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - fileSystem.AddFile($"{testDirectory}/._file_{29}.jpg", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var files = ds.GetFiles(testDirectory).ToList(); - - Assert.Equal(10, files.Count()); - } - - [Fact] - public void GetFiles_WithCustomRegex_ShouldBe10() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}data-{i}.txt", new MockFileData("")); - } - fileSystem.AddFile($"{testDirectory}joe.txt", new MockFileData("")); - fileSystem.AddFile($"{testDirectory}0d.txt", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var files = ds.GetFiles(testDirectory, @".*d.*\.txt"); - Assert.Equal(11, files.Count()); - } - - [Fact] - public void GetFiles_WithCustomRegexThatContainsFolder_ShouldBe10() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("")); - } - fileSystem.AddFile($"{testDirectory}joe.txt", new MockFileData("")); - fileSystem.AddFile($"{testDirectory}0d.txt", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var files = ds.GetFiles(testDirectory, @".*d.*\.txt", SearchOption.AllDirectories); - Assert.Equal(11, files.Count()); - } - #endregion - - #region GetTotalSize - [Fact] - public void GetTotalSize_ShouldBeGreaterThan0() - { - const string testDirectory = "/manga/"; - var 
fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc")); - } - fileSystem.AddFile($"{testDirectory}joe.txt", new MockFileData("")); - - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var fileSize = ds.GetTotalSize(fileSystem.AllFiles); - Assert.True(fileSize > 0); - } - #endregion - - #region CopyFileToDirectory - [Fact] - public void CopyFileToDirectory_ShouldCopyFileToNonExistentDirectory() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.CopyFileToDirectory($"{testDirectory}file/data-0.txt", "/manga/output/"); - Assert.True(fileSystem.FileExists("manga/output/data-0.txt")); - Assert.True(fileSystem.FileExists("manga/file/data-0.txt")); - } - [Fact] - public void CopyFileToDirectory_ShouldCopyFileToExistingDirectoryAndOverwrite() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); - fileSystem.AddFile($"{testDirectory}output/data-0.txt", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.CopyFileToDirectory($"{testDirectory}file/data-0.txt", "/manga/output/"); - Assert.True(fileSystem.FileExists("/manga/output/data-0.txt")); - Assert.True(fileSystem.FileExists("/manga/file/data-0.txt")); - Assert.True(fileSystem.FileInfo.FromFileName("/manga/file/data-0.txt").Length == fileSystem.FileInfo.FromFileName("/manga/output/data-0.txt").Length); - } - #endregion - - #region CopyDirectoryToDirectory - [Fact] - public void CopyDirectoryToDirectory_ShouldThrowWhenSourceDestinationDoesntExist() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - 
fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); - fileSystem.AddFile($"{testDirectory}output/data-0.txt", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var ex = Assert.Throws(() => ds.CopyDirectoryToDirectory("/comics/", "/manga/output/")); - Assert.Equal(ex.Message, "Source directory does not exist or could not be found: " + "/comics/"); - } - - [Fact] - public void CopyDirectoryToDirectory_ShouldCopyEmptyDirectory() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); - fileSystem.AddDirectory($"{testDirectory}empty/"); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.CopyDirectoryToDirectory($"{testDirectory}empty/", "/manga/output/"); - Assert.Empty(fileSystem.DirectoryInfo.FromDirectoryName("/manga/output/").GetFiles()); - } - - [Fact] - public void CopyDirectoryToDirectory_ShouldCopyAllFileAndNestedDirectoriesOver() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); - fileSystem.AddFile($"{testDirectory}data-1.txt", new MockFileData("abc")); - fileSystem.AddDirectory($"{testDirectory}empty/"); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.CopyDirectoryToDirectory($"{testDirectory}", "/manga/output/"); - Assert.Equal(2, ds.GetFiles("/manga/output/", searchOption: SearchOption.AllDirectories).Count()); - } - #endregion - - #region IsDriveMounted - [Fact] - public void IsDriveMounted_DriveIsNotMounted() - { - const string testDirectory = "c:/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc")); - var ds = new DirectoryService(Substitute.For>(), fileSystem); - - Assert.False(ds.IsDriveMounted("d:/manga/")); - } - - [Fact] - public void 
IsDriveMounted_DriveIsMounted() - { - const string testDirectory = "c:/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc")); - var ds = new DirectoryService(Substitute.For>(), fileSystem); - - Assert.True(ds.IsDriveMounted("c:/manga/file")); - } - #endregion - - #region IsDirectoryEmpty - [Fact] - public void IsDirectoryEmpty_DirectoryIsEmpty() - { - const string testDirectory = "c:/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddDirectory(testDirectory); - var ds = new DirectoryService(Substitute.For>(), fileSystem); - - Assert.True(ds.IsDirectoryEmpty("c:/manga/")); - } - - [Fact] - public void IsDirectoryEmpty_DirectoryIsNotEmpty() - { - const string testDirectory = "c:/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc")); - var ds = new DirectoryService(Substitute.For>(), fileSystem); - - Assert.False(ds.IsDirectoryEmpty("c:/manga/")); - } - #endregion - - #region ExistOrCreate - [Fact] - public void ExistOrCreate_ShouldCreate() - { - var fileSystem = new MockFileSystem(); - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.ExistOrCreate("c:/manga/output/"); - - Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName("c:/manga/output/").Exists); - } - #endregion - - #region ClearAndDeleteDirectory - [Fact] - public void ClearAndDeleteDirectory_ShouldDeleteSelfAndAllFilesAndFolders() - { - const string testDirectory = "/manga/base/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc")); - } - fileSystem.AddFile($"{testDirectory}data-a.txt", new MockFileData("abc")); - fileSystem.AddFile($"{testDirectory}data-b.txt", new MockFileData("abc")); - fileSystem.AddDirectory($"{testDirectory}empty/"); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - 
ds.ClearAndDeleteDirectory($"{testDirectory}"); - Assert.Empty(ds.GetFiles("/manga/", searchOption: SearchOption.AllDirectories)); - Assert.Empty(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/").GetDirectories()); - Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/").Exists); - Assert.False(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/base").Exists); - } - #endregion - - #region ClearDirectory - [Fact] - public void ClearDirectory_ShouldDeleteAllFilesAndFolders_LeaveSelf() - { - const string testDirectory = "/manga/base/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc")); - } - fileSystem.AddFile($"{testDirectory}data-a.txt", new MockFileData("abc")); - fileSystem.AddFile($"{testDirectory}data-b.txt", new MockFileData("abc")); - fileSystem.AddDirectory($"{testDirectory}file/empty/"); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.ClearDirectory($"{testDirectory}file/"); - Assert.Empty(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}file/").GetDirectories()); - Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/").Exists); - Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}file/").Exists); - } - - [Fact] - public void ClearDirectory_ShouldDeleteFoldersWithOneFileInside() - { - const string testDirectory = "/manga/base/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc")); - } - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.ClearDirectory($"{testDirectory}"); - Assert.Empty(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}").GetDirectories()); - Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName(testDirectory).Exists); - 
Assert.False(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}file/").Exists); - } - #endregion - - #region CopyFilesToDirectory - [Fact] - public void CopyFilesToDirectory_ShouldMoveAllFiles() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip"}, "/manga/output/"); - Assert.Equal(2, ds.GetFiles("/manga/output/").Count()); - } - - [Fact] - public void CopyFilesToDirectory_ShouldMoveAllFilesAndNotFailOnNonExistentFiles() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{200}.zip", $"{testDirectory}file_{1}.zip"}, "/manga/output/"); - Assert.Equal(2, ds.GetFiles("/manga/output/").Count()); - } - - [Fact] - public void CopyFilesToDirectory_ShouldMoveAllFiles_InclFilesInNestedFolders() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - fileSystem.AddFile($"{testDirectory}nested/file_11.zip", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip", $"{testDirectory}nested/file_11.zip"}, "/manga/output/"); - Assert.Equal(3, ds.GetFiles("/manga/output/").Count()); - } - - [Fact] - public void CopyFilesToDirectory_ShouldMoveAllFiles_WithPrepend() - { - const string 
testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip", $"{testDirectory}nested/file_11.zip"}, - "/manga/output/", "mangarocks_"); - Assert.Equal(2, ds.GetFiles("/manga/output/").Count()); - Assert.All(ds.GetFiles("/manga/output/"), filepath => ds.FileSystem.Path.GetFileName(filepath).StartsWith("mangarocks_")); - } - - [Fact] - public void CopyFilesToDirectory_ShouldMoveOnlyFilesThatExist() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - for (var i = 0; i < 10; i++) - { - fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); - } - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip", $"{testDirectory}nested/file_11.zip"}, - "/manga/output/"); - Assert.Equal(2, ds.GetFiles("/manga/output/").Count()); - } - - [Fact] - public void CopyFilesToDirectory_ShouldAppendWhenTargetFileExists() - { - - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddFile(MockUnixSupport.Path($"{testDirectory}file.zip"), new MockFileData("")); - fileSystem.AddFile(MockUnixSupport.Path($"/manga/output/file (1).zip"), new MockFileData("")); - fileSystem.AddFile(MockUnixSupport.Path($"/manga/output/file (2).zip"), new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.CopyFilesToDirectory(new []{MockUnixSupport.Path($"{testDirectory}file.zip")}, "/manga/output/"); - ds.CopyFilesToDirectory(new []{MockUnixSupport.Path($"{testDirectory}file.zip")}, "/manga/output/"); - var outputFiles = 
ds.GetFiles("/manga/output/").Select(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath).ToList(); - Assert.Equal(4, outputFiles.Count()); // we have 2 already there and 2 copies - // For some reason, this has C:/ on directory even though everything is emulated (System.IO.Abstractions issue, not changing) - // https://github.com/TestableIO/System.IO.Abstractions/issues/831 - Assert.True(outputFiles.Contains(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath("/manga/output/file (3).zip")) - || outputFiles.Contains(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath("C:/manga/output/file (3).zip"))); - } - - #endregion - - #region ListDirectory - [Fact] - public void ListDirectory_EmptyForNonExistent() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddFile($"{testDirectory}file_0.zip", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - Assert.Empty(ds.ListDirectory("/comics/")); - } - - [Fact] - public void ListDirectory_ListsAllDirectories() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddDirectory($"{testDirectory}dir1"); - fileSystem.AddDirectory($"{testDirectory}dir2"); - fileSystem.AddDirectory($"{testDirectory}dir3"); - fileSystem.AddFile($"{testDirectory}file_0.zip", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - Assert.Equal(3, ds.ListDirectory(testDirectory).Count()); - } - - [Fact] - public void ListDirectory_ListsOnlyNonSystemAndHiddenOnly() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddDirectory($"{testDirectory}dir1"); - var di = fileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}dir1"); - di.Attributes |= FileAttributes.System; - fileSystem.AddDirectory($"{testDirectory}dir2"); - di = fileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}dir2"); - di.Attributes |= 
FileAttributes.Hidden; - fileSystem.AddDirectory($"{testDirectory}dir3"); - fileSystem.AddFile($"{testDirectory}file_0.zip", new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - Assert.Equal(1, ds.ListDirectory(testDirectory).Count()); - } - - #endregion - - #region ReadFileAsync - - [Fact] - public async Task ReadFileAsync_ShouldGetBytes() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddFile($"{testDirectory}file_1.zip", new MockFileData("Hello")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var bytes = await ds.ReadFileAsync($"{testDirectory}file_1.zip"); - Assert.Equal(Encoding.UTF8.GetBytes("Hello"), bytes); - } - - [Fact] - public async Task ReadFileAsync_ShouldReadNothingFromNonExistent() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddFile($"{testDirectory}file_1.zip", new MockFileData("Hello")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var bytes = await ds.ReadFileAsync($"{testDirectory}file_32123.zip"); - Assert.Empty(bytes); + Assert.False(true); } - #endregion + Assert.Equal(1, files.Count); + } - #region FindHighestDirectoriesFromFiles - [Theory] - [InlineData(new [] {"C:/Manga/"}, new [] {"C:/Manga/Love Hina/Vol. 01.cbz"}, "C:/Manga/Love Hina")] - [InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/Dir 2/"}, new [] {"C:/Manga/Dir 1/Love Hina/Vol. 01.cbz"}, "C:/Manga/Dir 1/Love Hina")] - [InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/"}, new [] {"D:/Manga/Love Hina/Vol. 01.cbz", "D:/Manga/Vol. 01.cbz"}, "")] - [InlineData(new [] {"C:/Manga/"}, new [] {"C:/Manga//Love Hina/Vol. 
01.cbz"}, "C:/Manga/Love Hina")] - [InlineData(new [] {@"C:\mount\drive\Library\Test Library\Comics\"}, new [] {@"C:\mount\drive\Library\Test Library\Comics\Bruce Lee (1994)\Bruce Lee #001 (1994).cbz"}, @"C:/mount/drive/Library/Test Library/Comics/Bruce Lee (1994)")] - public void FindHighestDirectoriesFromFilesTest(string[] rootDirectories, string[] files, string expectedDirectory) + + [Fact] + public void TraverseTreeParallelForEach_DontCountExcludedDirectories_ShouldBe28() + { + var testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 28; i++) { - var fileSystem = new MockFileSystem(); - foreach (var directory in rootDirectories) - { - fileSystem.AddDirectory(directory); - } - foreach (var f in files) - { - fileSystem.AddFile(f, new MockFileData("")); - } - var ds = new DirectoryService(Substitute.For>(), fileSystem); - - var actual = ds.FindHighestDirectoriesFromFiles(rootDirectories, files); - var expected = new Dictionary(); - if (!string.IsNullOrEmpty(expectedDirectory)) - { - expected = new Dictionary {{expectedDirectory, ""}}; - } - - Assert.Equal(expected, actual); + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); } - #endregion + fileSystem.AddFile($"{Path.Join(testDirectory, "@eaDir")}file_{29}.jpg", new MockFileData("")); + fileSystem.AddFile($"{Path.Join(testDirectory, ".DS_Store")}file_{30}.jpg", new MockFileData("")); + fileSystem.AddFile($"{Path.Join(testDirectory, ".qpkg")}file_{30}.jpg", new MockFileData("")); - #region GetFoldersTillRoot + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = new List(); + var fileCount = ds.TraverseTreeParallelForEach(testDirectory, s => files.Add(s), + API.Services.Tasks.Scanner.Parser.Parser.ArchiveFileExtensions, _logger); - [Theory] - [InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")] - [InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake", "Omake,Specials,Love Hina")] - 
[InlineData("C:/Manga", "C:/Manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")] - [InlineData("C:/Manga", @"C:\Manga\Love Hina\Specials\Omake\", "Omake,Specials,Love Hina")] - [InlineData(@"/manga/", @"/manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")] - [InlineData(@"/manga/", @"/manga/", "")] - [InlineData(@"E:\test", @"E:\test\Sweet X Trouble\Sweet X Trouble - Chapter 001.cbz", "Sweet X Trouble")] - [InlineData(@"C:\/mount/gdrive/Library/Test Library/Comics/", @"C:\/mount/gdrive/Library/Test Library/Comics\godzilla rivals vs hedorah\vol 1\", "vol 1,godzilla rivals vs hedorah")] - [InlineData(@"/manga/", @"/manga/Btooom!/Vol.1 Chapter 2/1.cbz", "Vol.1 Chapter 2,Btooom!")] - [InlineData(@"C:/", @"C://Btooom!/Vol.1 Chapter 2/1.cbz", "Vol.1 Chapter 2,Btooom!")] - [InlineData(@"C:\\", @"C://Btooom!/Vol.1 Chapter 2/1.cbz", "Vol.1 Chapter 2,Btooom!")] - [InlineData(@"C://mount/gdrive/Library/Test Library/Comics", @"C://mount/gdrive/Library/Test Library/Comics/Dragon Age/Test", "Test,Dragon Age")] - [InlineData(@"M:\", @"M:\Toukyou Akazukin\Vol. 01 Ch. 
005.cbz", @"Toukyou Akazukin")] - public void GetFoldersTillRoot_Test(string rootPath, string fullpath, string expectedArray) + Assert.Equal(28, fileCount); + Assert.Equal(28, files.Count); + } + #endregion + + #region GetFilesWithCertainExtensions + [Fact] + public void GetFilesWithCertainExtensions_ShouldBe10() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) { - var fileSystem = new MockFileSystem(); - fileSystem.AddDirectory(rootPath); - fileSystem.AddFile(fullpath, new MockFileData("")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - - var expected = expectedArray.Split(","); - if (expectedArray.Equals(string.Empty)) - { - expected = Array.Empty(); - } - Assert.Equal(expected, ds.GetFoldersTillRoot(rootPath, fullpath)); + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); } - #endregion + fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData("")); - #region RemoveNonImages + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFilesWithExtension(testDirectory, API.Services.Tasks.Scanner.Parser.Parser.ArchiveFileExtensions); - [Fact] - public void RemoveNonImages() + Assert.Equal(10, files.Length); + Assert.All(files, s => fileSystem.Path.GetExtension(s).Equals(".zip")); + } + + [Fact] + public void GetFilesWithCertainExtensions_OnlyArchives() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddDirectory(testDirectory); - fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); - fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc")); - fileSystem.AddFile($"{testDirectory}data-2.png", new MockFileData("abc")); - fileSystem.AddFile($"{testDirectory}data-3.webp", new MockFileData("abc")); - - var ds = 
new DirectoryService(Substitute.For>(), fileSystem); - ds.RemoveNonImages($"{testDirectory}"); - Assert.False(fileSystem.FileExists($"{testDirectory}file/data-0.txt")); - Assert.Equal(3, ds.GetFiles($"{testDirectory}", searchOption:SearchOption.AllDirectories).Count()); + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); } - #endregion + fileSystem.AddFile($"{testDirectory}file_{29}.rar", new MockFileData("")); - #region Flatten + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFilesWithExtension(testDirectory, ".zip|.rar"); - [Fact] - public void Flatten_ShouldDoNothing() + Assert.Equal(11, files.Length); + } + #endregion + + #region GetFiles + [Fact] + public void GetFiles_ArchiveOnly_ShouldBe10() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddDirectory(testDirectory); - fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc")); - fileSystem.AddFile($"{testDirectory}data-2.png", new MockFileData("abc")); - fileSystem.AddFile($"{testDirectory}data-3.webp", new MockFileData("abc")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.Flatten($"{testDirectory}"); - Assert.True(fileSystem.FileExists($"{testDirectory}data-1.jpg")); - Assert.True(fileSystem.FileExists($"{testDirectory}data-2.png")); - Assert.True(fileSystem.FileExists($"{testDirectory}data-3.webp")); + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); } - [Fact] - public void Flatten_ShouldFlatten() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddDirectory(testDirectory); - fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc")); - fileSystem.AddFile($"{testDirectory}subdir/data-3.webp", new MockFileData("abc")); + 
fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData("")); - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.Flatten($"{testDirectory}"); - Assert.Equal(2, ds.GetFiles(testDirectory).Count()); - Assert.False(fileSystem.FileExists($"{testDirectory}subdir/data-3.webp")); - Assert.True(fileSystem.Directory.Exists($"{testDirectory}subdir/")); + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory, API.Services.Tasks.Scanner.Parser.Parser.ArchiveFileExtensions).ToList(); + + Assert.Equal(10, files.Count()); + Assert.All(files, s => fileSystem.Path.GetExtension(s).Equals(".zip")); + } + + [Fact] + public void GetFiles_All_ShouldBe11() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); } - [Fact] - public void Flatten_ShouldFlatten_WithoutMacosx() - { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - fileSystem.AddDirectory(testDirectory); - fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc")); - fileSystem.AddFile($"{testDirectory}subdir/data-3.webp", new MockFileData("abc")); - fileSystem.AddFile($"{testDirectory}__MACOSX/data-4.webp", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData("")); - var ds = new DirectoryService(Substitute.For>(), fileSystem); - ds.Flatten($"{testDirectory}"); - Assert.Equal(2, ds.GetFiles(testDirectory).Count()); - Assert.False(fileSystem.FileExists($"{testDirectory}data-4.webp")); + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory).ToList(); + + Assert.Equal(11, files.Count()); + } + + [Fact] + public void GetFiles_All_MixedPathSeparators() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + 
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); } - #endregion + fileSystem.AddFile($"/manga\\file_{29}.jpg", new MockFileData("")); - #region CheckWriteAccess + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory).ToList(); - [Fact] - public async Task CheckWriteAccess_ShouldHaveAccess() + Assert.Equal(11, files.Count()); + } + + [Fact] + public void GetFiles_All_TopDirectoryOnly_ShouldBe10() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) { - const string testDirectory = "/manga/"; - var fileSystem = new MockFileSystem(); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - var hasAccess = await ds.CheckWriteAccess(ds.FileSystem.Path.Join(testDirectory, "bookmarks")); - Assert.True(hasAccess); - - Assert.False(ds.FileSystem.Directory.Exists(ds.FileSystem.Path.Join(testDirectory, "bookmarks"))); - Assert.False(ds.FileSystem.File.Exists(ds.FileSystem.Path.Join(testDirectory, "bookmarks", "test.txt"))); + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); } + fileSystem.AddFile($"{testDirectory}/SubDir/file_{29}.jpg", new MockFileData("")); - #endregion + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory).ToList(); - #region GetHumanReadableBytes + Assert.Equal(10, files.Count()); + } - [Theory] - [InlineData(1200, "1.17 KB")] - [InlineData(1, "1 B")] - [InlineData(10000000, "9.54 MB")] - [InlineData(10000000000, "9.31 GB")] - public void GetHumanReadableBytesTest(long bytes, string expected) + [Fact] + public void GetFiles_WithSubDirectories_ShouldCountOnlyTopLevel() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) { - Assert.Equal(expected, DirectoryService.GetHumanReadableBytes(bytes)); - } - #endregion - - #region ScanFiles - - [Fact] - public Task 
ScanFiles_ShouldFindNoFiles_AllAreIgnored() - { - var fileSystem = new MockFileSystem(); - fileSystem.AddDirectory("C:/Data/"); - fileSystem.AddDirectory("C:/Data/Accel World"); - fileSystem.AddDirectory("C:/Data/Accel World/Specials/"); - fileSystem.AddFile("C:/Data/Accel World/Accel World v1.cbz", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Accel World/Accel World v2.cbz", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Accel World/Accel World v2.pdf", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Accel World/Specials/Accel World SP01.cbz", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/.kavitaignore", new MockFileData("*.*")); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - - - var allFiles = ds.ScanFiles("C:/Data/"); - - Assert.Equal(0, allFiles.Count); - - return Task.CompletedTask; + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); } + fileSystem.AddFile($"{testDirectory}/SubDir/file_{29}.jpg", new MockFileData("")); - [Fact] - public Task ScanFiles_ShouldFindNoNestedFiles_IgnoreNestedFiles() + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory).ToList(); + + Assert.Equal(10, files.Count()); + } + + [Fact] + public void GetFiles_ShouldNotReturnFilesThatAreExcluded() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) { - var fileSystem = new MockFileSystem(); - fileSystem.AddDirectory("C:/Data/"); - fileSystem.AddDirectory("C:/Data/Accel World"); - fileSystem.AddDirectory("C:/Data/Accel World/Specials/"); - fileSystem.AddFile("C:/Data/Accel World/Accel World v1.cbz", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Accel World/Accel World v2.cbz", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Accel World/Accel World v2.pdf", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Accel 
World/Specials/Accel World SP01.cbz", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/.kavitaignore", new MockFileData("**/Accel World/*")); - fileSystem.AddFile("C:/Data/Hello.pdf", new MockFileData(string.Empty)); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - - var allFiles = ds.ScanFiles("C:/Data/"); - - Assert.Equal(1, allFiles.Count); // Ignore files are not counted in files, only valid extensions - - return Task.CompletedTask; + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); } + fileSystem.AddFile($"{testDirectory}/._file_{29}.jpg", new MockFileData("")); - [Fact] - public Task ScanFiles_NestedIgnore_IgnoreNestedFilesInOneDirectoryOnly() + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory).ToList(); + + Assert.Equal(10, files.Count()); + } + + [Fact] + public void GetFiles_WithCustomRegex_ShouldBe10() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) { - var fileSystem = new MockFileSystem(); - fileSystem.AddDirectory("C:/Data/"); - fileSystem.AddDirectory("C:/Data/Accel World"); - fileSystem.AddDirectory("C:/Data/Accel World/Specials/"); - fileSystem.AddDirectory("C:/Data/Specials/"); - fileSystem.AddDirectory("C:/Data/Specials/ArtBooks/"); - fileSystem.AddFile("C:/Data/Accel World/Accel World v1.cbz", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Accel World/Accel World v2.cbz", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Accel World/Accel World v2.pdf", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Accel World/Specials/Accel World SP01.cbz", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/.kavitaignore", new MockFileData("**/Accel World/*")); - fileSystem.AddFile("C:/Data/Specials/.kavitaignore", new MockFileData("**/ArtBooks/*")); - fileSystem.AddFile("C:/Data/Specials/Hi.pdf", new 
MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Specials/ArtBooks/art book 01.pdf", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Hello.pdf", new MockFileData(string.Empty)); + fileSystem.AddFile($"{testDirectory}data-{i}.txt", new MockFileData("")); + } + fileSystem.AddFile($"{testDirectory}joe.txt", new MockFileData("")); + fileSystem.AddFile($"{testDirectory}0d.txt", new MockFileData("")); - var ds = new DirectoryService(Substitute.For>(), fileSystem); + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory, @".*d.*\.txt"); + Assert.Equal(11, files.Count()); + } - var allFiles = ds.ScanFiles("C:/Data/"); + [Fact] + public void GetFiles_WithCustomRegexThatContainsFolder_ShouldBe10() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("")); + } + fileSystem.AddFile($"{testDirectory}joe.txt", new MockFileData("")); + fileSystem.AddFile($"{testDirectory}0d.txt", new MockFileData("")); - Assert.Equal(2, allFiles.Count); // Ignore files are not counted in files, only valid extensions + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory, @".*d.*\.txt", SearchOption.AllDirectories); + Assert.Equal(11, files.Count()); + } + #endregion - return Task.CompletedTask; + #region GetTotalSize + [Fact] + public void GetTotalSize_ShouldBeGreaterThan0() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc")); + } + fileSystem.AddFile($"{testDirectory}joe.txt", new MockFileData("")); + + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var fileSize = ds.GetTotalSize(fileSystem.AllFiles); + Assert.True(fileSize > 0); + } + #endregion + + #region 
CopyFileToDirectory + [Fact] + public void CopyFileToDirectory_ShouldCopyFileToNonExistentDirectory() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFileToDirectory($"{testDirectory}file/data-0.txt", "/manga/output/"); + Assert.True(fileSystem.FileExists("manga/output/data-0.txt")); + Assert.True(fileSystem.FileExists("manga/file/data-0.txt")); + } + [Fact] + public void CopyFileToDirectory_ShouldCopyFileToExistingDirectoryAndOverwrite() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}output/data-0.txt", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFileToDirectory($"{testDirectory}file/data-0.txt", "/manga/output/"); + Assert.True(fileSystem.FileExists("/manga/output/data-0.txt")); + Assert.True(fileSystem.FileExists("/manga/file/data-0.txt")); + Assert.True(fileSystem.FileInfo.FromFileName("/manga/file/data-0.txt").Length == fileSystem.FileInfo.FromFileName("/manga/output/data-0.txt").Length); + } + #endregion + + #region CopyDirectoryToDirectory + [Fact] + public void CopyDirectoryToDirectory_ShouldThrowWhenSourceDestinationDoesntExist() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}output/data-0.txt", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var ex = Assert.Throws(() => ds.CopyDirectoryToDirectory("/comics/", "/manga/output/")); + Assert.Equal(ex.Message, "Source directory does not exist or could not be found: " + "/comics/"); + } + + [Fact] + public 
void CopyDirectoryToDirectory_ShouldCopyEmptyDirectory() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); + fileSystem.AddDirectory($"{testDirectory}empty/"); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyDirectoryToDirectory($"{testDirectory}empty/", "/manga/output/"); + Assert.Empty(fileSystem.DirectoryInfo.FromDirectoryName("/manga/output/").GetFiles()); + } + + [Fact] + public void CopyDirectoryToDirectory_ShouldCopyAllFileAndNestedDirectoriesOver() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-1.txt", new MockFileData("abc")); + fileSystem.AddDirectory($"{testDirectory}empty/"); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyDirectoryToDirectory($"{testDirectory}", "/manga/output/"); + Assert.Equal(2, ds.GetFiles("/manga/output/", searchOption: SearchOption.AllDirectories).Count()); + } + #endregion + + #region IsDriveMounted + [Fact] + public void IsDriveMounted_DriveIsNotMounted() + { + const string testDirectory = "c:/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc")); + var ds = new DirectoryService(Substitute.For>(), fileSystem); + + Assert.False(ds.IsDriveMounted("d:/manga/")); + } + + [Fact] + public void IsDriveMounted_DriveIsMounted() + { + const string testDirectory = "c:/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc")); + var ds = new DirectoryService(Substitute.For>(), fileSystem); + + Assert.True(ds.IsDriveMounted("c:/manga/file")); + } + #endregion + + #region IsDirectoryEmpty + [Fact] + public void IsDirectoryEmpty_DirectoryIsEmpty() + { + const string 
testDirectory = "c:/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory(testDirectory); + var ds = new DirectoryService(Substitute.For>(), fileSystem); + + Assert.True(ds.IsDirectoryEmpty("c:/manga/")); + } + + [Fact] + public void IsDirectoryEmpty_DirectoryIsNotEmpty() + { + const string testDirectory = "c:/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc")); + var ds = new DirectoryService(Substitute.For>(), fileSystem); + + Assert.False(ds.IsDirectoryEmpty("c:/manga/")); + } + #endregion + + #region ExistOrCreate + [Fact] + public void ExistOrCreate_ShouldCreate() + { + var fileSystem = new MockFileSystem(); + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.ExistOrCreate("c:/manga/output/"); + + Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName("c:/manga/output/").Exists); + } + #endregion + + #region ClearAndDeleteDirectory + [Fact] + public void ClearAndDeleteDirectory_ShouldDeleteSelfAndAllFilesAndFolders() + { + const string testDirectory = "/manga/base/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc")); + } + fileSystem.AddFile($"{testDirectory}data-a.txt", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-b.txt", new MockFileData("abc")); + fileSystem.AddDirectory($"{testDirectory}empty/"); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.ClearAndDeleteDirectory($"{testDirectory}"); + Assert.Empty(ds.GetFiles("/manga/", searchOption: SearchOption.AllDirectories)); + Assert.Empty(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/").GetDirectories()); + Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/").Exists); + Assert.False(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/base").Exists); + } + #endregion + + #region ClearDirectory + [Fact] + public void 
ClearDirectory_ShouldDeleteAllFilesAndFolders_LeaveSelf() + { + const string testDirectory = "/manga/base/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc")); + } + fileSystem.AddFile($"{testDirectory}data-a.txt", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-b.txt", new MockFileData("abc")); + fileSystem.AddDirectory($"{testDirectory}file/empty/"); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.ClearDirectory($"{testDirectory}file/"); + Assert.Empty(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}file/").GetDirectories()); + Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/").Exists); + Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}file/").Exists); + } + + [Fact] + public void ClearDirectory_ShouldDeleteFoldersWithOneFileInside() + { + const string testDirectory = "/manga/base/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc")); } + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.ClearDirectory($"{testDirectory}"); + Assert.Empty(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}").GetDirectories()); + Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName(testDirectory).Exists); + Assert.False(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}file/").Exists); + } + #endregion - [Fact] - public Task ScanFiles_ShouldFindAllFiles() + #region CopyFilesToDirectory + [Fact] + public void CopyFilesToDirectory_ShouldMoveAllFiles() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) { - var fileSystem = new MockFileSystem(); - fileSystem.AddDirectory("C:/Data/"); - fileSystem.AddDirectory("C:/Data/Accel World"); - 
fileSystem.AddDirectory("C:/Data/Accel World/Specials/"); - fileSystem.AddFile("C:/Data/Accel World/Accel World v1.cbz", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Accel World/Accel World v2.cbz", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Accel World/Accel World v2.pdf", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Accel World/Specials/Accel World SP01.cbz", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Accel World/Specials/Accel World SP01.txt", new MockFileData(string.Empty)); - fileSystem.AddFile("C:/Data/Nothing.pdf", new MockFileData(string.Empty)); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - - var allFiles = ds.ScanFiles("C:/Data/"); - - Assert.Equal(5, allFiles.Count); - - return Task.CompletedTask; + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); } + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip"}, "/manga/output/"); + Assert.Equal(2, ds.GetFiles("/manga/output/").Count()); + } + + [Fact] + public void CopyFilesToDirectory_ShouldMoveAllFilesAndNotFailOnNonExistentFiles() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{200}.zip", $"{testDirectory}file_{1}.zip"}, "/manga/output/"); + Assert.Equal(2, ds.GetFiles("/manga/output/").Count()); + } + + [Fact] + public void CopyFilesToDirectory_ShouldMoveAllFiles_InclFilesInNestedFolders() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new 
MockFileData("")); + } + fileSystem.AddFile($"{testDirectory}nested/file_11.zip", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip", $"{testDirectory}nested/file_11.zip"}, "/manga/output/"); + Assert.Equal(3, ds.GetFiles("/manga/output/").Count()); + } + + [Fact] + public void CopyFilesToDirectory_ShouldMoveAllFiles_WithPrepend() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip", $"{testDirectory}nested/file_11.zip"}, + "/manga/output/", "mangarocks_"); + Assert.Equal(2, ds.GetFiles("/manga/output/").Count()); + Assert.All(ds.GetFiles("/manga/output/"), filepath => ds.FileSystem.Path.GetFileName(filepath).StartsWith("mangarocks_")); + } + + [Fact] + public void CopyFilesToDirectory_ShouldMoveOnlyFilesThatExist() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip", $"{testDirectory}nested/file_11.zip"}, + "/manga/output/"); + Assert.Equal(2, ds.GetFiles("/manga/output/").Count()); + } + + [Fact] + public void CopyFilesToDirectory_ShouldAppendWhenTargetFileExists() + { + + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile(MockUnixSupport.Path($"{testDirectory}file.zip"), new MockFileData("")); + fileSystem.AddFile(MockUnixSupport.Path($"/manga/output/file (1).zip"), new 
MockFileData("")); + fileSystem.AddFile(MockUnixSupport.Path($"/manga/output/file (2).zip"), new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFilesToDirectory(new []{MockUnixSupport.Path($"{testDirectory}file.zip")}, "/manga/output/"); + ds.CopyFilesToDirectory(new []{MockUnixSupport.Path($"{testDirectory}file.zip")}, "/manga/output/"); + var outputFiles = ds.GetFiles("/manga/output/").Select(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath).ToList(); + Assert.Equal(4, outputFiles.Count()); // we have 2 already there and 2 copies + // For some reason, this has C:/ on directory even though everything is emulated (System.IO.Abstractions issue, not changing) + // https://github.com/TestableIO/System.IO.Abstractions/issues/831 + Assert.True(outputFiles.Contains(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath("/manga/output/file (3).zip")) + || outputFiles.Contains(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath("C:/manga/output/file (3).zip"))); + } + + #endregion + + #region ListDirectory + [Fact] + public void ListDirectory_EmptyForNonExistent() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file_0.zip", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + Assert.Empty(ds.ListDirectory("/comics/")); + } + + [Fact] + public void ListDirectory_ListsAllDirectories() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory($"{testDirectory}dir1"); + fileSystem.AddDirectory($"{testDirectory}dir2"); + fileSystem.AddDirectory($"{testDirectory}dir3"); + fileSystem.AddFile($"{testDirectory}file_0.zip", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + Assert.Equal(3, ds.ListDirectory(testDirectory).Count()); + } + + [Fact] + public void ListDirectory_ListsOnlyNonSystemAndHiddenOnly() + { + const 
string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory($"{testDirectory}dir1"); + var di = fileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}dir1"); + di.Attributes |= FileAttributes.System; + fileSystem.AddDirectory($"{testDirectory}dir2"); + di = fileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}dir2"); + di.Attributes |= FileAttributes.Hidden; + fileSystem.AddDirectory($"{testDirectory}dir3"); + fileSystem.AddFile($"{testDirectory}file_0.zip", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + Assert.Equal(1, ds.ListDirectory(testDirectory).Count()); + } + + #endregion + + #region ReadFileAsync + + [Fact] + public async Task ReadFileAsync_ShouldGetBytes() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file_1.zip", new MockFileData("Hello")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var bytes = await ds.ReadFileAsync($"{testDirectory}file_1.zip"); + Assert.Equal(Encoding.UTF8.GetBytes("Hello"), bytes); + } + + [Fact] + public async Task ReadFileAsync_ShouldReadNothingFromNonExistent() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file_1.zip", new MockFileData("Hello")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var bytes = await ds.ReadFileAsync($"{testDirectory}file_32123.zip"); + Assert.Empty(bytes); + } + + + #endregion + + #region FindHighestDirectoriesFromFiles + + [Theory] + [InlineData(new [] {"C:/Manga/"}, new [] {"C:/Manga/Love Hina/Vol. 01.cbz"}, "C:/Manga/Love Hina")] + [InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/Dir 2/"}, new [] {"C:/Manga/Dir 1/Love Hina/Vol. 01.cbz"}, "C:/Manga/Dir 1/Love Hina")] + [InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/"}, new [] {"D:/Manga/Love Hina/Vol. 01.cbz", "D:/Manga/Vol. 
01.cbz"}, "")] + [InlineData(new [] {"C:/Manga/"}, new [] {"C:/Manga//Love Hina/Vol. 01.cbz"}, "C:/Manga/Love Hina")] + [InlineData(new [] {@"C:\mount\drive\Library\Test Library\Comics\"}, new [] {@"C:\mount\drive\Library\Test Library\Comics\Bruce Lee (1994)\Bruce Lee #001 (1994).cbz"}, @"C:/mount/drive/Library/Test Library/Comics/Bruce Lee (1994)")] + public void FindHighestDirectoriesFromFilesTest(string[] rootDirectories, string[] files, string expectedDirectory) + { + var fileSystem = new MockFileSystem(); + foreach (var directory in rootDirectories) + { + fileSystem.AddDirectory(directory); + } + foreach (var f in files) + { + fileSystem.AddFile(f, new MockFileData("")); + } + var ds = new DirectoryService(Substitute.For>(), fileSystem); + + var actual = ds.FindHighestDirectoriesFromFiles(rootDirectories, files); + var expected = new Dictionary(); + if (!string.IsNullOrEmpty(expectedDirectory)) + { + expected = new Dictionary {{expectedDirectory, ""}}; + } + + Assert.Equal(expected, actual); + } + + #endregion + + #region GetFoldersTillRoot + + [Theory] + [InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")] + [InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake", "Omake,Specials,Love Hina")] + [InlineData("C:/Manga", "C:/Manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")] + [InlineData("C:/Manga", @"C:\Manga\Love Hina\Specials\Omake\", "Omake,Specials,Love Hina")] + [InlineData(@"/manga/", @"/manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")] + [InlineData(@"/manga/", @"/manga/", "")] + [InlineData(@"E:\test", @"E:\test\Sweet X Trouble\Sweet X Trouble - Chapter 001.cbz", "Sweet X Trouble")] + [InlineData(@"C:\/mount/gdrive/Library/Test Library/Comics/", @"C:\/mount/gdrive/Library/Test Library/Comics\godzilla rivals vs hedorah\vol 1\", "vol 1,godzilla rivals vs hedorah")] + [InlineData(@"/manga/", @"/manga/Btooom!/Vol.1 Chapter 2/1.cbz", "Vol.1 Chapter 2,Btooom!")] + [InlineData(@"C:/", 
@"C://Btooom!/Vol.1 Chapter 2/1.cbz", "Vol.1 Chapter 2,Btooom!")] + [InlineData(@"C:\\", @"C://Btooom!/Vol.1 Chapter 2/1.cbz", "Vol.1 Chapter 2,Btooom!")] + [InlineData(@"C://mount/gdrive/Library/Test Library/Comics", @"C://mount/gdrive/Library/Test Library/Comics/Dragon Age/Test", "Test,Dragon Age")] + [InlineData(@"M:\", @"M:\Toukyou Akazukin\Vol. 01 Ch. 005.cbz", @"Toukyou Akazukin")] + public void GetFoldersTillRoot_Test(string rootPath, string fullpath, string expectedArray) + { + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory(rootPath); + fileSystem.AddFile(fullpath, new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + + var expected = expectedArray.Split(","); + if (expectedArray.Equals(string.Empty)) + { + expected = Array.Empty(); + } + Assert.Equal(expected, ds.GetFoldersTillRoot(rootPath, fullpath)); + } + + #endregion + + #region RemoveNonImages + + [Fact] + public void RemoveNonImages() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory(testDirectory); + fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-2.png", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-3.webp", new MockFileData("abc")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.RemoveNonImages($"{testDirectory}"); + Assert.False(fileSystem.FileExists($"{testDirectory}file/data-0.txt")); + Assert.Equal(3, ds.GetFiles($"{testDirectory}", searchOption:SearchOption.AllDirectories).Count()); + } + + #endregion + + #region Flatten + + [Fact] + public void Flatten_ShouldDoNothing() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory(testDirectory); + fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc")); + 
fileSystem.AddFile($"{testDirectory}data-2.png", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-3.webp", new MockFileData("abc")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.Flatten($"{testDirectory}"); + Assert.True(fileSystem.FileExists($"{testDirectory}data-1.jpg")); + Assert.True(fileSystem.FileExists($"{testDirectory}data-2.png")); + Assert.True(fileSystem.FileExists($"{testDirectory}data-3.webp")); + } + + [Fact] + public void Flatten_ShouldFlatten() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory(testDirectory); + fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}subdir/data-3.webp", new MockFileData("abc")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.Flatten($"{testDirectory}"); + Assert.Equal(2, ds.GetFiles(testDirectory).Count()); + Assert.False(fileSystem.FileExists($"{testDirectory}subdir/data-3.webp")); + Assert.True(fileSystem.Directory.Exists($"{testDirectory}subdir/")); + } + + [Fact] + public void Flatten_ShouldFlatten_WithoutMacosx() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory(testDirectory); + fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}subdir/data-3.webp", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}__MACOSX/data-4.webp", new MockFileData("abc")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.Flatten($"{testDirectory}"); + Assert.Equal(2, ds.GetFiles(testDirectory).Count()); + Assert.False(fileSystem.FileExists($"{testDirectory}data-4.webp")); + } + + #endregion + + #region CheckWriteAccess + + [Fact] + public async Task CheckWriteAccess_ShouldHaveAccess() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + + var ds = new 
DirectoryService(Substitute.For>(), fileSystem); + var hasAccess = await ds.CheckWriteAccess(ds.FileSystem.Path.Join(testDirectory, "bookmarks")); + Assert.True(hasAccess); + + Assert.False(ds.FileSystem.Directory.Exists(ds.FileSystem.Path.Join(testDirectory, "bookmarks"))); + Assert.False(ds.FileSystem.File.Exists(ds.FileSystem.Path.Join(testDirectory, "bookmarks", "test.txt"))); + } + + + #endregion + + #region GetHumanReadableBytes + + [Theory] + [InlineData(1200, "1.17 KB")] + [InlineData(1, "1 B")] + [InlineData(10000000, "9.54 MB")] + [InlineData(10000000000, "9.31 GB")] + public void GetHumanReadableBytesTest(long bytes, string expected) + { + Assert.Equal(expected, DirectoryService.GetHumanReadableBytes(bytes)); + } + #endregion + + #region ScanFiles + + [Fact] + public Task ScanFiles_ShouldFindNoFiles_AllAreIgnored() + { + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory("C:/Data/"); + fileSystem.AddDirectory("C:/Data/Accel World"); + fileSystem.AddDirectory("C:/Data/Accel World/Specials/"); + fileSystem.AddFile("C:/Data/Accel World/Accel World v1.cbz", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Accel World/Accel World v2.cbz", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Accel World/Accel World v2.pdf", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Accel World/Specials/Accel World SP01.cbz", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/.kavitaignore", new MockFileData("*.*")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + + + var allFiles = ds.ScanFiles("C:/Data/"); + + Assert.Equal(0, allFiles.Count); + + return Task.CompletedTask; + } + + + [Fact] + public Task ScanFiles_ShouldFindNoNestedFiles_IgnoreNestedFiles() + { + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory("C:/Data/"); + fileSystem.AddDirectory("C:/Data/Accel World"); + fileSystem.AddDirectory("C:/Data/Accel World/Specials/"); + fileSystem.AddFile("C:/Data/Accel 
World/Accel World v1.cbz", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Accel World/Accel World v2.cbz", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Accel World/Accel World v2.pdf", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Accel World/Specials/Accel World SP01.cbz", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/.kavitaignore", new MockFileData("**/Accel World/*")); + fileSystem.AddFile("C:/Data/Hello.pdf", new MockFileData(string.Empty)); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + + var allFiles = ds.ScanFiles("C:/Data/"); + + Assert.Equal(1, allFiles.Count); // Ignore files are not counted in files, only valid extensions + + return Task.CompletedTask; + } + + + [Fact] + public Task ScanFiles_NestedIgnore_IgnoreNestedFilesInOneDirectoryOnly() + { + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory("C:/Data/"); + fileSystem.AddDirectory("C:/Data/Accel World"); + fileSystem.AddDirectory("C:/Data/Accel World/Specials/"); + fileSystem.AddDirectory("C:/Data/Specials/"); + fileSystem.AddDirectory("C:/Data/Specials/ArtBooks/"); + fileSystem.AddFile("C:/Data/Accel World/Accel World v1.cbz", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Accel World/Accel World v2.cbz", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Accel World/Accel World v2.pdf", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Accel World/Specials/Accel World SP01.cbz", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/.kavitaignore", new MockFileData("**/Accel World/*")); + fileSystem.AddFile("C:/Data/Specials/.kavitaignore", new MockFileData("**/ArtBooks/*")); + fileSystem.AddFile("C:/Data/Specials/Hi.pdf", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Specials/ArtBooks/art book 01.pdf", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Hello.pdf", new MockFileData(string.Empty)); + + var ds = new 
DirectoryService(Substitute.For>(), fileSystem); + + var allFiles = ds.ScanFiles("C:/Data/"); + + Assert.Equal(2, allFiles.Count); // Ignore files are not counted in files, only valid extensions + + return Task.CompletedTask; + } + + + [Fact] + public Task ScanFiles_ShouldFindAllFiles() + { + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory("C:/Data/"); + fileSystem.AddDirectory("C:/Data/Accel World"); + fileSystem.AddDirectory("C:/Data/Accel World/Specials/"); + fileSystem.AddFile("C:/Data/Accel World/Accel World v1.cbz", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Accel World/Accel World v2.cbz", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Accel World/Accel World v2.pdf", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Accel World/Specials/Accel World SP01.cbz", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Accel World/Specials/Accel World SP01.txt", new MockFileData(string.Empty)); + fileSystem.AddFile("C:/Data/Nothing.pdf", new MockFileData(string.Empty)); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + + var allFiles = ds.ScanFiles("C:/Data/"); + + Assert.Equal(5, allFiles.Count); + + return Task.CompletedTask; + } + #endregion #region GetAllDirectories @@ -966,35 +965,50 @@ namespace API.Tests.Services #endregion - #region GetParentDirectory + #region GetParentDirectory - [Theory] - [InlineData(@"C:/file.txt", "C:/")] - [InlineData(@"C:/folder/file.txt", "C:/folder")] - [InlineData(@"C:/folder/subfolder/file.txt", "C:/folder/subfolder")] - public void GetParentDirectoryName_ShouldFindParentOfFiles(string path, string expected) + [Theory] + [InlineData(@"C:/file.txt", "C:/")] + [InlineData(@"C:/folder/file.txt", "C:/folder")] + [InlineData(@"C:/folder/subfolder/file.txt", "C:/folder/subfolder")] + public void GetParentDirectoryName_ShouldFindParentOfFiles(string path, string expected) + { + var fileSystem = new MockFileSystem(new Dictionary { - var 
fileSystem = new MockFileSystem(new Dictionary - { - { path, new MockFileData(string.Empty)} - }); + { path, new MockFileData(string.Empty)} + }); - var ds = new DirectoryService(Substitute.For>(), fileSystem); - Assert.Equal(expected, ds.GetParentDirectoryName(path)); - } - [Theory] - [InlineData(@"C:/folder", "C:/")] - [InlineData(@"C:/folder/subfolder", "C:/folder")] - [InlineData(@"C:/folder/subfolder/another", "C:/folder/subfolder")] - public void GetParentDirectoryName_ShouldFindParentOfDirectories(string path, string expected) - { - var fileSystem = new MockFileSystem(); - fileSystem.AddDirectory(path); - - var ds = new DirectoryService(Substitute.For>(), fileSystem); - Assert.Equal(expected, ds.GetParentDirectoryName(path)); - } - - #endregion + var ds = new DirectoryService(Substitute.For>(), fileSystem); + Assert.Equal(expected, ds.GetParentDirectoryName(path)); } + [Theory] + [InlineData(@"C:/folder", "C:/")] + [InlineData(@"C:/folder/subfolder", "C:/folder")] + [InlineData(@"C:/folder/subfolder/another", "C:/folder/subfolder")] + public void GetParentDirectoryName_ShouldFindParentOfDirectories(string path, string expected) + { + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory(path); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + Assert.Equal(expected, ds.GetParentDirectoryName(path)); + } + + #endregion + + #region GetLastWriteTime + + [Fact] + public void GetLastWriteTime_ShouldReturnMaxTime_IfNoFiles() + { + const string dir = "C:/manga/"; + var filesystem = new MockFileSystem(); + filesystem.AddDirectory("C:/"); + filesystem.AddDirectory(dir); + var ds = new DirectoryService(Substitute.For>(), filesystem); + + Assert.Equal(DateTime.MaxValue, ds.GetLastWriteTime(dir)); + } + + #endregion } diff --git a/API.Tests/Services/MetadataServiceTests.cs b/API.Tests/Services/MetadataServiceTests.cs index 60a1bd0bd..01a084242 100644 --- a/API.Tests/Services/MetadataServiceTests.cs +++ 
b/API.Tests/Services/MetadataServiceTests.cs @@ -5,38 +5,37 @@ using System.IO.Abstractions.TestingHelpers; using API.Helpers; using API.Services; -namespace API.Tests.Services +namespace API.Tests.Services; + +public class MetadataServiceTests { - public class MetadataServiceTests + private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); + private const string TestCoverImageFile = "thumbnail.jpg"; + private const string TestCoverArchive = @"c:\file in folder.zip"; + private readonly string _testCoverImageDirectory = Path.Join(Directory.GetCurrentDirectory(), @"../../../Services/Test Data/ArchiveService/CoverImages"); + //private readonly MetadataService _metadataService; + // private readonly IUnitOfWork _unitOfWork = Substitute.For(); + // private readonly IImageService _imageService = Substitute.For(); + // private readonly IBookService _bookService = Substitute.For(); + // private readonly IArchiveService _archiveService = Substitute.For(); + // private readonly ILogger _logger = Substitute.For>(); + // private readonly IHubContext _messageHub = Substitute.For>(); + private readonly ICacheHelper _cacheHelper; + + + public MetadataServiceTests() { - private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); - private const string TestCoverImageFile = "thumbnail.jpg"; - private const string TestCoverArchive = @"c:\file in folder.zip"; - private readonly string _testCoverImageDirectory = Path.Join(Directory.GetCurrentDirectory(), @"../../../Services/Test Data/ArchiveService/CoverImages"); - //private readonly MetadataService _metadataService; - // private readonly IUnitOfWork _unitOfWork = Substitute.For(); - // private readonly IImageService _imageService = Substitute.For(); - // private readonly IBookService _bookService = Substitute.For(); - // private readonly IArchiveService _archiveService = 
Substitute.For(); - // private readonly ILogger _logger = Substitute.For>(); - // private readonly IHubContext _messageHub = Substitute.For>(); - private readonly ICacheHelper _cacheHelper; - - - public MetadataServiceTests() + //_metadataService = new MetadataService(_unitOfWork, _logger, _archiveService, _bookService, _imageService, _messageHub); + var file = new MockFileData("") { - //_metadataService = new MetadataService(_unitOfWork, _logger, _archiveService, _bookService, _imageService, _messageHub); - var file = new MockFileData("") - { - LastWriteTime = DateTimeOffset.Now.Subtract(TimeSpan.FromMinutes(1)) - }; - var fileSystem = new MockFileSystem(new Dictionary - { - { TestCoverArchive, file } - }); + LastWriteTime = DateTimeOffset.Now.Subtract(TimeSpan.FromMinutes(1)) + }; + var fileSystem = new MockFileSystem(new Dictionary + { + { TestCoverArchive, file } + }); - var fileService = new FileService(fileSystem); - _cacheHelper = new CacheHelper(fileService); - } + var fileService = new FileService(fileSystem); + _cacheHelper = new CacheHelper(fileService); } } diff --git a/API.Tests/Services/ParseScannedFilesTests.cs b/API.Tests/Services/ParseScannedFilesTests.cs index c019b9643..d5e235a80 100644 --- a/API.Tests/Services/ParseScannedFilesTests.cs +++ b/API.Tests/Services/ParseScannedFilesTests.cs @@ -12,6 +12,7 @@ using API.Entities.Enums; using API.Parser; using API.Services; using API.Services.Tasks.Scanner; +using API.Services.Tasks.Scanner.Parser; using API.SignalR; using API.Tests.Helpers; using AutoMapper; @@ -52,7 +53,7 @@ internal class MockReadingItemService : IReadingItemService public void Extract(string fileFilePath, string targetDirectory, MangaFormat format, int imageCount = 1) { - throw new System.NotImplementedException(); + throw new NotImplementedException(); } public ParserInfo Parse(string path, string rootPath, LibraryType type) @@ -244,11 +245,11 @@ public class ParseScannedFilesTests var parsedSeries = new Dictionary>(); - void 
TrackFiles(Tuple> parsedInfo) + Task TrackFiles(Tuple> parsedInfo) { var skippedScan = parsedInfo.Item1; var parsedFiles = parsedInfo.Item2; - if (parsedFiles.Count == 0) return; + if (parsedFiles.Count == 0) return Task.CompletedTask; var foundParsedSeries = new ParsedSeries() { @@ -258,6 +259,7 @@ public class ParseScannedFilesTests }; parsedSeries.Add(foundParsedSeries, parsedFiles); + return Task.CompletedTask; } diff --git a/API.Tests/Services/ReaderServiceTests.cs b/API.Tests/Services/ReaderServiceTests.cs index f50cdd196..71ecc1543 100644 --- a/API.Tests/Services/ReaderServiceTests.cs +++ b/API.Tests/Services/ReaderServiceTests.cs @@ -471,6 +471,53 @@ public class ReaderServiceTests Assert.Equal("21", actualChapter.Range); } + [Fact] + public async Task GetNextChapterIdAsync_ShouldRollIntoNextVolumeWithFloat() + { + await ResetDb(); + + _context.Series.Add(new Series() + { + Name = "Test", + Library = new Library() { + Name = "Test LIb", + Type = LibraryType.Manga, + }, + Volumes = new List() + { + EntityFactory.CreateVolume("1", new List() + { + EntityFactory.CreateChapter("1", false, new List()), + EntityFactory.CreateChapter("2", false, new List()), + }), + EntityFactory.CreateVolume("1.5", new List() + { + EntityFactory.CreateChapter("21", false, new List()), + EntityFactory.CreateChapter("22", false, new List()), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("31", false, new List()), + EntityFactory.CreateChapter("32", false, new List()), + }), + } + }); + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007" + }); + + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + + + var nextChapter = await readerService.GetNextChapterIdAsync(1, 1, 2, 1); + var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(nextChapter); + Assert.Equal("21", actualChapter.Range); + } + [Fact] public async Task 
GetNextChapterIdAsync_ShouldRollIntoChaptersFromVolume() { @@ -895,6 +942,107 @@ public class ReaderServiceTests Assert.Equal("1", actualChapter.Range); } + [Fact] + public async Task GetPrevChapterIdAsync_ShouldGetPrevVolume_WithFloatVolume() + { + // V1 -> V2 + await ResetDb(); + + _context.Series.Add(new Series() + { + Name = "Test", + Library = new Library() { + Name = "Test LIb", + Type = LibraryType.Manga, + }, + Volumes = new List() + { + EntityFactory.CreateVolume("1", new List() + { + EntityFactory.CreateChapter("1", false, new List()), + EntityFactory.CreateChapter("2", false, new List()), + }), + EntityFactory.CreateVolume("1.5", new List() + { + EntityFactory.CreateChapter("21", false, new List()), + EntityFactory.CreateChapter("22", false, new List()), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("31", false, new List()), + EntityFactory.CreateChapter("32", false, new List()), + }), + } + }); + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007" + }); + + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + + var prevChapter = await readerService.GetPrevChapterIdAsync(1, 3, 5, 1); + var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(prevChapter); + Assert.Equal("22", actualChapter.Range); + } + + [Fact] + public async Task GetPrevChapterIdAsync_ShouldGetPrevVolume_2() + { + await ResetDb(); + + _context.Series.Add(new Series() + { + Name = "Test", + Library = new Library() { + Name = "Test LIb", + Type = LibraryType.Manga, + }, + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("40", false, new List(), 1), + EntityFactory.CreateChapter("50", false, new List(), 1), + EntityFactory.CreateChapter("60", false, new List(), 1), + EntityFactory.CreateChapter("Some Special Title", true, new List(), 1), + }), + EntityFactory.CreateVolume("1997", new List() 
+ { + EntityFactory.CreateChapter("1", false, new List(), 1), + }), + EntityFactory.CreateVolume("2001", new List() + { + EntityFactory.CreateChapter("21", false, new List(), 1), + }), + EntityFactory.CreateVolume("2005", new List() + { + EntityFactory.CreateChapter("31", false, new List(), 1), + }), + } + }); + + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007" + }); + + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + + // prevChapter should be id from ch.21 from volume 2001 + var prevChapter = await readerService.GetPrevChapterIdAsync(1, 4, 7, 1); + + var actualChapter = await _unitOfWork.ChapterRepository.GetChapterAsync(prevChapter); + Assert.NotNull(actualChapter); + Assert.Equal("21", actualChapter.Range); + } + [Fact] public async Task GetPrevChapterIdAsync_ShouldRollIntoPrevVolume() { @@ -2172,4 +2320,132 @@ public class ReaderServiceTests } #endregion + + #region MarkVolumesUntilAsRead + [Fact] + public async Task MarkVolumesUntilAsRead_ShouldMarkVolumesAsRead() + { + await ResetDb(); + _context.Series.Add(new Series() + { + Name = "Test", + Library = new Library() { + Name = "Test LIb", + Type = LibraryType.Manga, + }, + Volumes = new List() + + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("10", false, new List(), 1), + EntityFactory.CreateChapter("20", false, new List(), 1), + EntityFactory.CreateChapter("30", false, new List(), 1), + EntityFactory.CreateChapter("Some Special Title", true, new List(), 1), + }), + + EntityFactory.CreateVolume("1997", new List() + { + EntityFactory.CreateChapter("0", false, new List(), 1), + }), + EntityFactory.CreateVolume("2002", new List() + { + EntityFactory.CreateChapter("0", false, new List(), 1), + }), + EntityFactory.CreateVolume("2003", new List() + { + EntityFactory.CreateChapter("0", false, new List(), 1), + }), + } + }); + + _context.AppUser.Add(new AppUser() + { + UserName = 
"majora2007" + }); + + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress); + await readerService.MarkVolumesUntilAsRead(user, 1, 2002); + await _context.SaveChangesAsync(); + + // Validate loose leaf chapters don't get marked as read + Assert.Null((await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(1, 1))); + Assert.Null((await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(2, 1))); + Assert.Null((await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(3, 1))); + + // Validate that volumes 1997 and 2002 both have their respective chapter 0 marked as read + Assert.Equal(1, (await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(5, 1)).PagesRead); + Assert.Equal(1, (await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(6, 1)).PagesRead); + // Validate that the chapter 0 of the following volume (2003) is not read + Assert.Null(await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(7, 1)); + + } + + [Fact] + public async Task MarkVolumesUntilAsRead_ShouldMarkChapterBasedVolumesAsRead() + { + await ResetDb(); + _context.Series.Add(new Series() + { + Name = "Test", + Library = new Library() { + Name = "Test LIb", + Type = LibraryType.Manga, + }, + Volumes = new List() + + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("10", false, new List(), 1), + EntityFactory.CreateChapter("20", false, new List(), 1), + EntityFactory.CreateChapter("30", false, new List(), 1), + EntityFactory.CreateChapter("Some Special Title", true, new List(), 1), + }), + + EntityFactory.CreateVolume("1997", new List() + { + EntityFactory.CreateChapter("1", false, new List(), 1), + }), + EntityFactory.CreateVolume("2002", new List() + { + EntityFactory.CreateChapter("2", false, new List(), 1), + }), + 
EntityFactory.CreateVolume("2003", new List() + { + EntityFactory.CreateChapter("3", false, new List(), 1), + }), + } + }); + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007" + }); + + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress); + await readerService.MarkVolumesUntilAsRead(user, 1, 2002); + await _context.SaveChangesAsync(); + + // Validate loose leaf chapters don't get marked as read + Assert.Null((await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(1, 1))); + Assert.Null((await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(2, 1))); + Assert.Null((await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(3, 1))); + + // Validate volumes chapter 0 have read status + Assert.Equal(1, (await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(5, 1)).PagesRead); + Assert.Equal(1, (await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(6, 1)).PagesRead); + Assert.Null((await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(3, 1))); + } + + #endregion + } diff --git a/API.Tests/Services/ReadingListServiceTests.cs b/API.Tests/Services/ReadingListServiceTests.cs index 4df8fb688..6472f8fb1 100644 --- a/API.Tests/Services/ReadingListServiceTests.cs +++ b/API.Tests/Services/ReadingListServiceTests.cs @@ -4,10 +4,13 @@ using System.IO.Abstractions.TestingHelpers; using System.Linq; using System.Threading.Tasks; using API.Data; +using API.Data.Repositories; +using API.DTOs.ReadingLists; using API.Entities; using API.Entities.Enums; using API.Helpers; using API.Services; +using API.SignalR; using AutoMapper; using Microsoft.Data.Sqlite; using Microsoft.EntityFrameworkCore; @@ -79,9 +82,10 @@ public class ReadingListServiceTests private async Task ResetDb() { - 
_context.Series.RemoveRange(_context.Series.ToList()); - - await _context.SaveChangesAsync(); + _context.AppUser.RemoveRange(_context.AppUser); + _context.Series.RemoveRange(_context.Series); + _context.ReadingList.RemoveRange(_context.ReadingList); + await _unitOfWork.CommitAsync(); } private static MockFileSystem CreateFileSystem() @@ -99,11 +103,373 @@ public class ReadingListServiceTests #endregion + #region UpdateReadingListItemPosition - #region RemoveFullyReadItems + [Fact] + public async Task UpdateReadingListItemPosition_MoveLastToFirst_TwoItemsShouldShift() + { + await ResetDb(); + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + ReadingLists = new List(), + Libraries = new List() + { + new Library() + { + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() + { + Name = "Test", + Metadata = DbFactory.SeriesMetadata(new List()), + Volumes = new List() + { + new Volume() + { + Name = "0", + Chapters = new List() + { + new Chapter() + { + Number = "1", + AgeRating = AgeRating.Everyone, + }, + new Chapter() + { + Number = "2", + AgeRating = AgeRating.X18Plus + }, + new Chapter() + { + Number = "3", + AgeRating = AgeRating.X18Plus + } + } + } + } + } + } + }, + } + }); + + await _context.SaveChangesAsync(); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.ReadingLists); + var readingList = new ReadingList(); + user.ReadingLists = new List() + { + readingList + }; + + await _readingListService.AddChaptersToReadingList(1, new List() {1, 2, 3}, readingList); + await _unitOfWork.CommitAsync(); + Assert.Equal(3, readingList.Items.Count); + + await _readingListService.UpdateReadingListItemPosition(new UpdateReadingListPosition() + { + FromPosition = 2, ToPosition = 0, ReadingListId = 1, ReadingListItemId = 3 + }); + + + Assert.Equal(3, readingList.Items.Count); + Assert.Equal(0, readingList.Items.Single(i => i.ChapterId == 3).Order); + Assert.Equal(1, 
readingList.Items.Single(i => i.ChapterId == 1).Order); + Assert.Equal(2, readingList.Items.Single(i => i.ChapterId == 2).Order); + } - // TODO: Implement all methods here #endregion + #region DeleteReadingListItem + + [Fact] + public async Task DeleteReadingListItem_DeleteFirstItem_SecondShouldBecomeFirst() + { + await ResetDb(); + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + ReadingLists = new List(), + Libraries = new List() + { + new Library() + { + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() + { + Name = "Test", + Metadata = DbFactory.SeriesMetadata(new List()), + Volumes = new List() + { + new Volume() + { + Name = "0", + Chapters = new List() + { + new Chapter() + { + Number = "1", + AgeRating = AgeRating.Everyone + }, + new Chapter() + { + Number = "2", + AgeRating = AgeRating.X18Plus + } + } + } + } + } + } + }, + } + }); + + await _context.SaveChangesAsync(); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.ReadingLists); + var readingList = new ReadingList(); + user.ReadingLists = new List() + { + readingList + }; + + await _readingListService.AddChaptersToReadingList(1, new List() {1, 2}, readingList); + await _unitOfWork.CommitAsync(); + Assert.Equal(2, readingList.Items.Count); + + await _readingListService.DeleteReadingListItem(new UpdateReadingListPosition() + { + ReadingListId = 1, ReadingListItemId = 1 + }); + + Assert.Equal(1, readingList.Items.Count); + Assert.Equal(2, readingList.Items.First().ChapterId); + } + + #endregion + + #region RemoveFullyReadItems + + [Fact] + public async Task RemoveFullyReadItems_RemovesAllFullyReadItems() + { + await ResetDb(); + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + ReadingLists = new List(), + Libraries = new List() + { + new Library() + { + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() + { + Name = "Test", + Metadata = 
DbFactory.SeriesMetadata(new List()), + Volumes = new List() + { + new Volume() + { + Name = "0", + Chapters = new List() + { + new Chapter() + { + Number = "1", + AgeRating = AgeRating.Everyone, + Pages = 1 + }, + new Chapter() + { + Number = "2", + AgeRating = AgeRating.X18Plus, + Pages = 1 + }, + new Chapter() + { + Number = "3", + AgeRating = AgeRating.X18Plus, + Pages = 1 + } + } + } + } + } + } + }, + } + }); + + await _context.SaveChangesAsync(); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.ReadingLists | AppUserIncludes.Progress); + var readingList = new ReadingList(); + user.ReadingLists = new List() + { + readingList + }; + + await _readingListService.AddChaptersToReadingList(1, new List() {1, 2, 3}, readingList); + await _unitOfWork.CommitAsync(); + Assert.Equal(3, readingList.Items.Count); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), + Substitute.For()); + // Mark 2 as fully read + await readerService.MarkChaptersAsRead(user, 1, + (await _unitOfWork.ChapterRepository.GetChaptersByIdsAsync(new List() {2})).ToList()); + await _unitOfWork.CommitAsync(); + + await _readingListService.RemoveFullyReadItems(1, user); + + + Assert.Equal(2, readingList.Items.Count); + Assert.DoesNotContain(readingList.Items, i => i.Id == 2); + } + + + #endregion + + + #region CalculateAgeRating + + [Fact] + public async Task CalculateAgeRating_ShouldUpdateToUnknown_IfNoneSet() + { + await ResetDb(); + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + ReadingLists = new List(), + Libraries = new List() + { + new Library() + { + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() + { + Name = "Test", + Metadata = DbFactory.SeriesMetadata(new List()), + Volumes = new List() + { + new Volume() + { + Name = "0", + Chapters = new List() + { + new Chapter() + { + Number = "1", + }, + new Chapter() + { + Number = "2", + } + } + } + } + } + } + }, + } 
+ }); + + await _context.SaveChangesAsync(); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.ReadingLists); + var readingList = new ReadingList(); + user.ReadingLists = new List() + { + readingList + }; + + await _readingListService.AddChaptersToReadingList(1, new List() {1, 2}, readingList); + + + _unitOfWork.UserRepository.Update(user); + await _unitOfWork.CommitAsync(); + + await _readingListService.CalculateReadingListAgeRating(readingList); + Assert.Equal(AgeRating.Unknown, readingList.AgeRating); + } + + [Fact] + public async Task CalculateAgeRating_ShouldUpdateToMax() + { + await ResetDb(); + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + ReadingLists = new List(), + Libraries = new List() + { + new Library() + { + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() + { + Name = "Test", + Metadata = DbFactory.SeriesMetadata(new List()), + Volumes = new List() + { + new Volume() + { + Name = "0", + Chapters = new List() + { + new Chapter() + { + Number = "1", + }, + new Chapter() + { + Number = "2", + } + } + } + } + } + } + }, + } + }); + + await _context.SaveChangesAsync(); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.ReadingLists); + var readingList = new ReadingList(); + user.ReadingLists = new List() + { + readingList + }; + + await _readingListService.AddChaptersToReadingList(1, new List() {1, 2}, readingList); + + + _unitOfWork.UserRepository.Update(user); + await _unitOfWork.CommitAsync(); + + await _readingListService.CalculateReadingListAgeRating(readingList); + Assert.Equal(AgeRating.Unknown, readingList.AgeRating); + } + + #endregion } diff --git a/API.Tests/Services/ScannerServiceTests.cs b/API.Tests/Services/ScannerServiceTests.cs index f54f2d3e9..2298aa003 100644 --- a/API.Tests/Services/ScannerServiceTests.cs +++ b/API.Tests/Services/ScannerServiceTests.cs @@ -9,124 +9,123 @@ using 
API.Services.Tasks.Scanner; using API.Tests.Helpers; using Xunit; -namespace API.Tests.Services +namespace API.Tests.Services; + +public class ScannerServiceTests { - public class ScannerServiceTests + [Fact] + public void FindSeriesNotOnDisk_Should_Remove1() { - [Fact] - public void FindSeriesNotOnDisk_Should_Remove1() + var infos = new Dictionary>(); + + ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Archive}); + //AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Epub}); + + var existingSeries = new List { - var infos = new Dictionary>(); - - ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Archive}); - //AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Epub}); - - var existingSeries = new List + new Series() { - new Series() + Name = "Darker Than Black", + LocalizedName = "Darker Than Black", + OriginalName = "Darker Than Black", + Volumes = new List() { - Name = "Darker Than Black", - LocalizedName = "Darker Than Black", - OriginalName = "Darker Than Black", - Volumes = new List() + new Volume() { - new Volume() - { - Number = 1, - Name = "1" - } - }, - NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker Than Black"), - Metadata = new SeriesMetadata(), - Format = MangaFormat.Epub - } - }; - - Assert.Equal(1, ScannerService.FindSeriesNotOnDisk(existingSeries, infos).Count()); - } - - [Fact] - public void FindSeriesNotOnDisk_Should_RemoveNothing_Test() - { - var infos = new Dictionary>(); - - ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Format = MangaFormat.Archive}); - ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1", Format = MangaFormat.Archive}); - 
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10", Format = MangaFormat.Archive}); - - var existingSeries = new List - { - new Series() - { - Name = "Cage of Eden", - LocalizedName = "Cage of Eden", - OriginalName = "Cage of Eden", - NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Cage of Eden"), - Metadata = new SeriesMetadata(), - Format = MangaFormat.Archive + Number = 1, + Name = "1" + } }, - new Series() - { - Name = "Darker Than Black", - LocalizedName = "Darker Than Black", - OriginalName = "Darker Than Black", - NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker Than Black"), - Metadata = new SeriesMetadata(), - Format = MangaFormat.Archive - } - }; - - - - Assert.Empty(ScannerService.FindSeriesNotOnDisk(existingSeries, infos)); - } - - - // TODO: Figure out how to do this with ParseScannedFiles - // [Theory] - // [InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")] - // [InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")] - // [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker than Black")] - // [InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")] - // public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected) - // { - // var collectedSeries = new ConcurrentDictionary>(); - // foreach (var seriesName in existingSeriesNames) - // { - // AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName, Format = MangaFormat.Archive}); - // } - // - // var actualName = new ParseScannedFiles(_bookService, _logger).MergeName(collectedSeries, new ParserInfo() - // { - // Series = parsedInfoName, - // Format = MangaFormat.Archive - // }); - // - // Assert.Equal(expected, actualName); - // } - - // [Fact] - // public void RemoveMissingSeries_Should_RemoveSeries() - // { - // var existingSeries = new List() - // { - // 
EntityFactory.CreateSeries("Darker than Black Vol 1"), - // EntityFactory.CreateSeries("Darker than Black"), - // EntityFactory.CreateSeries("Beastars"), - // }; - // var missingSeries = new List() - // { - // EntityFactory.CreateSeries("Darker than Black Vol 1"), - // }; - // existingSeries = ScannerService.RemoveMissingSeries(existingSeries, missingSeries, out var removeCount).ToList(); - // - // Assert.DoesNotContain(missingSeries[0].Name, existingSeries.Select(s => s.Name)); - // Assert.Equal(missingSeries.Count, removeCount); - // } - - - // TODO: I want a test for UpdateSeries where if I have chapter 10 and now it's mapping into Vol 2 Chapter 10, - // if I can do it without deleting the underlying chapter (aka id change) + NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker Than Black"), + Metadata = new SeriesMetadata(), + Format = MangaFormat.Epub + } + }; + Assert.Equal(1, ScannerService.FindSeriesNotOnDisk(existingSeries, infos).Count()); } + + [Fact] + public void FindSeriesNotOnDisk_Should_RemoveNothing_Test() + { + var infos = new Dictionary>(); + + ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Format = MangaFormat.Archive}); + ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1", Format = MangaFormat.Archive}); + ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10", Format = MangaFormat.Archive}); + + var existingSeries = new List + { + new Series() + { + Name = "Cage of Eden", + LocalizedName = "Cage of Eden", + OriginalName = "Cage of Eden", + NormalizedName = API.Services.Tasks.Scanner.Parser.Parser.Normalize("Cage of Eden"), + Metadata = new SeriesMetadata(), + Format = MangaFormat.Archive + }, + new Series() + { + Name = "Darker Than Black", + LocalizedName = "Darker Than Black", + OriginalName = "Darker Than Black", + NormalizedName = 
API.Services.Tasks.Scanner.Parser.Parser.Normalize("Darker Than Black"), + Metadata = new SeriesMetadata(), + Format = MangaFormat.Archive + } + }; + + + + Assert.Empty(ScannerService.FindSeriesNotOnDisk(existingSeries, infos)); + } + + + // TODO: Figure out how to do this with ParseScannedFiles + // [Theory] + // [InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")] + // [InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")] + // [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker than Black")] + // [InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")] + // public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected) + // { + // var collectedSeries = new ConcurrentDictionary>(); + // foreach (var seriesName in existingSeriesNames) + // { + // AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName, Format = MangaFormat.Archive}); + // } + // + // var actualName = new ParseScannedFiles(_bookService, _logger).MergeName(collectedSeries, new ParserInfo() + // { + // Series = parsedInfoName, + // Format = MangaFormat.Archive + // }); + // + // Assert.Equal(expected, actualName); + // } + + // [Fact] + // public void RemoveMissingSeries_Should_RemoveSeries() + // { + // var existingSeries = new List() + // { + // EntityFactory.CreateSeries("Darker than Black Vol 1"), + // EntityFactory.CreateSeries("Darker than Black"), + // EntityFactory.CreateSeries("Beastars"), + // }; + // var missingSeries = new List() + // { + // EntityFactory.CreateSeries("Darker than Black Vol 1"), + // }; + // existingSeries = ScannerService.RemoveMissingSeries(existingSeries, missingSeries, out var removeCount).ToList(); + // + // Assert.DoesNotContain(missingSeries[0].Name, existingSeries.Select(s => s.Name)); + // Assert.Equal(missingSeries.Count, removeCount); + // } + + + // TODO: I want a test for UpdateSeries where if I have chapter 10 and now it's 
mapping into Vol 2 Chapter 10, + // if I can do it without deleting the underlying chapter (aka id change) + } diff --git a/API.Tests/Services/SeriesServiceTests.cs b/API.Tests/Services/SeriesServiceTests.cs index 82ebd2197..8307136b7 100644 --- a/API.Tests/Services/SeriesServiceTests.cs +++ b/API.Tests/Services/SeriesServiceTests.cs @@ -1,4 +1,5 @@ -using System.Collections.Generic; +using System; +using System.Collections.Generic; using System.Data.Common; using System.IO.Abstractions.TestingHelpers; using System.Linq; @@ -8,9 +9,10 @@ using API.Data.Repositories; using API.DTOs; using API.DTOs.CollectionTags; using API.DTOs.Metadata; -using API.DTOs.Reader; +using API.DTOs.SeriesDetail; using API.Entities; using API.Entities.Enums; +using API.Entities.Metadata; using API.Extensions; using API.Helpers; using API.Services; @@ -22,10 +24,7 @@ using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.Infrastructure; using Microsoft.Extensions.Logging; using NSubstitute; -using NSubstitute.Extensions; -using NSubstitute.ReceivedExtensions; using Xunit; -using Xunit.Sdk; namespace API.Tests.Services; @@ -85,19 +84,19 @@ public class SeriesServiceTests _context.ServerSetting.Update(setting); - var lib = new Library() - { - Name = "Manga", Folders = new List() {new FolderPath() {Path = "C:/data/"}} - }; - - _context.AppUser.Add(new AppUser() - { - UserName = "majora2007", - Libraries = new List() - { - lib - } - }); + // var lib = new Library() + // { + // Name = "Manga", Folders = new List() {new FolderPath() {Path = "C:/data/"}} + // }; + // + // _context.AppUser.Add(new AppUser() + // { + // UserName = "majora2007", + // Libraries = new List() + // { + // lib + // } + // }); return await _context.SaveChangesAsync() > 0; } @@ -109,6 +108,7 @@ public class SeriesServiceTests _context.Genre.RemoveRange(_context.Genre.ToList()); _context.CollectionTag.RemoveRange(_context.CollectionTag.ToList()); _context.Person.RemoveRange(_context.Person.ToList()); + 
_context.Library.RemoveRange(_context.Library.ToList()); await _context.SaveChangesAsync(); } @@ -126,6 +126,26 @@ public class SeriesServiceTests return fileSystem; } + private static UpdateRelatedSeriesDto CreateRelationsDto(Series series) + { + return new UpdateRelatedSeriesDto() + { + SeriesId = series.Id, + Prequels = new List(), + Adaptations = new List(), + Characters = new List(), + Contains = new List(), + Doujinshis = new List(), + Others = new List(), + Sequels = new List(), + AlternativeSettings = new List(), + AlternativeVersions = new List(), + SideStories = new List(), + SpinOffs = new List(), + Editions = new List() + }; + } + #endregion #region SeriesDetail @@ -135,33 +155,45 @@ public class SeriesServiceTests { await ResetDb(); - _context.Series.Add(new Series() + _context.Library.Add(new Library() { - Name = "Test", - Library = new Library() { - Name = "Test LIb", - Type = LibraryType.Manga, - }, - Volumes = new List() + AppUsers = new List() { - EntityFactory.CreateVolume("0", new List() + new AppUser() { - EntityFactory.CreateChapter("Omake", true, new List()), - EntityFactory.CreateChapter("Something SP02", true, new List()), - }), - EntityFactory.CreateVolume("2", new List() + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() { - EntityFactory.CreateChapter("21", false, new List()), - EntityFactory.CreateChapter("22", false, new List()), - }), - EntityFactory.CreateVolume("3", new List() - { - EntityFactory.CreateChapter("31", false, new List()), - EntityFactory.CreateChapter("32", false, new List()), - }), + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("Omake", true, new List()), + EntityFactory.CreateChapter("Something SP02", true, new List()), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("21", false, new List()), + EntityFactory.CreateChapter("22", false, 
new List()), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("31", false, new List()), + EntityFactory.CreateChapter("32", false, new List()), + }), + } + } } }); + await _context.SaveChangesAsync(); var expectedRanges = new[] {"Omake", "Something SP02"}; @@ -177,30 +209,41 @@ public class SeriesServiceTests { await ResetDb(); - _context.Series.Add(new Series() + _context.Library.Add(new Library() { - Name = "Test", - Library = new Library() { - Name = "Test LIb", - Type = LibraryType.Manga, - }, - Volumes = new List() + AppUsers = new List() { - EntityFactory.CreateVolume("0", new List() + new AppUser() { - EntityFactory.CreateChapter("1", false, new List()), - EntityFactory.CreateChapter("2", false, new List()), - }), - EntityFactory.CreateVolume("2", new List() + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() + { + new Series() { - EntityFactory.CreateChapter("21", false, new List()), - EntityFactory.CreateChapter("22", false, new List()), - }), - EntityFactory.CreateVolume("3", new List() - { - EntityFactory.CreateChapter("31", false, new List()), - EntityFactory.CreateChapter("32", false, new List()), - }), + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("1", false, new List()), + EntityFactory.CreateChapter("2", false, new List()), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("21", false, new List()), + EntityFactory.CreateChapter("22", false, new List()), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("31", false, new List()), + EntityFactory.CreateChapter("32", false, new List()), + }), + } + } } }); @@ -220,28 +263,39 @@ public class SeriesServiceTests { await ResetDb(); - _context.Series.Add(new Series() + _context.Library.Add(new Library() { - Name = "Test", - Library = new Library() { - Name = "Test LIb", - 
Type = LibraryType.Manga, - }, - Volumes = new List() + AppUsers = new List() { - EntityFactory.CreateVolume("0", new List() + new AppUser() { - EntityFactory.CreateChapter("1", false, new List()), - EntityFactory.CreateChapter("2", false, new List()), - }), - EntityFactory.CreateVolume("2", new List() + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() + { + new Series() { - EntityFactory.CreateChapter("0", false, new List()), - }), - EntityFactory.CreateVolume("3", new List() - { - EntityFactory.CreateChapter("31", false, new List()), - }), + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("1", false, new List()), + EntityFactory.CreateChapter("2", false, new List()), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("0", false, new List()), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("31", false, new List()), + }), + } + } } }); @@ -261,28 +315,39 @@ public class SeriesServiceTests { await ResetDb(); - _context.Series.Add(new Series() + _context.Library.Add(new Library() { - Name = "Test", - Library = new Library() { - Name = "Test LIb", - Type = LibraryType.Manga, - }, - Volumes = new List() + AppUsers = new List() { - EntityFactory.CreateVolume("0", new List() + new AppUser() { - EntityFactory.CreateChapter("1", false, new List()), - EntityFactory.CreateChapter("2", false, new List()), - }), - EntityFactory.CreateVolume("2", new List() + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() + { + new Series() { - EntityFactory.CreateChapter("0", false, new List()), - }), - EntityFactory.CreateVolume("3", new List() - { - EntityFactory.CreateChapter("31", false, new List()), - }), + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("1", false, 
new List()), + EntityFactory.CreateChapter("2", false, new List()), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("0", false, new List()), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("31", false, new List()), + }), + } + } } }); @@ -305,26 +370,38 @@ public class SeriesServiceTests { await ResetDb(); - _context.Series.Add(new Series() + _context.Library.Add(new Library() { - Name = "Test", - Library = new Library() { - Name = "Test LIb", - Type = LibraryType.Book, - }, - Volumes = new List() + AppUsers = new List() { - EntityFactory.CreateVolume("2", new List() + new AppUser() { - EntityFactory.CreateChapter("0", false, new List()), - }), - EntityFactory.CreateVolume("3", new List() + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() { - EntityFactory.CreateChapter("0", false, new List()), - }), + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("0", false, new List()), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("0", false, new List()), + }), + } + } } }); + await _context.SaveChangesAsync(); var detail = await _seriesService.GetSeriesDetail(1, 1); @@ -339,26 +416,39 @@ public class SeriesServiceTests { await ResetDb(); - _context.Series.Add(new Series() + _context.Library.Add(new Library() { - Name = "Test", - Library = new Library() { - Name = "Test LIb", - Type = LibraryType.Book, - }, - Volumes = new List() + AppUsers = new List() { - EntityFactory.CreateVolume("0", new List() + new AppUser() { - EntityFactory.CreateChapter("Ano Orokamono ni mo Kyakkou wo! 
- Volume 1.epub", true, new List()), - }), - EntityFactory.CreateVolume("2", new List() + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() { - EntityFactory.CreateChapter("Ano Orokamono ni mo Kyakkou wo! - Volume 2.epub", false, new List()), - }), + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("Ano Orokamono ni mo Kyakkou wo! - Volume 1.epub", true, new List()), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("Ano Orokamono ni mo Kyakkou wo! - Volume 2.epub", false, new List()), + }), + } + } } }); + + await _context.SaveChangesAsync(); var detail = await _seriesService.GetSeriesDetail(1, 1); @@ -379,36 +469,48 @@ public class SeriesServiceTests { await ResetDb(); - _context.Series.Add(new Series() + _context.Library.Add(new Library() { - Name = "Test", - Library = new Library() { - Name = "Test LIb", - Type = LibraryType.Book, - }, - Volumes = new List() + AppUsers = new List() { - EntityFactory.CreateVolume("2", new List() + new AppUser() { - EntityFactory.CreateChapter("0", false, new List()), - }), - EntityFactory.CreateVolume("1.2", new List() + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() + { + new Series() { - EntityFactory.CreateChapter("0", false, new List()), - }), - EntityFactory.CreateVolume("1", new List() - { - EntityFactory.CreateChapter("0", false, new List()), - }), + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("0", false, new List()), + }), + EntityFactory.CreateVolume("1.2", new List() + { + EntityFactory.CreateChapter("0", false, new List()), + }), + EntityFactory.CreateVolume("1", new List() + { + EntityFactory.CreateChapter("0", false, new List()), + }), + } + } } }); + await _context.SaveChangesAsync(); var detail = await 
_seriesService.GetSeriesDetail(1, 1); - Assert.Equal("1", detail.Volumes.ElementAt(0).Name); - Assert.Equal("1.2", detail.Volumes.ElementAt(1).Name); - Assert.Equal("2", detail.Volumes.ElementAt(2).Name); + Assert.Equal("Volume 1", detail.Volumes.ElementAt(0).Name); + Assert.Equal("Volume 1.2", detail.Volumes.ElementAt(1).Name); + Assert.Equal("Volume 2", detail.Volumes.ElementAt(2).Name); } @@ -422,28 +524,34 @@ public class SeriesServiceTests { await ResetDb(); - _context.Series.Add(new Series() + _context.Library.Add(new Library() { - Name = "Test", - Library = new Library() { - Name = "Test LIb", - Type = LibraryType.Manga, - }, - Volumes = new List() + AppUsers = new List() { - new Volume() + new AppUser() { - Chapters = new List() + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() + { + new Series() + { + Name = "Test", + Volumes = new List() { - new Chapter() + EntityFactory.CreateVolume("1", new List() { - Pages = 1 - } + EntityFactory.CreateChapter("1", false, new List(), 1), + }), } } } }); + await _context.SaveChangesAsync(); @@ -470,23 +578,28 @@ public class SeriesServiceTests { await ResetDb(); - _context.Series.Add(new Series() + _context.Library.Add(new Library() { - Name = "Test", - Library = new Library() { - Name = "Test LIb", - Type = LibraryType.Manga, - }, - Volumes = new List() + AppUsers = new List() { - new Volume() + new AppUser() { - Chapters = new List() + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() + { + new Series() + { + Name = "Test", + Volumes = new List() { - new Chapter() + EntityFactory.CreateVolume("1", new List() { - Pages = 1 - } + EntityFactory.CreateChapter("1", false, new List(), 1), + }), } } } @@ -536,23 +649,28 @@ public class SeriesServiceTests { await ResetDb(); - _context.Series.Add(new Series() + _context.Library.Add(new Library() { - Name = "Test", - Library = new Library() { - Name = "Test LIb", - Type 
= LibraryType.Manga, - }, - Volumes = new List() + AppUsers = new List() { - new Volume() + new AppUser() { - Chapters = new List() + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() + { + new Series() + { + Name = "Test", + Volumes = new List() { - new Chapter() + EntityFactory.CreateVolume("1", new List() { - Pages = 1 - } + EntityFactory.CreateChapter("1", false, new List(), 1), + }), } } } @@ -583,23 +701,28 @@ public class SeriesServiceTests { await ResetDb(); - _context.Series.Add(new Series() + _context.Library.Add(new Library() { - Name = "Test", - Library = new Library() { - Name = "Test LIb", - Type = LibraryType.Manga, - }, - Volumes = new List() + AppUsers = new List() { - new Volume() + new AppUser() { - Chapters = new List() + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() + { + new Series() + { + Name = "Test", + Volumes = new List() { - new Chapter() + EntityFactory.CreateVolume("1", new List() { - Pages = 1 - } + EntityFactory.CreateChapter("1", false, new List(), 1), + }), } } } @@ -626,18 +749,6 @@ public class SeriesServiceTests #region UpdateSeriesMetadata - private void SetupUpdateSeriesMetadataDb() - { - _context.Series.Add(new Series() - { - Name = "Test", - Library = new Library() { - Name = "Test LIb", - Type = LibraryType.Book, - } - }); - } - [Fact] public async Task UpdateSeriesMetadata_ShouldCreateEmptyMetadata_IfDoesntExist() { @@ -909,6 +1020,41 @@ public class SeriesServiceTests Assert.True(series.Metadata.GenresLocked); } + [Fact] + public async Task UpdateSeriesMetadata_ShouldNotUpdateReleaseYear_IfLessThan1000() + { + await ResetDb(); + var s = new Series() + { + Name = "Test", + Library = new Library() + { + Name = "Test LIb", + Type = LibraryType.Book, + }, + Metadata = DbFactory.SeriesMetadata(new List()) + }; + _context.Series.Add(s); + await _context.SaveChangesAsync(); + + var success = await 
_seriesService.UpdateSeriesMetadata(new UpdateSeriesMetadataDto() + { + SeriesMetadata = new SeriesMetadataDto() + { + SeriesId = 1, + ReleaseYear = 100, + }, + CollectionTags = new List() + }); + + Assert.True(success); + + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1); + Assert.NotNull(series.Metadata); + Assert.Equal(0, series.Metadata.ReleaseYear); + Assert.False(series.Metadata.ReleaseYearLocked); + } + #endregion #region GetFirstChapterForMetadata @@ -992,4 +1138,388 @@ public class SeriesServiceTests } #endregion + + #region SeriesRelation + [Fact] + public async Task UpdateRelatedSeries_ShouldAddAllRelations() + { + await ResetDb(); + _context.Library.Add(new Library() + { + AppUsers = new List() + { + new AppUser() + { + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() + { + Name = "Test Series", + Volumes = new List(){} + }, + new Series() + { + Name = "Test Series Prequels", + Volumes = new List(){} + }, + new Series() + { + Name = "Test Series Sequels", + Volumes = new List(){} + } + } + }); + + await _context.SaveChangesAsync(); + + var series1 = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Related); + // Add relations + var addRelationDto = CreateRelationsDto(series1); + addRelationDto.Adaptations.Add(2); + addRelationDto.Sequels.Add(3); + await _seriesService.UpdateRelatedSeries(addRelationDto); + Assert.Equal(2, series1.Relations.Single(s => s.TargetSeriesId == 2).TargetSeriesId); + Assert.Equal(3, series1.Relations.Single(s => s.TargetSeriesId == 3).TargetSeriesId); + } + + [Fact] + public async Task UpdateRelatedSeries_DeleteAllRelations() + { + await ResetDb(); + _context.Library.Add(new Library() + { + AppUsers = new List() + { + new AppUser() + { + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() + { + Name = "Test Series", + Volumes = new List(){} + 
}, + new Series() + { + Name = "Test Series Prequels", + Volumes = new List(){} + }, + new Series() + { + Name = "Test Series Sequels", + Volumes = new List(){} + } + } + }); + + await _context.SaveChangesAsync(); + + var series1 = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Related); + // Add relations + var addRelationDto = CreateRelationsDto(series1); + addRelationDto.Adaptations.Add(2); + addRelationDto.Sequels.Add(3); + await _seriesService.UpdateRelatedSeries(addRelationDto); + Assert.Equal(2, series1.Relations.Single(s => s.TargetSeriesId == 2).TargetSeriesId); + Assert.Equal(3, series1.Relations.Single(s => s.TargetSeriesId == 3).TargetSeriesId); + + // Remove relations + var removeRelationDto = CreateRelationsDto(series1); + await _seriesService.UpdateRelatedSeries(removeRelationDto); + Assert.Empty(series1.Relations.Where(s => s.TargetSeriesId == 1)); + Assert.Empty(series1.Relations.Where(s => s.TargetSeriesId == 2)); + } + + [Fact] + public async Task UpdateRelatedSeries_ShouldNotAllowDuplicates() + { + await ResetDb(); + _context.Library.Add(new Library() + { + AppUsers = new List() + { + new AppUser() + { + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() + { + Name = "Test Series", + Volumes = new List(){} + }, + new Series() + { + Name = "Test Series Prequels", + Volumes = new List(){} + } + } + }); + + await _context.SaveChangesAsync(); + + var series1 = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Related); + var relation = new SeriesRelation() + { + Series = series1, + SeriesId = series1.Id, + TargetSeriesId = 2, // Target series id + RelationKind = RelationKind.Prequel + + }; + // Manually create a relation + series1.Relations.Add(relation); + + // Create a new dto with the previous relation as well + var relationDto = CreateRelationsDto(series1); + relationDto.Adaptations.Add(2); + + await 
_seriesService.UpdateRelatedSeries(relationDto); + // Expected is only one instance of the relation (hence not duping) + Assert.Equal(2, series1.Relations.Single(s => s.TargetSeriesId == 2).TargetSeriesId); + } + + [Fact] + public async Task GetRelatedSeries_EditionPrequelSequel_ShouldNotHaveParent() + { + await ResetDb(); + _context.Library.Add(new Library() + { + AppUsers = new List() + { + new AppUser() + { + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() + { + Name = "Test Series", + Volumes = new List(){} + }, + new Series() + { + Name = "Test Series Editions", + Volumes = new List(){} + }, + new Series() + { + Name = "Test Series Prequels", + Volumes = new List(){} + }, + new Series() + { + Name = "Test Series Sequels", + Volumes = new List(){} + }, + new Series() + { + Name = "Test Series Adaption", + Volumes = new List(){} + } + } + }); + await _context.SaveChangesAsync(); + var series1 = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Related); + // Add relations + var addRelationDto = CreateRelationsDto(series1); + addRelationDto.Editions.Add(2); + addRelationDto.Prequels.Add(3); + addRelationDto.Sequels.Add(4); + addRelationDto.Adaptations.Add(5); + await _seriesService.UpdateRelatedSeries(addRelationDto); + + + Assert.Empty(_seriesService.GetRelatedSeries(1, 2).Result.Parent); + Assert.Empty(_seriesService.GetRelatedSeries(1, 3).Result.Parent); + Assert.Empty(_seriesService.GetRelatedSeries(1, 4).Result.Parent); + Assert.NotEmpty(_seriesService.GetRelatedSeries(1, 5).Result.Parent); + } + + [Fact] + public async Task SeriesRelation_ShouldAllowDeleteOnLibrary() + { + await ResetDb(); + _context.Library.Add(new Library() + { + AppUsers = new List() + { + new AppUser() + { + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() + { + Name = "Test Series", + Volumes = new List(){} + }, + new 
Series() + { + Name = "Test Series Prequels", + Volumes = new List(){} + }, + new Series() + { + Name = "Test Series Sequels", + Volumes = new List(){} + } + } + }); + + await _context.SaveChangesAsync(); + + var series1 = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Related); + // Add relations + var addRelationDto = CreateRelationsDto(series1); + addRelationDto.Adaptations.Add(2); + addRelationDto.Sequels.Add(3); + await _seriesService.UpdateRelatedSeries(addRelationDto); + + var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1); + _unitOfWork.LibraryRepository.Delete(library); + + try + { + await _unitOfWork.CommitAsync(); + } + catch (Exception) + { + Assert.False(true); + } + + Assert.Null(await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1)); + } + + [Fact] + public async Task SeriesRelation_ShouldAllowDeleteOnLibrary_WhenSeriesCrossLibraries() + { + await ResetDb(); + _context.Library.Add(new Library() + { + AppUsers = new List() + { + new AppUser() + { + UserName = "majora2007" + } + }, + Name = "Test LIb", + Type = LibraryType.Book, + Series = new List() + { + new Series() + { + Name = "Test Series", + Volumes = new List() + { + new Volume() + { + Chapters = new List() + { + new Chapter() + { + Files = new List() + { + new MangaFile() + { + Pages = 1, + FilePath = "fake file" + } + } + } + } + } + } + }, + new Series() + { + Name = "Test Series Prequels", + Volumes = new List(){} + }, + new Series() + { + Name = "Test Series Sequels", + Volumes = new List(){} + } + } + }); + + _context.Library.Add(new Library() + { + AppUsers = new List() + { + new AppUser() + { + UserName = "majora2007" + } + }, + Name = "Test LIb 2", + Type = LibraryType.Book, + Series = new List() + { + new Series() + { + Name = "Test Series 2", + Volumes = new List(){} + }, + new Series() + { + Name = "Test Series Prequels 2", + Volumes = new List(){} + }, + new Series() + { + Name = "Test Series Sequels 2", + Volumes = new List(){} 
+ } + } + }); + + await _context.SaveChangesAsync(); + + var series1 = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1, SeriesIncludes.Related); + // Add relations + var addRelationDto = CreateRelationsDto(series1); + addRelationDto.Adaptations.Add(4); // cross library link + await _seriesService.UpdateRelatedSeries(addRelationDto); + + var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1, LibraryIncludes.Series); + _unitOfWork.LibraryRepository.Delete(library); + + try + { + await _unitOfWork.CommitAsync(); + } + catch (Exception) + { + Assert.False(true); + } + + Assert.Null(await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1)); + } + + #endregion } diff --git a/API.Tests/Services/TachiyomiServiceTests.cs b/API.Tests/Services/TachiyomiServiceTests.cs new file mode 100644 index 000000000..f623890d6 --- /dev/null +++ b/API.Tests/Services/TachiyomiServiceTests.cs @@ -0,0 +1,733 @@ +namespace API.Tests.Services; +using System.Collections.Generic; +using System.Data.Common; +using System.IO.Abstractions.TestingHelpers; +using System.Linq; +using System.Threading.Tasks; +using Data; +using Data.Repositories; +using API.Entities; +using API.Entities.Enums; +using API.Helpers; +using API.Services; +using SignalR; +using Helpers; +using AutoMapper; +using Microsoft.Data.Sqlite; +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Logging; +using NSubstitute; +using Xunit; + +public class TachiyomiServiceTests +{ + private readonly IUnitOfWork _unitOfWork; + private readonly IMapper _mapper; + private readonly DataContext _context; + private const string CacheDirectory = "C:/kavita/config/cache/"; + private const string CoverImageDirectory = "C:/kavita/config/covers/"; + private const string BackupDirectory = "C:/kavita/config/backups/"; + private const string DataDirectory = "C:/data/"; + + + public TachiyomiServiceTests() + { + var contextOptions = new DbContextOptionsBuilder().UseSqlite(CreateInMemoryDatabase()).Options; + 
+ _context = new DataContext(contextOptions); + Task.Run(SeedDb).GetAwaiter().GetResult(); + + var config = new MapperConfiguration(cfg => cfg.AddProfile()); + _mapper = config.CreateMapper(); + _unitOfWork = new UnitOfWork(_context, _mapper, null); + + + } + + + #region Setup + + private static DbConnection CreateInMemoryDatabase() + { + var connection = new SqliteConnection("Filename=:memory:"); + + connection.Open(); + + return connection; + } + + private async Task SeedDb() + { + await _context.Database.MigrateAsync(); + var filesystem = CreateFileSystem(); + + await Seed.SeedSettings(_context, + new DirectoryService(Substitute.For>(), filesystem)); + + var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync(); + setting.Value = CacheDirectory; + + setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync(); + setting.Value = BackupDirectory; + + _context.ServerSetting.Update(setting); + + _context.Library.Add(new Library() + { + Name = "Manga", Folders = new List() {new FolderPath() {Path = "C:/data/"}} + }); + return await _context.SaveChangesAsync() > 0; + } + + private async Task ResetDb() + { + _context.Series.RemoveRange(_context.Series.ToList()); + + await _context.SaveChangesAsync(); + } + + private static MockFileSystem CreateFileSystem() + { + var fileSystem = new MockFileSystem(); + fileSystem.Directory.SetCurrentDirectory("C:/kavita/"); + fileSystem.AddDirectory("C:/kavita/config/"); + fileSystem.AddDirectory(CacheDirectory); + fileSystem.AddDirectory(CoverImageDirectory); + fileSystem.AddDirectory(BackupDirectory); + fileSystem.AddDirectory(DataDirectory); + + return fileSystem; + } + + + + #endregion + + + #region GetLatestChapter + + [Fact] + public async Task GetLatestChapter_ShouldReturnChapter_NoProgress() + { + await ResetDb(); + + var series = new Series + { + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new 
List() + { + EntityFactory.CreateChapter("95", false, new List(), 1), + EntityFactory.CreateChapter("96", false, new List(), 1), + }), + EntityFactory.CreateVolume("1", new List() + { + EntityFactory.CreateChapter("1", true, new List(), 1), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("3", false, new List(), 1), + EntityFactory.CreateChapter("4", false, new List(), 1), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("31", false, new List(), 1), + EntityFactory.CreateChapter("32", false, new List(), 1), + }), + }, + Pages = 7 + }; + var library = new Library() + { + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() { series } + }; + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + Libraries = new List() + { + library + } + + }); + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + var tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For>(), readerService); + + var latestChapter = await tachiyomiService.GetLatestChapter(1, 1); + + Assert.Null(latestChapter); + } + + [Fact] + public async Task GetLatestChapter_ShouldReturnMaxChapter_CompletelyRead() + { + await ResetDb(); + + var series = new Series + { + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("95", false, new List(), 1), + EntityFactory.CreateChapter("96", false, new List(), 1), + }), + EntityFactory.CreateVolume("1", new List() + { + EntityFactory.CreateChapter("1", true, new List(), 1), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("3", false, new List(), 1), + EntityFactory.CreateChapter("4", false, new List(), 1), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("31", false, new List(), 1), + EntityFactory.CreateChapter("32", false, new 
List(), 1), + }), + }, + Pages = 7 + }; + var library = new Library() + { + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() { series } + }; + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + Libraries = new List() + { + library + } + + }); + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + var tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For>(), readerService); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress); + await readerService.MarkSeriesAsRead(user,1); + + await _context.SaveChangesAsync(); + + + var latestChapter = await tachiyomiService.GetLatestChapter(1, 1); + + Assert.Equal("96", latestChapter.Number); + } + + [Fact] + public async Task GetLatestChapter_ShouldReturnHighestChapter_Progress() + { + await ResetDb(); + + var series = new Series + { + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("95", false, new List(), 1), + EntityFactory.CreateChapter("96", false, new List(), 1), + }), + EntityFactory.CreateVolume("1", new List() + { + EntityFactory.CreateChapter("1", false, new List(), 1), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("21", false, new List(), 1), + EntityFactory.CreateChapter("23", false, new List(), 1), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("31", false, new List(), 1), + EntityFactory.CreateChapter("32", false, new List(), 1), + }), + }, + Pages = 7 + }; + var library = new Library() + { + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() { series } + }; + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + Libraries = new List() + { + library + } + + }); + await _context.SaveChangesAsync(); + + var readerService = new 
ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + var tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For>(), readerService); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress); + await tachiyomiService.MarkChaptersUntilAsRead(user,1,21); + + await _context.SaveChangesAsync(); + + + var latestChapter = await tachiyomiService.GetLatestChapter(1, 1); + + Assert.Equal("21", latestChapter.Number); + } + [Fact] + public async Task GetLatestChapter_ShouldReturnEncodedVolume_Progress() + { + await ResetDb(); + + var series = new Series + { + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("95", false, new List(), 1), + EntityFactory.CreateChapter("96", false, new List(), 1), + }), + EntityFactory.CreateVolume("1", new List() + { + EntityFactory.CreateChapter("1", true, new List(), 1), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("21", false, new List(), 1), + EntityFactory.CreateChapter("23", false, new List(), 1), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("31", false, new List(), 1), + EntityFactory.CreateChapter("32", false, new List(), 1), + }), + }, + Pages = 7 + }; + var library = new Library() + { + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() { series } + }; + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + Libraries = new List() + { + library + } + + }); + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + var tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For>(), readerService); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress); + + await tachiyomiService.MarkChaptersUntilAsRead(user,1,1/10_000F); + + 
await _context.SaveChangesAsync(); + + + var latestChapter = await tachiyomiService.GetLatestChapter(1, 1); + Assert.Equal("0.0001", latestChapter.Number); + } + + [Fact] + public async Task GetLatestChapter_ShouldReturnEncodedVolume_Progress2() + { + await ResetDb(); + + var series = new Series + { + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("1", new List() + { + EntityFactory.CreateChapter("0", false, new List(), 199), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("0", false, new List(), 192), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("0", false, new List(), 255), + }), + }, + Pages = 646 + }; + var library = new Library() + { + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() { series } + }; + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + Libraries = new List() + { + library + } + + }); + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + var tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For>(), readerService); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress); + + await readerService.MarkSeriesAsRead(user, 1); + + await _context.SaveChangesAsync(); + + + var latestChapter = await tachiyomiService.GetLatestChapter(1, 1); + Assert.Equal("0.0003", latestChapter.Number); + } + + + [Fact] + public async Task GetLatestChapter_ShouldReturnEncodedYearlyVolume_Progress() + { + await ResetDb(); + + var series = new Series + { + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("95", false, new List(), 1), + EntityFactory.CreateChapter("96", false, new List(), 1), + }), + EntityFactory.CreateVolume("1997", new List() + { + EntityFactory.CreateChapter("1", false, new List(), 1), + }), + 
EntityFactory.CreateVolume("2002", new List() + { + EntityFactory.CreateChapter("2", false, new List(), 1), + }), + EntityFactory.CreateVolume("2005", new List() + { + EntityFactory.CreateChapter("3", false, new List(), 1), + }), + }, + Pages = 7 + }; + var library = new Library() + { + Name = "Test LIb", + Type = LibraryType.Comic, + Series = new List() { series } + }; + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + Libraries = new List() + { + library + } + + }); + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + var tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For>(), readerService); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress); + + await tachiyomiService.MarkChaptersUntilAsRead(user,1,2002/10_000F); + + await _context.SaveChangesAsync(); + + + var latestChapter = await tachiyomiService.GetLatestChapter(1, 1); + Assert.Equal("0.2002", latestChapter.Number); + } + + #endregion + + + #region MarkChaptersUntilAsRead + + [Fact] + public async Task MarkChaptersUntilAsRead_ShouldReturnChapter_NoProgress() + { + await ResetDb(); + + var series = new Series + { + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("95", false, new List(), 1), + EntityFactory.CreateChapter("96", false, new List(), 1), + }), + EntityFactory.CreateVolume("1", new List() + { + EntityFactory.CreateChapter("1", true, new List(), 1), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("3", false, new List(), 1), + EntityFactory.CreateChapter("4", false, new List(), 1), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("31", false, new List(), 1), + EntityFactory.CreateChapter("32", false, new List(), 1), + }), + }, + Pages = 7 + }; + var library = new Library() + 
{ + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() { series } + }; + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + Libraries = new List() + { + library + } + + }); + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + var tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For>(), readerService); + + var latestChapter = await tachiyomiService.GetLatestChapter(1, 1); + + Assert.Null(latestChapter); + } + [Fact] + public async Task MarkChaptersUntilAsRead_ShouldReturnMaxChapter_CompletelyRead() + { + await ResetDb(); + + var series = new Series + { + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("95", false, new List(), 1), + EntityFactory.CreateChapter("96", false, new List(), 1), + }), + EntityFactory.CreateVolume("1", new List() + { + EntityFactory.CreateChapter("1", true, new List(), 1), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("3", false, new List(), 1), + EntityFactory.CreateChapter("4", false, new List(), 1), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("31", false, new List(), 1), + EntityFactory.CreateChapter("32", false, new List(), 1), + }), + }, + Pages = 7 + }; + var library = new Library() + { + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() { series } + }; + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + Libraries = new List() + { + library + } + + }); + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + var tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For>(), readerService); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress); + await 
readerService.MarkSeriesAsRead(user,1); + + await _context.SaveChangesAsync(); + + + var latestChapter = await tachiyomiService.GetLatestChapter(1, 1); + + Assert.Equal("96", latestChapter.Number); + } + + [Fact] + public async Task MarkChaptersUntilAsRead_ShouldReturnHighestChapter_Progress() + { + await ResetDb(); + + var series = new Series + { + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("95", false, new List(), 1), + EntityFactory.CreateChapter("96", false, new List(), 1), + }), + EntityFactory.CreateVolume("1", new List() + { + EntityFactory.CreateChapter("1", false, new List(), 1), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("21", false, new List(), 1), + EntityFactory.CreateChapter("23", false, new List(), 1), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("31", false, new List(), 1), + EntityFactory.CreateChapter("32", false, new List(), 1), + }), + }, + Pages = 7 + }; + var library = new Library() + { + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() { series } + }; + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + Libraries = new List() + { + library + } + + }); + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + var tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For>(), readerService); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress); + await tachiyomiService.MarkChaptersUntilAsRead(user,1,21); + + await _context.SaveChangesAsync(); + + + var latestChapter = await tachiyomiService.GetLatestChapter(1, 1); + + Assert.Equal("21", latestChapter.Number); + } + [Fact] + public async Task MarkChaptersUntilAsRead_ShouldReturnEncodedVolume_Progress() + { + await ResetDb(); + + var series = new Series 
+ { + Name = "Test", + Volumes = new List() + { + EntityFactory.CreateVolume("0", new List() + { + EntityFactory.CreateChapter("95", false, new List(), 1), + EntityFactory.CreateChapter("96", false, new List(), 1), + }), + EntityFactory.CreateVolume("1", new List() + { + EntityFactory.CreateChapter("1", true, new List(), 1), + }), + EntityFactory.CreateVolume("2", new List() + { + EntityFactory.CreateChapter("21", false, new List(), 1), + EntityFactory.CreateChapter("23", false, new List(), 1), + }), + EntityFactory.CreateVolume("3", new List() + { + EntityFactory.CreateChapter("31", false, new List(), 1), + EntityFactory.CreateChapter("32", false, new List(), 1), + }), + }, + Pages = 7 + }; + var library = new Library() + { + Name = "Test LIb", + Type = LibraryType.Manga, + Series = new List() { series } + }; + + _context.AppUser.Add(new AppUser() + { + UserName = "majora2007", + Libraries = new List() + { + library + } + + }); + await _context.SaveChangesAsync(); + + var readerService = new ReaderService(_unitOfWork, Substitute.For>(), Substitute.For()); + var tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For>(), readerService); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress); + + await tachiyomiService.MarkChaptersUntilAsRead(user,1,1/10_000F); + + await _context.SaveChangesAsync(); + + + var latestChapter = await tachiyomiService.GetLatestChapter(1, 1); + Assert.Equal("0.0001", latestChapter.Number); + } + + #endregion + +} diff --git a/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo2.zip b/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo2.zip new file mode 100644 index 000000000..b327b62d1 Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo2.zip differ diff --git a/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo_duplicateInfos.rar b/API.Tests/Services/Test 
Data/ArchiveService/ComicInfos/ComicInfo_duplicateInfos.rar new file mode 100644 index 000000000..9d5bab5a2 Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo_duplicateInfos.rar differ diff --git a/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo_duplicateInfos_reversed.zip b/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo_duplicateInfos_reversed.zip new file mode 100644 index 000000000..f764a4a60 Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo_duplicateInfos_reversed.zip differ diff --git a/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo_outside_root.zip b/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo_outside_root.zip new file mode 100644 index 000000000..9caea5bf1 Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/ComicInfos/ComicInfo_outside_root.zip differ diff --git a/API/API.csproj b/API/API.csproj index d7920b926..4504e7804 100644 --- a/API/API.csproj +++ b/API/API.csproj @@ -6,6 +6,8 @@ true Linux true + true + true @@ -45,43 +47,52 @@ - + - - + + - + - - - + + + - + all runtime; build; native; contentfiles; analyzers; buildtransitive - - - + + + - + + + + + + + + + + - + all runtime; build; native; contentfiles; analyzers; buildtransitive - - - + + + diff --git a/API/Archive/ArchiveLibrary.cs b/API/Archive/ArchiveLibrary.cs index 2d87e24b6..721a37113 100644 --- a/API/Archive/ArchiveLibrary.cs +++ b/API/Archive/ArchiveLibrary.cs @@ -1,21 +1,20 @@ -namespace API.Archive +namespace API.Archive; + +/// +/// Represents which library should handle opening this library +/// +public enum ArchiveLibrary { /// - /// Represents which library should handle opening this library + /// The underlying archive cannot be opened /// - public enum ArchiveLibrary - { - /// - /// The underlying archive cannot be opened - /// - NotSupported = 0, - /// - /// The underlying archive can be opened by 
SharpCompress - /// - SharpCompress = 1, - /// - /// The underlying archive can be opened by default .NET - /// - Default = 2 - } + NotSupported = 0, + /// + /// The underlying archive can be opened by SharpCompress + /// + SharpCompress = 1, + /// + /// The underlying archive can be opened by default .NET + /// + Default = 2 } diff --git a/API/Comparators/ChapterSortComparer.cs b/API/Comparators/ChapterSortComparer.cs index ca55381bc..599310514 100644 --- a/API/Comparators/ChapterSortComparer.cs +++ b/API/Comparators/ChapterSortComparer.cs @@ -1,66 +1,65 @@ using System.Collections.Generic; -namespace API.Comparators +namespace API.Comparators; + +/// +/// Sorts chapters based on their Number. Uses natural ordering of doubles. +/// +public class ChapterSortComparer : IComparer { /// - /// Sorts chapters based on their Number. Uses natural ordering of doubles. + /// Normal sort for 2 doubles. 0 always comes last /// - public class ChapterSortComparer : IComparer + /// + /// + /// + public int Compare(double x, double y) { - /// - /// Normal sort for 2 doubles. 0 always comes last - /// - /// - /// - /// - public int Compare(double x, double y) - { - if (x == 0.0 && y == 0.0) return 0; - // if x is 0, it comes second - if (x == 0.0) return 1; - // if y is 0, it comes second - if (y == 0.0) return -1; + if (x == 0.0 && y == 0.0) return 0; + // if x is 0, it comes second + if (x == 0.0) return 1; + // if y is 0, it comes second + if (y == 0.0) return -1; - return x.CompareTo(y); - } - - public static readonly ChapterSortComparer Default = new ChapterSortComparer(); + return x.CompareTo(y); } - /// - /// This is a special case comparer used exclusively for sorting chapters within a single Volume for reading order. - /// - /// Volume 10 has "Series - Vol 10" and "Series - Vol 10 Chapter 81". In this case, for reading order, the order is Vol 10, Vol 10 Chapter 81. - /// This is represented by Chapter 0, Chapter 81. 
- /// - /// - public class ChapterSortComparerZeroFirst : IComparer + public static readonly ChapterSortComparer Default = new ChapterSortComparer(); +} + +/// +/// This is a special case comparer used exclusively for sorting chapters within a single Volume for reading order. +/// +/// Volume 10 has "Series - Vol 10" and "Series - Vol 10 Chapter 81". In this case, for reading order, the order is Vol 10, Vol 10 Chapter 81. +/// This is represented by Chapter 0, Chapter 81. +/// +/// +public class ChapterSortComparerZeroFirst : IComparer +{ + public int Compare(double x, double y) { - public int Compare(double x, double y) - { - if (x == 0.0 && y == 0.0) return 0; - // if x is 0, it comes first - if (x == 0.0) return -1; - // if y is 0, it comes first - if (y == 0.0) return 1; + if (x == 0.0 && y == 0.0) return 0; + // if x is 0, it comes first + if (x == 0.0) return -1; + // if y is 0, it comes first + if (y == 0.0) return 1; - return x.CompareTo(y); - } - - public static readonly ChapterSortComparerZeroFirst Default = new ChapterSortComparerZeroFirst(); + return x.CompareTo(y); } - public class SortComparerZeroLast : IComparer - { - public int Compare(double x, double y) - { - if (x == 0.0 && y == 0.0) return 0; - // if x is 0, it comes last - if (x == 0.0) return 1; - // if y is 0, it comes last - if (y == 0.0) return -1; + public static readonly ChapterSortComparerZeroFirst Default = new ChapterSortComparerZeroFirst(); +} - return x.CompareTo(y); - } +public class SortComparerZeroLast : IComparer +{ + public int Compare(double x, double y) + { + if (x == 0.0 && y == 0.0) return 0; + // if x is 0, it comes last + if (x == 0.0) return 1; + // if y is 0, it comes last + if (y == 0.0) return -1; + + return x.CompareTo(y); } } diff --git a/API/Comparators/NumericComparer.cs b/API/Comparators/NumericComparer.cs index b40e33e0a..ae603e71b 100644 --- a/API/Comparators/NumericComparer.cs +++ b/API/Comparators/NumericComparer.cs @@ -1,17 +1,16 @@ using System.Collections; 
-namespace API.Comparators -{ - public class NumericComparer : IComparer - { +namespace API.Comparators; - public int Compare(object x, object y) +public class NumericComparer : IComparer +{ + + public int Compare(object x, object y) + { + if((x is string xs) && (y is string ys)) { - if((x is string xs) && (y is string ys)) - { - return StringLogicalComparer.Compare(xs, ys); - } - return -1; + return StringLogicalComparer.Compare(xs, ys); } + return -1; } -} \ No newline at end of file +} diff --git a/API/Comparators/StringLogicalComparer.cs b/API/Comparators/StringLogicalComparer.cs index 67aa72225..805f85623 100644 --- a/API/Comparators/StringLogicalComparer.cs +++ b/API/Comparators/StringLogicalComparer.cs @@ -4,127 +4,126 @@ using static System.Char; -namespace API.Comparators +namespace API.Comparators; + +public static class StringLogicalComparer { - public static class StringLogicalComparer + public static int Compare(string s1, string s2) { - public static int Compare(string s1, string s2) - { - //get rid of special cases - if((s1 == null) && (s2 == null)) return 0; - if(s1 == null) return -1; - if(s2 == null) return 1; + //get rid of special cases + if((s1 == null) && (s2 == null)) return 0; + if(s1 == null) return -1; + if(s2 == null) return 1; - if (string.IsNullOrEmpty(s1) && string.IsNullOrEmpty(s2)) return 0; - if (string.IsNullOrEmpty(s1)) return -1; - if (string.IsNullOrEmpty(s2)) return -1; + if (string.IsNullOrEmpty(s1) && string.IsNullOrEmpty(s2)) return 0; + if (string.IsNullOrEmpty(s1)) return -1; + if (string.IsNullOrEmpty(s2)) return -1; - //WE style, special case - var sp1 = IsLetterOrDigit(s1, 0); - var sp2 = IsLetterOrDigit(s2, 0); - if(sp1 && !sp2) return 1; - if(!sp1 && sp2) return -1; + //WE style, special case + var sp1 = IsLetterOrDigit(s1, 0); + var sp2 = IsLetterOrDigit(s2, 0); + if(sp1 && !sp2) return 1; + if(!sp1 && sp2) return -1; - int i1 = 0, i2 = 0; //current index - while(true) - { - var c1 = IsDigit(s1, i1); - var c2 = 
IsDigit(s2, i2); - int r; // temp result - if(!c1 && !c2) - { - bool letter1 = IsLetter(s1, i1); - bool letter2 = IsLetter(s2, i2); - if((letter1 && letter2) || (!letter1 && !letter2)) - { - if(letter1 && letter2) - { - r = ToLower(s1[i1]).CompareTo(ToLower(s2[i2])); - } - else - { - r = s1[i1].CompareTo(s2[i2]); - } - if(r != 0) return r; - } - else if(!letter1 && letter2) return -1; - else if(letter1 && !letter2) return 1; - } - else if(c1 && c2) - { - r = CompareNum(s1, ref i1, s2, ref i2); - if(r != 0) return r; - } - else if(c1) - { - return -1; - } - else if(c2) - { - return 1; - } - i1++; - i2++; - if((i1 >= s1.Length) && (i2 >= s2.Length)) - { - return 0; - } - if(i1 >= s1.Length) - { - return -1; - } - if(i2 >= s2.Length) - { - return -1; - } - } - } - - private static int CompareNum(string s1, ref int i1, string s2, ref int i2) - { - int nzStart1 = i1, nzStart2 = i2; // nz = non zero - int end1 = i1, end2 = i2; - - ScanNumEnd(s1, i1, ref end1, ref nzStart1); - ScanNumEnd(s2, i2, ref end2, ref nzStart2); - var start1 = i1; i1 = end1 - 1; - var start2 = i2; i2 = end2 - 1; - - var nzLength1 = end1 - nzStart1; - var nzLength2 = end2 - nzStart2; - - if(nzLength1 < nzLength2) return -1; - if(nzLength1 > nzLength2) return 1; - - for(int j1 = nzStart1,j2 = nzStart2; j1 <= i1; j1++,j2++) - { - var r = s1[j1].CompareTo(s2[j2]); - if(r != 0) return r; - } - // the nz parts are equal - var length1 = end1 - start1; - var length2 = end2 - start2; - if(length1 == length2) return 0; - if(length1 > length2) return -1; - return 1; - } - - //lookahead - private static void ScanNumEnd(string s, int start, ref int end, ref int nzStart) - { - nzStart = start; - end = start; - var countZeros = true; - while(IsDigit(s, end)) - { - if(countZeros && s[end].Equals('0')) - { - nzStart++; - } - else countZeros = false; - end++; - if(end >= s.Length) break; - } - } + int i1 = 0, i2 = 0; //current index + while(true) + { + var c1 = IsDigit(s1, i1); + var c2 = IsDigit(s2, i2); + int r; 
// temp result + if(!c1 && !c2) + { + bool letter1 = IsLetter(s1, i1); + bool letter2 = IsLetter(s2, i2); + if((letter1 && letter2) || (!letter1 && !letter2)) + { + if(letter1 && letter2) + { + r = ToLower(s1[i1]).CompareTo(ToLower(s2[i2])); + } + else + { + r = s1[i1].CompareTo(s2[i2]); + } + if(r != 0) return r; + } + else if(!letter1 && letter2) return -1; + else if(letter1 && !letter2) return 1; + } + else if(c1 && c2) + { + r = CompareNum(s1, ref i1, s2, ref i2); + if(r != 0) return r; + } + else if(c1) + { + return -1; + } + else if(c2) + { + return 1; + } + i1++; + i2++; + if((i1 >= s1.Length) && (i2 >= s2.Length)) + { + return 0; + } + if(i1 >= s1.Length) + { + return -1; + } + if(i2 >= s2.Length) + { + return -1; + } + } } -} \ No newline at end of file + + private static int CompareNum(string s1, ref int i1, string s2, ref int i2) + { + int nzStart1 = i1, nzStart2 = i2; // nz = non zero + int end1 = i1, end2 = i2; + + ScanNumEnd(s1, i1, ref end1, ref nzStart1); + ScanNumEnd(s2, i2, ref end2, ref nzStart2); + var start1 = i1; i1 = end1 - 1; + var start2 = i2; i2 = end2 - 1; + + var nzLength1 = end1 - nzStart1; + var nzLength2 = end2 - nzStart2; + + if(nzLength1 < nzLength2) return -1; + if(nzLength1 > nzLength2) return 1; + + for(int j1 = nzStart1,j2 = nzStart2; j1 <= i1; j1++,j2++) + { + var r = s1[j1].CompareTo(s2[j2]); + if(r != 0) return r; + } + // the nz parts are equal + var length1 = end1 - start1; + var length2 = end2 - start2; + if(length1 == length2) return 0; + if(length1 > length2) return -1; + return 1; + } + + //lookahead + private static void ScanNumEnd(string s, int start, ref int end, ref int nzStart) + { + nzStart = start; + end = start; + var countZeros = true; + while(IsDigit(s, end)) + { + if(countZeros && s[end].Equals('0')) + { + nzStart++; + } + else countZeros = false; + end++; + if(end >= s.Length) break; + } + } +} diff --git a/API/Constants/PolicyConstants.cs b/API/Constants/PolicyConstants.cs index 8e8a2bc5d..546ad4158 100644 
--- a/API/Constants/PolicyConstants.cs +++ b/API/Constants/PolicyConstants.cs @@ -1,30 +1,37 @@ using System.Collections.Immutable; -namespace API.Constants +namespace API.Constants; + +/// +/// Role-based Security +/// +public static class PolicyConstants { /// - /// Role-based Security + /// Admin User. Has all privileges /// - public static class PolicyConstants - { - /// - /// Admin User. Has all privileges - /// - public const string AdminRole = "Admin"; - /// - /// Non-Admin User. Must be granted privileges by an Admin. - /// - public const string PlebRole = "Pleb"; - /// - /// Used to give a user ability to download files from the server - /// - public const string DownloadRole = "Download"; - /// - /// Used to give a user ability to change their own password - /// - public const string ChangePasswordRole = "Change Password"; + public const string AdminRole = "Admin"; + /// + /// Non-Admin User. Must be granted privileges by an Admin. + /// + public const string PlebRole = "Pleb"; + /// + /// Used to give a user ability to download files from the server + /// + public const string DownloadRole = "Download"; + /// + /// Used to give a user ability to change their own password + /// + public const string ChangePasswordRole = "Change Password"; + /// + /// Used to give a user ability to bookmark files on the server + /// + public const string BookmarkRole = "Bookmark"; + /// + /// Used to give a user ability to Change Restrictions on their account + /// + public const string ChangeRestrictionRole = "Change Restriction"; - public static readonly ImmutableArray ValidRoles = - ImmutableArray.Create(AdminRole, PlebRole, DownloadRole, ChangePasswordRole); - } + public static readonly ImmutableArray ValidRoles = + ImmutableArray.Create(AdminRole, PlebRole, DownloadRole, ChangePasswordRole, BookmarkRole, ChangeRestrictionRole); } diff --git a/API/Controllers/AccountController.cs b/API/Controllers/AccountController.cs index 8e549d5e1..b0d6c43ba 100644 --- 
a/API/Controllers/AccountController.cs +++ b/API/Controllers/AccountController.cs @@ -12,13 +12,13 @@ using API.DTOs.Account; using API.DTOs.Email; using API.Entities; using API.Entities.Enums; -using API.Entities.Enums.UserPreferences; using API.Errors; using API.Extensions; using API.Services; using API.SignalR; using AutoMapper; using Kavita.Common; +using Kavita.Common.EnvironmentInfo; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Mvc; @@ -26,388 +26,490 @@ using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; -namespace API.Controllers -{ - /// - /// All Account matters - /// - public class AccountController : BaseApiController - { - private readonly UserManager _userManager; - private readonly SignInManager _signInManager; - private readonly ITokenService _tokenService; - private readonly IUnitOfWork _unitOfWork; - private readonly ILogger _logger; - private readonly IMapper _mapper; - private readonly IAccountService _accountService; - private readonly IEmailService _emailService; - private readonly IHostEnvironment _environment; - private readonly IEventHub _eventHub; +namespace API.Controllers; - /// - public AccountController(UserManager userManager, - SignInManager signInManager, - ITokenService tokenService, IUnitOfWork unitOfWork, - ILogger logger, - IMapper mapper, IAccountService accountService, - IEmailService emailService, IHostEnvironment environment, - IEventHub eventHub) +/// +/// All Account matters +/// +public class AccountController : BaseApiController +{ + private readonly UserManager _userManager; + private readonly SignInManager _signInManager; + private readonly ITokenService _tokenService; + private readonly IUnitOfWork _unitOfWork; + private readonly ILogger _logger; + private readonly IMapper _mapper; + private readonly IAccountService _accountService; + private readonly IEmailService _emailService; + private readonly 
IHostEnvironment _environment; + private readonly IEventHub _eventHub; + + /// + public AccountController(UserManager userManager, + SignInManager signInManager, + ITokenService tokenService, IUnitOfWork unitOfWork, + ILogger logger, + IMapper mapper, IAccountService accountService, + IEmailService emailService, IHostEnvironment environment, + IEventHub eventHub) + { + _userManager = userManager; + _signInManager = signInManager; + _tokenService = tokenService; + _unitOfWork = unitOfWork; + _logger = logger; + _mapper = mapper; + _accountService = accountService; + _emailService = emailService; + _environment = environment; + _eventHub = eventHub; + } + + /// + /// Update a user's password + /// + /// + /// + [AllowAnonymous] + [HttpPost("reset-password")] + public async Task UpdatePassword(ResetPasswordDto resetPasswordDto) + { + // TODO: Log this request to Audit Table + _logger.LogInformation("{UserName} is changing {ResetUser}'s password", User.GetUsername(), resetPasswordDto.UserName); + + var user = await _userManager.Users.SingleOrDefaultAsync(x => x.UserName == resetPasswordDto.UserName); + if (user == null) return Ok(); // Don't report BadRequest as that would allow brute forcing to find accounts on system + var isAdmin = User.IsInRole(PolicyConstants.AdminRole); + + + if (resetPasswordDto.UserName == User.GetUsername() && !(User.IsInRole(PolicyConstants.ChangePasswordRole) || isAdmin)) + return Unauthorized("You are not permitted to this operation."); + + if (resetPasswordDto.UserName != User.GetUsername() && !isAdmin) + return Unauthorized("You are not permitted to this operation."); + + if (string.IsNullOrEmpty(resetPasswordDto.OldPassword) && !isAdmin) + return BadRequest(new ApiException(400, "You must enter your existing password to change your account unless you're an admin")); + + // If you're an admin and the username isn't yours, you don't need to validate the password + var isResettingOtherUser = (resetPasswordDto.UserName != User.GetUsername() 
&& isAdmin); + if (!isResettingOtherUser && !await _userManager.CheckPasswordAsync(user, resetPasswordDto.OldPassword)) { - _userManager = userManager; - _signInManager = signInManager; - _tokenService = tokenService; - _unitOfWork = unitOfWork; - _logger = logger; - _mapper = mapper; - _accountService = accountService; - _emailService = emailService; - _environment = environment; - _eventHub = eventHub; + return BadRequest("Invalid Password"); } - /// - /// Update a user's password - /// - /// - /// - [AllowAnonymous] - [HttpPost("reset-password")] - public async Task UpdatePassword(ResetPasswordDto resetPasswordDto) + var errors = await _accountService.ChangeUserPassword(user, resetPasswordDto.Password); + if (errors.Any()) { - // TODO: Log this request to Audit Table - _logger.LogInformation("{UserName} is changing {ResetUser}'s password", User.GetUsername(), resetPasswordDto.UserName); + return BadRequest(errors); + } - var user = await _userManager.Users.SingleOrDefaultAsync(x => x.UserName == resetPasswordDto.UserName); - if (user == null) return Ok(); // Don't report BadRequest as that would allow brute forcing to find accounts on system - var isAdmin = User.IsInRole(PolicyConstants.AdminRole); + _logger.LogInformation("{User}'s Password has been reset", resetPasswordDto.UserName); + return Ok(); + } + /// + /// Register the first user (admin) on the server. 
Will not do anything if an admin is already confirmed + /// + /// + /// + [AllowAnonymous] + [HttpPost("register")] + public async Task> RegisterFirstUser(RegisterDto registerDto) + { + var admins = await _userManager.GetUsersInRoleAsync("Admin"); + if (admins.Count > 0) return BadRequest("Not allowed"); - if (resetPasswordDto.UserName == User.GetUsername() && !(User.IsInRole(PolicyConstants.ChangePasswordRole) || isAdmin)) - return Unauthorized("You are not permitted to this operation."); - - if (resetPasswordDto.UserName != User.GetUsername() && !isAdmin) - return Unauthorized("You are not permitted to this operation."); - - if (string.IsNullOrEmpty(resetPasswordDto.OldPassword) && !isAdmin) - return BadRequest(new ApiException(400, "You must enter your existing password to change your account unless you're an admin")); - - // If you're an admin and the username isn't yours, you don't need to validate the password - var isResettingOtherUser = (resetPasswordDto.UserName != User.GetUsername() && isAdmin); - if (!isResettingOtherUser && !await _userManager.CheckPasswordAsync(user, resetPasswordDto.OldPassword)) + try + { + var usernameValidation = await _accountService.ValidateUsername(registerDto.Username); + if (usernameValidation.Any()) { - return BadRequest("Invalid Password"); + return BadRequest(usernameValidation); } - var errors = await _accountService.ChangeUserPassword(user, resetPasswordDto.Password); - if (errors.Any()) + var user = new AppUser() { - return BadRequest(errors); + UserName = registerDto.Username, + Email = registerDto.Email, + UserPreferences = new AppUserPreferences + { + Theme = await _unitOfWork.SiteThemeRepository.GetDefaultTheme() + }, + ApiKey = HashUtil.ApiKey() + }; + + var result = await _userManager.CreateAsync(user, registerDto.Password); + if (!result.Succeeded) return BadRequest(result.Errors); + + var token = await _userManager.GenerateEmailConfirmationTokenAsync(user); + if (string.IsNullOrEmpty(token)) return 
BadRequest("There was an issue generating a confirmation token."); + if (!await ConfirmEmailToken(token, user)) return BadRequest($"There was an issue validating your email: {token}"); + + + var roleResult = await _userManager.AddToRoleAsync(user, PolicyConstants.AdminRole); + if (!roleResult.Succeeded) return BadRequest(result.Errors); + + return new UserDto + { + Username = user.UserName, + Email = user.Email, + Token = await _tokenService.CreateToken(user), + RefreshToken = await _tokenService.CreateRefreshToken(user), + ApiKey = user.ApiKey, + Preferences = _mapper.Map(user.UserPreferences) + }; + } + catch (Exception ex) + { + _logger.LogError(ex, "Something went wrong when registering user"); + // We need to manually delete the User as we've already committed + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(registerDto.Username); + _unitOfWork.UserRepository.Delete(user); + await _unitOfWork.CommitAsync(); + } + + return BadRequest("Something went wrong when registering user"); + } + + + /// + /// Perform a login. Will send JWT Token of the logged in user back. + /// + /// + /// + [AllowAnonymous] + [HttpPost("login")] + public async Task> Login(LoginDto loginDto) + { + var user = await _userManager.Users + .Include(u => u.UserPreferences) + .SingleOrDefaultAsync(x => x.NormalizedUserName == loginDto.Username.ToUpper()); + + if (user == null) return Unauthorized("Invalid username"); + + var result = await _signInManager + .CheckPasswordSignInAsync(user, loginDto.Password, true); + + if (result.IsLockedOut) + { + return Unauthorized("You've been locked out from too many authorization attempts. Please wait 10 minutes."); + } + + if (!result.Succeeded) + { + return Unauthorized(result.IsNotAllowed ? 
"You must confirm your email first" : "Your credentials are not correct."); + } + + // Update LastActive on account + user.LastActive = DateTime.Now; + user.UserPreferences ??= new AppUserPreferences + { + Theme = await _unitOfWork.SiteThemeRepository.GetDefaultTheme() + }; + + _unitOfWork.UserRepository.Update(user); + await _unitOfWork.CommitAsync(); + + _logger.LogInformation("{UserName} logged in at {Time}", user.UserName, user.LastActive); + + var dto = _mapper.Map(user); + dto.Token = await _tokenService.CreateToken(user); + dto.RefreshToken = await _tokenService.CreateRefreshToken(user); + var pref = await _unitOfWork.UserRepository.GetPreferencesAsync(user.UserName); + pref.Theme ??= await _unitOfWork.SiteThemeRepository.GetDefaultTheme(); + dto.Preferences = _mapper.Map(pref); + return dto; + } + + /// + /// Refreshes the user's JWT token + /// + /// + /// + [AllowAnonymous] + [HttpPost("refresh-token")] + public async Task> RefreshToken([FromBody] TokenRequestDto tokenRequestDto) + { + var token = await _tokenService.ValidateRefreshToken(tokenRequestDto); + if (token == null) + { + return Unauthorized(new { message = "Invalid token" }); + } + + return Ok(token); + } + + /// + /// Get All Roles back. 
See + /// + /// + [HttpGet("roles")] + public ActionResult> GetRoles() + { + // TODO: This should be moved to ServerController + return typeof(PolicyConstants) + .GetFields(BindingFlags.Public | BindingFlags.Static) + .Where(f => f.FieldType == typeof(string)) + .ToDictionary(f => f.Name, + f => (string) f.GetValue(null)).Values.ToList(); + } + + + /// + /// Resets the API Key assigned with a user + /// + /// + [HttpPost("reset-api-key")] + public async Task> ResetApiKey() + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + + user.ApiKey = HashUtil.ApiKey(); + + if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync()) + { + return Ok(user.ApiKey); + } + + await _unitOfWork.RollbackAsync(); + return BadRequest("Something went wrong, unable to reset key"); + + } + + + /// + /// Initiates the flow to update a user's email address. The email address is not changed in this API. A confirmation link is sent/dumped which will + /// validate the email. It must be confirmed for the email to update. + /// + /// + /// Returns just if the email was sent or server isn't reachable + [HttpPost("update/email")] + public async Task UpdateEmail(UpdateEmailDto dto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + if (user == null) return Unauthorized("You do not have permission"); + + if (dto == null || string.IsNullOrEmpty(dto.Email)) return BadRequest("Invalid payload"); + + // Validate no other users exist with this email + if (user.Email.Equals(dto.Email)) return Ok("Nothing to do"); + + // Check if email is used by another user + var existingUserEmail = await _unitOfWork.UserRepository.GetUserByEmailAsync(dto.Email); + if (existingUserEmail != null) + { + return BadRequest("You cannot share emails across multiple accounts"); + } + + // All validations complete, generate a new token and email it to the user at the new address. 
Confirm email link will update the email + var token = await _userManager.GenerateEmailConfirmationTokenAsync(user); + if (string.IsNullOrEmpty(token)) + { + _logger.LogError("There was an issue generating a token for the email"); + return BadRequest("There was an issue creating a confirmation email token. See logs."); + } + + user.EmailConfirmed = false; + user.ConfirmationToken = token; + await _userManager.UpdateAsync(user); + + // Send a confirmation email + try + { + var emailLink = GenerateEmailLink(user.ConfirmationToken, "confirm-email-update", dto.Email); + _logger.LogCritical("[Update Email]: Email Link for {UserName}: {Link}", user.UserName, emailLink); + var host = _environment.IsDevelopment() ? "localhost:4200" : Request.Host.ToString(); + var accessible = await _emailService.CheckIfAccessible(host); + if (accessible) + { + try + { + // Email the old address of the update change + await _emailService.SendEmailChangeEmail(new ConfirmationEmailDto() + { + EmailAddress = string.IsNullOrEmpty(user.Email) ? 
dto.Email : user.Email, + InstallId = BuildInfo.Version.ToString(), + InvitingUser = (await _unitOfWork.UserRepository.GetAdminUsersAsync()).First().UserName, + ServerConfirmationLink = emailLink + }); + } + catch (Exception) + { + /* Swallow exception */ + } } - _logger.LogInformation("{User}'s Password has been reset", resetPasswordDto.UserName); + return Ok(new InviteUserResponse + { + EmailLink = string.Empty, + EmailSent = accessible + }); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an error during invite user flow, unable to send an email"); + } + + + await _eventHub.SendMessageToAsync(MessageFactory.UserUpdate, MessageFactory.UserUpdateEvent(user.Id, user.UserName), user.Id); + + return Ok(); + } + + [HttpPost("update/age-restriction")] + public async Task UpdateAgeRestriction(UpdateAgeRestrictionDto dto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + if (user == null) return Unauthorized("You do not have permission"); + if (dto == null) return BadRequest("Invalid payload"); + + var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user); + + user.AgeRestriction = isAdmin ? AgeRating.NotApplicable : dto.AgeRating; + user.AgeRestrictionIncludeUnknowns = isAdmin || dto.IncludeUnknowns; + + _unitOfWork.UserRepository.Update(user); + + if (!_unitOfWork.HasChanges()) return Ok(); + try + { + await _unitOfWork.CommitAsync(); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an error updating the age restriction"); + return BadRequest("There was an error updating the age restriction"); + } + + await _eventHub.SendMessageToAsync(MessageFactory.UserUpdate, MessageFactory.UserUpdateEvent(user.Id, user.UserName), user.Id); + + return Ok(); + } + + /// + /// Update the user account. This can only affect Username, Email (will require confirming), Roles, and Library access. 
+ /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("update")] + public async Task UpdateAccount(UpdateUserDto dto) + { + var adminUser = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + if (!await _unitOfWork.UserRepository.IsUserAdminAsync(adminUser)) return Unauthorized("You do not have permission"); + + var user = await _unitOfWork.UserRepository.GetUserByIdAsync(dto.UserId); + if (user == null) return BadRequest("User does not exist"); + + // Check if username is changing + if (!user.UserName.Equals(dto.Username)) + { + // Validate username change + var errors = await _accountService.ValidateUsername(dto.Username); + if (errors.Any()) return BadRequest("Username already taken"); + user.UserName = dto.Username; + _unitOfWork.UserRepository.Update(user); + } + + // Update roles + var existingRoles = await _userManager.GetRolesAsync(user); + var hasAdminRole = dto.Roles.Contains(PolicyConstants.AdminRole); + if (!hasAdminRole) + { + dto.Roles.Add(PolicyConstants.PlebRole); + } + + if (existingRoles.Except(dto.Roles).Any() || dto.Roles.Except(existingRoles).Any()) + { + var roles = dto.Roles; + + var roleResult = await _userManager.RemoveFromRolesAsync(user, existingRoles); + if (!roleResult.Succeeded) return BadRequest(roleResult.Errors); + roleResult = await _userManager.AddToRolesAsync(user, roles); + if (!roleResult.Succeeded) return BadRequest(roleResult.Errors); + } + + + var allLibraries = (await _unitOfWork.LibraryRepository.GetLibrariesAsync()).ToList(); + List libraries; + if (hasAdminRole) + { + _logger.LogInformation("{UserName} is being registered as admin. 
Granting access to all libraries", + user.UserName); + libraries = allLibraries; + } + else + { + // Remove user from all libraries + foreach (var lib in allLibraries) + { + lib.AppUsers ??= new List(); + lib.AppUsers.Remove(user); + } + + libraries = (await _unitOfWork.LibraryRepository.GetLibraryForIdsAsync(dto.Libraries, LibraryIncludes.AppUser)).ToList(); + } + + foreach (var lib in libraries) + { + lib.AppUsers ??= new List(); + lib.AppUsers.Add(user); + } + + user.AgeRestriction = hasAdminRole ? AgeRating.NotApplicable : dto.AgeRestriction.AgeRating; + user.AgeRestrictionIncludeUnknowns = hasAdminRole || dto.AgeRestriction.IncludeUnknowns; + + _unitOfWork.UserRepository.Update(user); + + if (!_unitOfWork.HasChanges() || await _unitOfWork.CommitAsync()) + { + await _eventHub.SendMessageToAsync(MessageFactory.UserUpdate, MessageFactory.UserUpdateEvent(user.Id, user.UserName), user.Id); return Ok(); } - /// - /// Register the first user (admin) on the server. Will not do anything if an admin is already confirmed - /// - /// - /// - [AllowAnonymous] - [HttpPost("register")] - public async Task> RegisterFirstUser(RegisterDto registerDto) + await _unitOfWork.RollbackAsync(); + return BadRequest("There was an exception when updating the user"); + } + + /// + /// Requests the Invite Url for the UserId. Will return error if user is already validated. + /// + /// + /// Include the "https://ip:port/" in the generated link + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpGet("invite-url")] + public async Task> GetInviteUrl(int userId, bool withBaseUrl) + { + var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); + if (user.EmailConfirmed) + return BadRequest("User is already confirmed"); + if (string.IsNullOrEmpty(user.ConfirmationToken)) + return BadRequest("Manual setup is unable to be completed. 
Please cancel and recreate the invite."); + + return GenerateEmailLink(user.ConfirmationToken, "confirm-email", user.Email, withBaseUrl); + } + + + /// + /// Invites a user to the server. Will generate a setup link for continuing setup. If the server is not accessible, no + /// email will be sent. + /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("invite")] + public async Task> InviteUser(InviteUserDto dto) + { + var adminUser = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + if (adminUser == null) return Unauthorized("You are not permitted"); + + _logger.LogInformation("{User} is inviting {Email} to the server", adminUser.UserName, dto.Email); + + // Check if there is an existing invite + if (!string.IsNullOrEmpty(dto.Email)) { - var admins = await _userManager.GetUsersInRoleAsync("Admin"); - if (admins.Count > 0) return BadRequest("Not allowed"); - - try - { - var usernameValidation = await _accountService.ValidateUsername(registerDto.Username); - if (usernameValidation.Any()) - { - return BadRequest(usernameValidation); - } - - var user = new AppUser() - { - UserName = registerDto.Username, - Email = registerDto.Email, - UserPreferences = new AppUserPreferences - { - Theme = await _unitOfWork.SiteThemeRepository.GetDefaultTheme() - }, - ApiKey = HashUtil.ApiKey() - }; - - var result = await _userManager.CreateAsync(user, registerDto.Password); - if (!result.Succeeded) return BadRequest(result.Errors); - - var token = await _userManager.GenerateEmailConfirmationTokenAsync(user); - if (string.IsNullOrEmpty(token)) return BadRequest("There was an issue generating a confirmation token."); - if (!await ConfirmEmailToken(token, user)) return BadRequest($"There was an issue validating your email: {token}"); - - - var roleResult = await _userManager.AddToRoleAsync(user, PolicyConstants.AdminRole); - if (!roleResult.Succeeded) return BadRequest(result.Errors); - - return new UserDto - { - Username = user.UserName, - 
Email = user.Email, - Token = await _tokenService.CreateToken(user), - RefreshToken = await _tokenService.CreateRefreshToken(user), - ApiKey = user.ApiKey, - Preferences = _mapper.Map(user.UserPreferences) - }; - } - catch (Exception ex) - { - _logger.LogError(ex, "Something went wrong when registering user"); - // We need to manually delete the User as we've already committed - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(registerDto.Username); - _unitOfWork.UserRepository.Delete(user); - await _unitOfWork.CommitAsync(); - } - - return BadRequest("Something went wrong when registering user"); - } - - - /// - /// Perform a login. Will send JWT Token of the logged in user back. - /// - /// - /// - [AllowAnonymous] - [HttpPost("login")] - public async Task> Login(LoginDto loginDto) - { - var user = await _userManager.Users - .Include(u => u.UserPreferences) - .SingleOrDefaultAsync(x => x.NormalizedUserName == loginDto.Username.ToUpper()); - - if (user == null) return Unauthorized("Invalid username"); - - // Check if the user has an email, if not, inform them so they can migrate - var validPassword = await _signInManager.UserManager.CheckPasswordAsync(user, loginDto.Password); - if (string.IsNullOrEmpty(user.Email) && !user.EmailConfirmed && validPassword) - { - _logger.LogCritical("User {UserName} does not have an email. Providing a one time migration", user.UserName); - return Unauthorized( - "You are missing an email on your account. Please wait while we migrate your account."); - } - - var result = await _signInManager - .CheckPasswordSignInAsync(user, loginDto.Password, true); - - if (result.IsLockedOut) - { - return Unauthorized("You've been locked out from too many authorization attempts. Please wait 10 minutes."); - } - - if (!result.Succeeded) - { - return Unauthorized(result.IsNotAllowed ? 
"You must confirm your email first" : "Your credentials are not correct."); - } - - // Update LastActive on account - user.LastActive = DateTime.Now; - user.UserPreferences ??= new AppUserPreferences - { - Theme = await _unitOfWork.SiteThemeRepository.GetDefaultTheme() - }; - - _unitOfWork.UserRepository.Update(user); - await _unitOfWork.CommitAsync(); - - _logger.LogInformation("{UserName} logged in at {Time}", user.UserName, user.LastActive); - - var dto = _mapper.Map(user); - dto.Token = await _tokenService.CreateToken(user); - dto.RefreshToken = await _tokenService.CreateRefreshToken(user); - var pref = await _unitOfWork.UserRepository.GetPreferencesAsync(user.UserName); - pref.Theme ??= await _unitOfWork.SiteThemeRepository.GetDefaultTheme(); - dto.Preferences = _mapper.Map(pref); - return dto; - } - - /// - /// Refreshes the user's JWT token - /// - /// - /// - [AllowAnonymous] - [HttpPost("refresh-token")] - public async Task> RefreshToken([FromBody] TokenRequestDto tokenRequestDto) - { - var token = await _tokenService.ValidateRefreshToken(tokenRequestDto); - if (token == null) - { - return Unauthorized(new { message = "Invalid token" }); - } - - return Ok(token); - } - - /// - /// Get All Roles back. 
See - /// - /// - [HttpGet("roles")] - public ActionResult> GetRoles() - { - return typeof(PolicyConstants) - .GetFields(BindingFlags.Public | BindingFlags.Static) - .Where(f => f.FieldType == typeof(string)) - .ToDictionary(f => f.Name, - f => (string) f.GetValue(null)).Values.ToList(); - } - - - /// - /// Resets the API Key assigned with a user - /// - /// - [HttpPost("reset-api-key")] - public async Task> ResetApiKey() - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); - - user.ApiKey = HashUtil.ApiKey(); - - if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync()) - { - return Ok(user.ApiKey); - } - - await _unitOfWork.RollbackAsync(); - return BadRequest("Something went wrong, unable to reset key"); - - } - - /// - /// Update the user account. This can only affect Username, Email (will require confirming), Roles, and Library access. - /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("update")] - public async Task UpdateAccount(UpdateUserDto dto) - { - var adminUser = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); - if (!await _unitOfWork.UserRepository.IsUserAdminAsync(adminUser)) return Unauthorized("You do not have permission"); - - var user = await _unitOfWork.UserRepository.GetUserByIdAsync(dto.UserId); - if (user == null) return BadRequest("User does not exist"); - - // Check if username is changing - if (!user.UserName.Equals(dto.Username)) - { - // Validate username change - var errors = await _accountService.ValidateUsername(dto.Username); - if (errors.Any()) return BadRequest("Username already taken"); - user.UserName = dto.Username; - _unitOfWork.UserRepository.Update(user); - } - - if (!user.Email.Equals(dto.Email)) - { - // Validate username change - var errors = await _accountService.ValidateEmail(dto.Email); - if (errors.Any()) return BadRequest("Email already registered"); - // NOTE: This needs to be handled differently, like save it in a temp 
variable in DB until email is validated. For now, I wont allow it - } - - // Update roles - var existingRoles = await _userManager.GetRolesAsync(user); - var hasAdminRole = dto.Roles.Contains(PolicyConstants.AdminRole); - if (!hasAdminRole) - { - dto.Roles.Add(PolicyConstants.PlebRole); - } - - if (existingRoles.Except(dto.Roles).Any() || dto.Roles.Except(existingRoles).Any()) - { - var roles = dto.Roles; - - var roleResult = await _userManager.RemoveFromRolesAsync(user, existingRoles); - if (!roleResult.Succeeded) return BadRequest(roleResult.Errors); - roleResult = await _userManager.AddToRolesAsync(user, roles); - if (!roleResult.Succeeded) return BadRequest(roleResult.Errors); - } - - - var allLibraries = (await _unitOfWork.LibraryRepository.GetLibrariesAsync()).ToList(); - List libraries; - if (hasAdminRole) - { - _logger.LogInformation("{UserName} is being registered as admin. Granting access to all libraries", - user.UserName); - libraries = allLibraries; - } - else - { - // Remove user from all libraries - foreach (var lib in allLibraries) - { - lib.AppUsers ??= new List(); - lib.AppUsers.Remove(user); - } - - libraries = (await _unitOfWork.LibraryRepository.GetLibraryForIdsAsync(dto.Libraries, LibraryIncludes.AppUser)).ToList(); - } - - foreach (var lib in libraries) - { - lib.AppUsers ??= new List(); - lib.AppUsers.Add(user); - } - - if (!_unitOfWork.HasChanges() || await _unitOfWork.CommitAsync()) - { - await _eventHub.SendMessageToAsync(MessageFactory.UserUpdate, MessageFactory.UserUpdateEvent(user.Id, user.UserName), user.Id); - return Ok(); - } - - await _unitOfWork.RollbackAsync(); - return BadRequest("There was an exception when updating the user"); - } - - /// - /// Requests the Invite Url for the UserId. Will return error if user is already validated. 
- /// - /// - /// Include the "https://ip:port/" in the generated link - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpGet("invite-url")] - public async Task> GetInviteUrl(int userId, bool withBaseUrl) - { - var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); - if (user.EmailConfirmed) - return BadRequest("User is already confirmed"); - if (string.IsNullOrEmpty(user.ConfirmationToken)) - return BadRequest("Manual setup is unable to be completed. Please cancel and recreate the invite."); - - return GenerateEmailLink(user.ConfirmationToken, "confirm-email", user.Email, withBaseUrl); - } - - - /// - /// Invites a user to the server. Will generate a setup link for continuing setup. If the server is not accessible, no - /// email will be sent. - /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("invite")] - public async Task> InviteUser(InviteUserDto dto) - { - var adminUser = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); - if (adminUser == null) return Unauthorized("You need to login"); - _logger.LogInformation("{User} is inviting {Email} to the server", adminUser.UserName, dto.Email); - - // Check if there is an existing invite dto.Email = dto.Email.Trim(); var emailValidationErrors = await _accountService.ValidateEmail(dto.Email); if (emailValidationErrors.Any()) @@ -417,361 +519,411 @@ namespace API.Controllers return BadRequest($"User is already registered as {invitedUser.UserName}"); return BadRequest("User is already invited under this email and has yet to accepted invite."); } - - // Create a new user - var user = new AppUser() - { - UserName = dto.Email, - Email = dto.Email, - ApiKey = HashUtil.ApiKey(), - UserPreferences = new AppUserPreferences - { - Theme = await _unitOfWork.SiteThemeRepository.GetDefaultTheme() - } - }; - - try - { - var result = await _userManager.CreateAsync(user, AccountService.DefaultPassword); - if (!result.Succeeded) return BadRequest(result.Errors); - - // 
Assign Roles - var roles = dto.Roles; - var hasAdminRole = dto.Roles.Contains(PolicyConstants.AdminRole); - if (!hasAdminRole) - { - roles.Add(PolicyConstants.PlebRole); - } - - foreach (var role in roles) - { - if (!PolicyConstants.ValidRoles.Contains(role)) continue; - var roleResult = await _userManager.AddToRoleAsync(user, role); - if (!roleResult.Succeeded) - return - BadRequest(roleResult.Errors); - } - - // Grant access to libraries - List libraries; - if (hasAdminRole) - { - _logger.LogInformation("{UserName} is being registered as admin. Granting access to all libraries", - user.UserName); - libraries = (await _unitOfWork.LibraryRepository.GetLibrariesAsync(LibraryIncludes.AppUser)).ToList(); - } - else - { - libraries = (await _unitOfWork.LibraryRepository.GetLibraryForIdsAsync(dto.Libraries, LibraryIncludes.AppUser)).ToList(); - } - - foreach (var lib in libraries) - { - lib.AppUsers ??= new List(); - lib.AppUsers.Add(user); - } - - var token = await _userManager.GenerateEmailConfirmationTokenAsync(user); - if (string.IsNullOrEmpty(token)) - { - _logger.LogError("There was an issue generating a token for the email"); - return BadRequest("There was an creating the invite user"); - } - - user.ConfirmationToken = token; - await _unitOfWork.CommitAsync(); - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an error during invite user flow, unable to create user. Deleting user for retry"); - _unitOfWork.UserRepository.Delete(user); - await _unitOfWork.CommitAsync(); - } - - try - { - var emailLink = GenerateEmailLink(user.ConfirmationToken, "confirm-email", dto.Email); - _logger.LogCritical("[Invite User]: Email Link for {UserName}: {Link}", user.UserName, emailLink); - _logger.LogCritical("[Invite User]: Token {UserName}: {Token}", user.UserName, user.ConfirmationToken); - var host = _environment.IsDevelopment() ? 
"localhost:4200" : Request.Host.ToString(); - var accessible = await _emailService.CheckIfAccessible(host); - if (accessible) - { - try - { - await _emailService.SendConfirmationEmail(new ConfirmationEmailDto() - { - EmailAddress = dto.Email, - InvitingUser = adminUser.UserName, - ServerConfirmationLink = emailLink - }); - } - catch (Exception) - { - /* Swallow exception */ - } - } - - return Ok(new InviteUserResponse - { - EmailLink = emailLink, - EmailSent = accessible - }); - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an error during invite user flow, unable to send an email"); - } - - return BadRequest("There was an error setting up your account. Please check the logs"); } - [AllowAnonymous] - [HttpPost("confirm-email")] - public async Task> ConfirmEmail(ConfirmEmailDto dto) + // Create a new user + var user = new AppUser() { - var user = await _unitOfWork.UserRepository.GetUserByEmailAsync(dto.Email); - - if (user == null) + UserName = dto.Email, + Email = dto.Email, + ApiKey = HashUtil.ApiKey(), + UserPreferences = new AppUserPreferences { - return BadRequest("The email does not match the registered email"); + Theme = await _unitOfWork.SiteThemeRepository.GetDefaultTheme() } + }; - // Validate Password and Username - var validationErrors = new List(); - validationErrors.AddRange(await _accountService.ValidateUsername(dto.Username)); - validationErrors.AddRange(await _accountService.ValidatePassword(user, dto.Password)); - - if (validationErrors.Any()) - { - return BadRequest(validationErrors); - } - - - if (!await ConfirmEmailToken(dto.Token, user)) return BadRequest("Invalid Email Token"); - - user.UserName = dto.Username; - user.ConfirmationToken = null; - var errors = await _accountService.ChangeUserPassword(user, dto.Password); - if (errors.Any()) - { - return BadRequest(errors); - } - await _unitOfWork.CommitAsync(); - - - user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(user.UserName, - AppUserIncludes.UserPreferences); 
- - // Perform Login code - return new UserDto - { - Username = user.UserName, - Email = user.Email, - Token = await _tokenService.CreateToken(user), - RefreshToken = await _tokenService.CreateRefreshToken(user), - ApiKey = user.ApiKey, - Preferences = _mapper.Map(user.UserPreferences) - }; - } - - [AllowAnonymous] - [HttpPost("confirm-password-reset")] - public async Task> ConfirmForgotPassword(ConfirmPasswordResetDto dto) + try { - try + var result = await _userManager.CreateAsync(user, AccountService.DefaultPassword); + if (!result.Succeeded) return BadRequest(result.Errors); + + // Assign Roles + var roles = dto.Roles; + var hasAdminRole = dto.Roles.Contains(PolicyConstants.AdminRole); + if (!hasAdminRole) { - var user = await _unitOfWork.UserRepository.GetUserByEmailAsync(dto.Email); - if (user == null) - { - return BadRequest("Invalid Details"); - } - - var result = await _userManager.VerifyUserTokenAsync(user, TokenOptions.DefaultProvider, - "ResetPassword", dto.Token); - if (!result) return BadRequest("Unable to reset password, your email token is not correct."); - - var errors = await _accountService.ChangeUserPassword(user, dto.Password); - return errors.Any() ? 
BadRequest(errors) : Ok("Password updated"); - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an unexpected error when confirming new password"); - return BadRequest("There was an unexpected error when confirming new password"); - } - } - - - /// - /// Will send user a link to update their password to their email or prompt them if not accessible - /// - /// - /// - [AllowAnonymous] - [HttpPost("forgot-password")] - public async Task> ForgotPassword([FromQuery] string email) - { - var user = await _unitOfWork.UserRepository.GetUserByEmailAsync(email); - if (user == null) - { - _logger.LogError("There are no users with email: {Email} but user is requesting password reset", email); - return Ok("An email will be sent to the email if it exists in our database"); + roles.Add(PolicyConstants.PlebRole); } - var roles = await _userManager.GetRolesAsync(user); - - - if (!roles.Any(r => r is PolicyConstants.AdminRole or PolicyConstants.ChangePasswordRole)) - return Unauthorized("You are not permitted to this operation."); - - var token = await _userManager.GeneratePasswordResetTokenAsync(user); - var emailLink = GenerateEmailLink(token, "confirm-reset-password", user.Email); - _logger.LogCritical("[Forgot Password]: Email Link for {UserName}: {Link}", user.UserName, emailLink); - _logger.LogCritical("[Forgot Password]: Token {UserName}: {Token}", user.UserName, token); - var host = _environment.IsDevelopment() ? 
"localhost:4200" : Request.Host.ToString(); - if (await _emailService.CheckIfAccessible(host)) + foreach (var role in roles) { - await _emailService.SendPasswordResetEmail(new PasswordResetEmailDto() - { - EmailAddress = user.Email, - ServerConfirmationLink = emailLink, - InstallId = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallId)).Value - }); - return Ok("Email sent"); + if (!PolicyConstants.ValidRoles.Contains(role)) continue; + var roleResult = await _userManager.AddToRoleAsync(user, role); + if (!roleResult.Succeeded) + return + BadRequest(roleResult.Errors); } - return Ok("Your server is not accessible. The Link to reset your password is in the logs."); - } - - [AllowAnonymous] - [HttpPost("confirm-migration-email")] - public async Task> ConfirmMigrationEmail(ConfirmMigrationEmailDto dto) - { - var user = await _unitOfWork.UserRepository.GetUserByEmailAsync(dto.Email); - if (user == null) return BadRequest("This email is not on system"); - - if (!await ConfirmEmailToken(dto.Token, user)) return BadRequest("Invalid Email Token"); - - await _unitOfWork.CommitAsync(); - - user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(user.UserName, - AppUserIncludes.UserPreferences); - - // Perform Login code - return new UserDto + // Grant access to libraries + List libraries; + if (hasAdminRole) { - Username = user.UserName, - Email = user.Email, - Token = await _tokenService.CreateToken(user), - RefreshToken = await _tokenService.CreateRefreshToken(user), - ApiKey = user.ApiKey, - Preferences = _mapper.Map(user.UserPreferences) - }; - } + _logger.LogInformation("{UserName} is being registered as admin. 
Granting access to all libraries", + user.UserName); + libraries = (await _unitOfWork.LibraryRepository.GetLibrariesAsync(LibraryIncludes.AppUser)).ToList(); + } + else + { + libraries = (await _unitOfWork.LibraryRepository.GetLibraryForIdsAsync(dto.Libraries, LibraryIncludes.AppUser)).ToList(); + } - [HttpPost("resend-confirmation-email")] - public async Task> ResendConfirmationSendEmail([FromQuery] int userId) - { - var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); - if (user == null) return BadRequest("User does not exist"); + foreach (var lib in libraries) + { + lib.AppUsers ??= new List(); + lib.AppUsers.Add(user); + } - if (string.IsNullOrEmpty(user.Email)) - return BadRequest( - "This user needs to migrate. Have them log out and login to trigger a migration flow"); - if (user.EmailConfirmed) return BadRequest("User already confirmed"); + user.AgeRestriction = hasAdminRole ? AgeRating.NotApplicable : dto.AgeRestriction.AgeRating; + user.AgeRestrictionIncludeUnknowns = hasAdminRole || dto.AgeRestriction.IncludeUnknowns; var token = await _userManager.GenerateEmailConfirmationTokenAsync(user); - var emailLink = GenerateEmailLink(token, "confirm-email", user.Email); - _logger.LogCritical("[Email Migration]: Email Link: {Link}", emailLink); - _logger.LogCritical("[Email Migration]: Token {UserName}: {Token}", user.UserName, token); - await _emailService.SendMigrationEmail(new EmailMigrationDto() + if (string.IsNullOrEmpty(token)) + { + _logger.LogError("There was an issue generating a token for the email"); + return BadRequest("There was an creating the invite user"); + } + + user.ConfirmationToken = token; + await _unitOfWork.CommitAsync(); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an error during invite user flow, unable to create user. 
Deleting user for retry"); + _unitOfWork.UserRepository.Delete(user); + await _unitOfWork.CommitAsync(); + } + + try + { + var emailLink = GenerateEmailLink(user.ConfirmationToken, "confirm-email", dto.Email); + _logger.LogCritical("[Invite User]: Email Link for {UserName}: {Link}", user.UserName, emailLink); + _logger.LogCritical("[Invite User]: Token {UserName}: {Token}", user.UserName, user.ConfirmationToken); + var host = _environment.IsDevelopment() ? "localhost:4200" : Request.Host.ToString(); + var accessible = await _emailService.CheckIfAccessible(host); + if (accessible) + { + try + { + await _emailService.SendConfirmationEmail(new ConfirmationEmailDto() + { + EmailAddress = dto.Email, + InvitingUser = adminUser.UserName, + ServerConfirmationLink = emailLink + }); + } + catch (Exception) + { + /* Swallow exception */ + } + } + + return Ok(new InviteUserResponse + { + EmailLink = emailLink, + EmailSent = accessible + }); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an error during invite user flow, unable to send an email"); + } + + return BadRequest("There was an error setting up your account. 
Please check the logs"); + } + + [AllowAnonymous] + [HttpPost("confirm-email")] + public async Task> ConfirmEmail(ConfirmEmailDto dto) + { + var user = await _unitOfWork.UserRepository.GetUserByEmailAsync(dto.Email); + + if (user == null) + { + return BadRequest("The email does not match the registered email"); + } + + // Validate Password and Username + var validationErrors = new List(); + validationErrors.AddRange(await _accountService.ValidateUsername(dto.Username)); + validationErrors.AddRange(await _accountService.ValidatePassword(user, dto.Password)); + + if (validationErrors.Any()) + { + return BadRequest(validationErrors); + } + + + if (!await ConfirmEmailToken(dto.Token, user)) return BadRequest("Invalid Email Token"); + + user.UserName = dto.Username; + user.ConfirmationToken = null; + var errors = await _accountService.ChangeUserPassword(user, dto.Password); + if (errors.Any()) + { + return BadRequest(errors); + } + await _unitOfWork.CommitAsync(); + + + user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(user.UserName, + AppUserIncludes.UserPreferences); + + // Perform Login code + return new UserDto + { + Username = user.UserName, + Email = user.Email, + Token = await _tokenService.CreateToken(user), + RefreshToken = await _tokenService.CreateRefreshToken(user), + ApiKey = user.ApiKey, + Preferences = _mapper.Map(user.UserPreferences) + }; + } + + /// + /// Final step in email update change. Given a confirmation token and the email, this will finish the email change. 
+ /// + /// This will force connected clients to re-authenticate + /// + /// + [AllowAnonymous] + [HttpPost("confirm-email-update")] + public async Task ConfirmEmailUpdate(ConfirmEmailUpdateDto dto) + { + var user = await _unitOfWork.UserRepository.GetUserByConfirmationToken(dto.Token); + if (user == null) + { + return BadRequest("Invalid Email Token"); + } + + if (!await ConfirmEmailToken(dto.Token, user)) return BadRequest("Invalid Email Token"); + + + _logger.LogInformation("User is updating email from {OldEmail} to {NewEmail}", user.Email, dto.Email); + var result = await _userManager.SetEmailAsync(user, dto.Email); + if (!result.Succeeded) + { + _logger.LogError("Unable to update email for users: {Errors}", result.Errors.Select(e => e.Description)); + return BadRequest("Unable to update email for user. Check logs"); + } + user.ConfirmationToken = null; + await _unitOfWork.CommitAsync(); + + + // For the user's connected devices to pull the new information in + await _eventHub.SendMessageToAsync(MessageFactory.UserUpdate, + MessageFactory.UserUpdateEvent(user.Id, user.UserName), user.Id); + + // Perform Login code + return Ok(); + } + + [AllowAnonymous] + [HttpPost("confirm-password-reset")] + public async Task> ConfirmForgotPassword(ConfirmPasswordResetDto dto) + { + try + { + var user = await _unitOfWork.UserRepository.GetUserByEmailAsync(dto.Email); + if (user == null) + { + return BadRequest("Invalid Details"); + } + + var result = await _userManager.VerifyUserTokenAsync(user, TokenOptions.DefaultProvider, + "ResetPassword", dto.Token); + if (!result) return BadRequest("Unable to reset password, your email token is not correct."); + + var errors = await _accountService.ChangeUserPassword(user, dto.Password); + return errors.Any() ? 
BadRequest(errors) : Ok("Password updated"); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an unexpected error when confirming new password"); + return BadRequest("There was an unexpected error when confirming new password"); + } + } + + + /// + /// Will send user a link to update their password to their email or prompt them if not accessible + /// + /// + /// + [AllowAnonymous] + [HttpPost("forgot-password")] + public async Task> ForgotPassword([FromQuery] string email) + { + var user = await _unitOfWork.UserRepository.GetUserByEmailAsync(email); + if (user == null) + { + _logger.LogError("There are no users with email: {Email} but user is requesting password reset", email); + return Ok("An email will be sent to the email if it exists in our database"); + } + + var roles = await _userManager.GetRolesAsync(user); + if (!roles.Any(r => r is PolicyConstants.AdminRole or PolicyConstants.ChangePasswordRole)) + return Unauthorized("You are not permitted to this operation."); + + if (string.IsNullOrEmpty(user.Email) || !user.EmailConfirmed) + return BadRequest("You do not have an email on account or it has not been confirmed"); + + var token = await _userManager.GeneratePasswordResetTokenAsync(user); + var emailLink = GenerateEmailLink(token, "confirm-reset-password", user.Email); + _logger.LogCritical("[Forgot Password]: Email Link for {UserName}: {Link}", user.UserName, emailLink); + var host = _environment.IsDevelopment() ? 
"localhost:4200" : Request.Host.ToString(); + if (await _emailService.CheckIfAccessible(host)) + { + await _emailService.SendPasswordResetEmail(new PasswordResetEmailDto() { EmailAddress = user.Email, - Username = user.UserName, ServerConfirmationLink = emailLink, InstallId = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallId)).Value }); - - - return Ok(emailLink); + return Ok("Email sent"); } - private string GenerateEmailLink(string token, string routePart, string email, bool withHost = true) + return Ok("Your server is not accessible. The Link to reset your password is in the logs."); + } + + [HttpGet("email-confirmed")] + public async Task> IsEmailConfirmed() + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + if (user == null) return Unauthorized(); + + return Ok(user.EmailConfirmed); + } + + [AllowAnonymous] + [HttpPost("confirm-migration-email")] + public async Task> ConfirmMigrationEmail(ConfirmMigrationEmailDto dto) + { + var user = await _unitOfWork.UserRepository.GetUserByEmailAsync(dto.Email); + if (user == null) return BadRequest("This email is not on system"); + + if (!await ConfirmEmailToken(dto.Token, user)) return BadRequest("Invalid Email Token"); + + await _unitOfWork.CommitAsync(); + + user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(user.UserName, + AppUserIncludes.UserPreferences); + + // Perform Login code + return new UserDto { - var host = _environment.IsDevelopment() ? 
"localhost:4200" : Request.Host.ToString(); - if (withHost) return $"{Request.Scheme}://{host}{Request.PathBase}/registration/{routePart}?token={HttpUtility.UrlEncode(token)}&email={HttpUtility.UrlEncode(email)}"; - return $"registration/{routePart}?token={HttpUtility.UrlEncode(token)}&email={HttpUtility.UrlEncode(email)}"; - } + Username = user.UserName, + Email = user.Email, + Token = await _tokenService.CreateToken(user), + RefreshToken = await _tokenService.CreateRefreshToken(user), + ApiKey = user.ApiKey, + Preferences = _mapper.Map(user.UserPreferences) + }; + } - /// - /// This is similar to invite. Essentially we authenticate the user's password then go through invite email flow - /// - /// - /// - [AllowAnonymous] - [HttpPost("migrate-email")] - public async Task> MigrateEmail(MigrateUserEmailDto dto) + [HttpPost("resend-confirmation-email")] + public async Task> ResendConfirmationSendEmail([FromQuery] int userId) + { + var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); + if (user == null) return BadRequest("User does not exist"); + + if (string.IsNullOrEmpty(user.Email)) + return BadRequest( + "This user needs to migrate. 
Have them log out and login to trigger a migration flow"); + if (user.EmailConfirmed) return BadRequest("User already confirmed"); + + var token = await _userManager.GenerateEmailConfirmationTokenAsync(user); + var emailLink = GenerateEmailLink(token, "confirm-email", user.Email); + _logger.LogCritical("[Email Migration]: Email Link: {Link}", emailLink); + _logger.LogCritical("[Email Migration]: Token {UserName}: {Token}", user.UserName, token); + await _emailService.SendMigrationEmail(new EmailMigrationDto() { - // Check if there is an existing invite - var emailValidationErrors = await _accountService.ValidateEmail(dto.Email); - if (emailValidationErrors.Any()) - { - var invitedUser = await _unitOfWork.UserRepository.GetUserByEmailAsync(dto.Email); - if (await _userManager.IsEmailConfirmedAsync(invitedUser)) - return BadRequest($"User is already registered as {invitedUser.UserName}"); - - _logger.LogInformation("A user is attempting to login, but hasn't accepted email invite"); - return BadRequest("User is already invited under this email and has yet to accepted invite."); - } + EmailAddress = user.Email, + Username = user.UserName, + ServerConfirmationLink = emailLink, + InstallId = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallId)).Value + }); - var user = await _userManager.Users - .Include(u => u.UserPreferences) - .SingleOrDefaultAsync(x => x.NormalizedUserName == dto.Username.ToUpper()); - if (user == null) return BadRequest("Invalid username"); + return Ok(emailLink); + } - var validPassword = await _signInManager.UserManager.CheckPasswordAsync(user, dto.Password); - if (!validPassword) return BadRequest("Your credentials are not correct"); + private string GenerateEmailLink(string token, string routePart, string email, bool withHost = true) + { + var host = _environment.IsDevelopment() ? 
"localhost:4200" : Request.Host.ToString(); + if (withHost) return $"{Request.Scheme}://{host}{Request.PathBase}/registration/{routePart}?token={HttpUtility.UrlEncode(token)}&email={HttpUtility.UrlEncode(email)}"; + return $"registration/{routePart}?token={HttpUtility.UrlEncode(token)}&email={HttpUtility.UrlEncode(email)}"; + } - try - { - var token = await _userManager.GenerateEmailConfirmationTokenAsync(user); - - user.Email = dto.Email; - if (!await ConfirmEmailToken(token, user)) return BadRequest("There was a critical error during migration"); - _unitOfWork.UserRepository.Update(user); - - await _unitOfWork.CommitAsync(); - - return Ok(); - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an issue during email migration. Contact support"); - _unitOfWork.UserRepository.Delete(user); - await _unitOfWork.CommitAsync(); - } - - return BadRequest("There was an error setting up your account. Please check the logs"); - } - - private async Task ConfirmEmailToken(string token, AppUser user) + /// + /// This is similar to invite. 
Essentially we authenticate the user's password then go through invite email flow + /// + /// + /// + [AllowAnonymous] + [HttpPost("migrate-email")] + public async Task> MigrateEmail(MigrateUserEmailDto dto) + { + // Check if there is an existing invite + var emailValidationErrors = await _accountService.ValidateEmail(dto.Email); + if (emailValidationErrors.Any()) { - var result = await _userManager.ConfirmEmailAsync(user, token); - if (result.Succeeded) return true; - - - - _logger.LogCritical("[Account] Email validation failed"); - if (!result.Errors.Any()) return false; - - foreach (var error in result.Errors) - { - _logger.LogCritical("[Account] Email validation error: {Message}", error.Description); - } - - return false; + var invitedUser = await _unitOfWork.UserRepository.GetUserByEmailAsync(dto.Email); + if (await _userManager.IsEmailConfirmedAsync(invitedUser)) + return BadRequest($"User is already registered as {invitedUser.UserName}"); + _logger.LogInformation("A user is attempting to login, but hasn't accepted email invite"); + return BadRequest("User is already invited under this email and has yet to accepted invite."); } + + + var user = await _userManager.Users + .Include(u => u.UserPreferences) + .SingleOrDefaultAsync(x => x.NormalizedUserName == dto.Username.ToUpper()); + if (user == null) return BadRequest("Invalid username"); + + var validPassword = await _signInManager.UserManager.CheckPasswordAsync(user, dto.Password); + if (!validPassword) return BadRequest("Your credentials are not correct"); + + try + { + var token = await _userManager.GenerateEmailConfirmationTokenAsync(user); + + user.Email = dto.Email; + if (!await ConfirmEmailToken(token, user)) return BadRequest("There was a critical error during migration"); + _unitOfWork.UserRepository.Update(user); + + await _unitOfWork.CommitAsync(); + + return Ok(); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an issue during email migration. 
Contact support"); + _unitOfWork.UserRepository.Delete(user); + await _unitOfWork.CommitAsync(); + } + + return BadRequest("There was an error setting up your account. Please check the logs"); + } + + private async Task ConfirmEmailToken(string token, AppUser user) + { + var result = await _userManager.ConfirmEmailAsync(user, token); + if (result.Succeeded) return true; + + + + _logger.LogCritical("[Account] Email validation failed"); + if (!result.Errors.Any()) return false; + + foreach (var error in result.Errors) + { + _logger.LogCritical("[Account] Email validation error: {Message}", error.Description); + } + + return false; + } } diff --git a/API/Controllers/AdminController.cs b/API/Controllers/AdminController.cs index 045cc63dc..25bde9ddb 100644 --- a/API/Controllers/AdminController.cs +++ b/API/Controllers/AdminController.cs @@ -4,27 +4,26 @@ using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Mvc; -namespace API.Controllers +namespace API.Controllers; + +public class AdminController : BaseApiController { - public class AdminController : BaseApiController + private readonly UserManager _userManager; + + public AdminController(UserManager userManager) { - private readonly UserManager _userManager; + _userManager = userManager; + } - public AdminController(UserManager userManager) - { - _userManager = userManager; - } - - /// - /// Checks if an admin exists on the system. This is essentially a check to validate if the system has been setup. - /// - /// - [AllowAnonymous] - [HttpGet("exists")] - public async Task> AdminExists() - { - var users = await _userManager.GetUsersInRoleAsync("Admin"); - return users.Count > 0; - } + /// + /// Checks if an admin exists on the system. This is essentially a check to validate if the system has been setup. 
+ /// + /// + [AllowAnonymous] + [HttpGet("exists")] + public async Task> AdminExists() + { + var users = await _userManager.GetUsersInRoleAsync("Admin"); + return users.Count > 0; } } diff --git a/API/Controllers/BaseApiController.cs b/API/Controllers/BaseApiController.cs index dfedd7a0a..2ac2b5cce 100644 --- a/API/Controllers/BaseApiController.cs +++ b/API/Controllers/BaseApiController.cs @@ -1,12 +1,11 @@ using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; -namespace API.Controllers +namespace API.Controllers; + +[ApiController] +[Route("api/[controller]")] +[Authorize] +public class BaseApiController : ControllerBase { - [ApiController] - [Route("api/[controller]")] - [Authorize] - public class BaseApiController : ControllerBase - { - } } diff --git a/API/Controllers/BookController.cs b/API/Controllers/BookController.cs index d254af8dc..a3cae9d80 100644 --- a/API/Controllers/BookController.cs +++ b/API/Controllers/BookController.cs @@ -13,151 +13,147 @@ using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using VersOne.Epub; -namespace API.Controllers +namespace API.Controllers; + +public class BookController : BaseApiController { - public class BookController : BaseApiController + private readonly IBookService _bookService; + private readonly IUnitOfWork _unitOfWork; + private readonly ICacheService _cacheService; + + public BookController(IBookService bookService, + IUnitOfWork unitOfWork, ICacheService cacheService) { - private readonly IBookService _bookService; - private readonly IUnitOfWork _unitOfWork; - private readonly ICacheService _cacheService; - - public BookController(IBookService bookService, - IUnitOfWork unitOfWork, ICacheService cacheService) - { - _bookService = bookService; - _unitOfWork = unitOfWork; - _cacheService = cacheService; - } - - /// - /// Retrieves information for the PDF and Epub reader - /// - /// This only applies to Epub or PDF files - /// - /// - [HttpGet("{chapterId}/book-info")] - 
public async Task> GetBookInfo(int chapterId) - { - var dto = await _unitOfWork.ChapterRepository.GetChapterInfoDtoAsync(chapterId); - var bookTitle = string.Empty; - switch (dto.SeriesFormat) - { - case MangaFormat.Epub: - { - var mangaFile = (await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId)).First(); - using var book = await EpubReader.OpenBookAsync(mangaFile.FilePath, BookService.BookReaderOptions); - bookTitle = book.Title; - break; - } - case MangaFormat.Pdf: - { - var mangaFile = (await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId)).First(); - if (string.IsNullOrEmpty(bookTitle)) - { - // Override with filename - bookTitle = Path.GetFileNameWithoutExtension(mangaFile.FilePath); - } - - break; - } - case MangaFormat.Image: - break; - case MangaFormat.Archive: - break; - case MangaFormat.Unknown: - break; - default: - throw new ArgumentOutOfRangeException(); - } - - return Ok(new BookInfoDto() - { - ChapterNumber = dto.ChapterNumber, - VolumeNumber = dto.VolumeNumber, - VolumeId = dto.VolumeId, - BookTitle = bookTitle, - SeriesName = dto.SeriesName, - SeriesFormat = dto.SeriesFormat, - SeriesId = dto.SeriesId, - LibraryId = dto.LibraryId, - IsSpecial = dto.IsSpecial, - Pages = dto.Pages, - }); - } - - /// - /// This is an entry point to fetch resources from within an epub chapter/book. 
- /// - /// - /// - /// - [HttpGet("{chapterId}/book-resources")] - [ResponseCache(Duration = 60 * 1, Location = ResponseCacheLocation.Client, NoStore = false)] - [AllowAnonymous] - public async Task GetBookPageResources(int chapterId, [FromQuery] string file) - { - if (chapterId <= 0) return BadRequest("Chapter is not valid"); - var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId); - using var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath, BookService.BookReaderOptions); - - var key = BookService.CleanContentKeys(file); - if (!book.Content.AllFiles.ContainsKey(key)) return BadRequest("File was not found in book"); - - var bookFile = book.Content.AllFiles[key]; - var content = await bookFile.ReadContentAsBytesAsync(); - - var contentType = BookService.GetContentType(bookFile.ContentType); - return File(content, contentType, $"{chapterId}-{file}"); - } - - /// - /// This will return a list of mappings from ID -> page num. ID will be the xhtml key and page num will be the reading order - /// this is used to rewrite anchors in the book text so that we always load properly in our reader. - /// - /// This is essentially building the table of contents - /// - /// - [HttpGet("{chapterId}/chapters")] - public async Task>> GetBookChapters(int chapterId) - { - if (chapterId <= 0) return BadRequest("Chapter is not valid"); - - var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId); - try - { - return Ok(await _bookService.GenerateTableOfContents(chapter)); - } - catch (KavitaException ex) - { - return BadRequest(ex.Message); - } - } - - - /// - /// This returns a single page within the epub book. All html will be rewritten to be scoped within our reader, - /// all css is scoped, etc. 
- /// - /// - /// - /// - [HttpGet("{chapterId}/book-page")] - public async Task> GetBookPage(int chapterId, [FromQuery] int page) - { - var chapter = await _cacheService.Ensure(chapterId); - var path = _cacheService.GetCachedFile(chapter); - - var baseUrl = "//" + Request.Host + Request.PathBase + "/api/"; - - try - { - return Ok(await _bookService.GetBookPage(page, chapterId, path, baseUrl)); - } - catch (KavitaException ex) - { - return BadRequest(ex.Message); - } - } - + _bookService = bookService; + _unitOfWork = unitOfWork; + _cacheService = cacheService; } + + /// + /// Retrieves information for the PDF and Epub reader + /// + /// This only applies to Epub or PDF files + /// + /// + [HttpGet("{chapterId}/book-info")] + public async Task> GetBookInfo(int chapterId) + { + var dto = await _unitOfWork.ChapterRepository.GetChapterInfoDtoAsync(chapterId); + var bookTitle = string.Empty; + switch (dto.SeriesFormat) + { + case MangaFormat.Epub: + { + var mangaFile = (await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId)).First(); + using var book = await EpubReader.OpenBookAsync(mangaFile.FilePath, BookService.BookReaderOptions); + bookTitle = book.Title; + break; + } + case MangaFormat.Pdf: + { + var mangaFile = (await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId)).First(); + if (string.IsNullOrEmpty(bookTitle)) + { + // Override with filename + bookTitle = Path.GetFileNameWithoutExtension(mangaFile.FilePath); + } + + break; + } + case MangaFormat.Image: + case MangaFormat.Archive: + case MangaFormat.Unknown: + default: + break; + } + + return Ok(new BookInfoDto() + { + ChapterNumber = dto.ChapterNumber, + VolumeNumber = dto.VolumeNumber, + VolumeId = dto.VolumeId, + BookTitle = bookTitle, + SeriesName = dto.SeriesName, + SeriesFormat = dto.SeriesFormat, + SeriesId = dto.SeriesId, + LibraryId = dto.LibraryId, + IsSpecial = dto.IsSpecial, + Pages = dto.Pages, + }); + } + + /// + /// This is an entry point to fetch resources from 
within an epub chapter/book. + /// + /// + /// + /// + [HttpGet("{chapterId}/book-resources")] + [ResponseCache(Duration = 60 * 1, Location = ResponseCacheLocation.Client, NoStore = false)] + [AllowAnonymous] + public async Task GetBookPageResources(int chapterId, [FromQuery] string file) + { + if (chapterId <= 0) return BadRequest("Chapter is not valid"); + var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId); + using var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath, BookService.BookReaderOptions); + + var key = BookService.CleanContentKeys(file); + if (!book.Content.AllFiles.ContainsKey(key)) return BadRequest("File was not found in book"); + + var bookFile = book.Content.AllFiles[key]; + var content = await bookFile.ReadContentAsBytesAsync(); + + var contentType = BookService.GetContentType(bookFile.ContentType); + return File(content, contentType, $"{chapterId}-{file}"); + } + + /// + /// This will return a list of mappings from ID -> page num. ID will be the xhtml key and page num will be the reading order + /// this is used to rewrite anchors in the book text so that we always load properly in our reader. + /// + /// This is essentially building the table of contents + /// + /// + [HttpGet("{chapterId}/chapters")] + public async Task>> GetBookChapters(int chapterId) + { + if (chapterId <= 0) return BadRequest("Chapter is not valid"); + + var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId); + try + { + return Ok(await _bookService.GenerateTableOfContents(chapter)); + } + catch (KavitaException ex) + { + return BadRequest(ex.Message); + } + } + + + /// + /// This returns a single page within the epub book. All html will be rewritten to be scoped within our reader, + /// all css is scoped, etc. 
+ /// + /// + /// + /// + [HttpGet("{chapterId}/book-page")] + public async Task> GetBookPage(int chapterId, [FromQuery] int page) + { + var chapter = await _cacheService.Ensure(chapterId); + var path = _cacheService.GetCachedFile(chapter); + + var baseUrl = "//" + Request.Host + Request.PathBase + "/api/"; + + try + { + return Ok(await _bookService.GetBookPage(page, chapterId, path, baseUrl)); + } + catch (KavitaException ex) + { + return BadRequest(ex.Message); + } + } + } diff --git a/API/Controllers/CollectionController.cs b/API/Controllers/CollectionController.cs index f030bd166..33bde22b6 100644 --- a/API/Controllers/CollectionController.cs +++ b/API/Controllers/CollectionController.cs @@ -11,182 +11,183 @@ using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.SignalR; -namespace API.Controllers +namespace API.Controllers; + +/// +/// APIs for Collections +/// +public class CollectionController : BaseApiController { - /// - /// APIs for Collections - /// - public class CollectionController : BaseApiController + private readonly IUnitOfWork _unitOfWork; + private readonly IEventHub _eventHub; + + /// + public CollectionController(IUnitOfWork unitOfWork, IEventHub eventHub) { - private readonly IUnitOfWork _unitOfWork; - private readonly IEventHub _eventHub; + _unitOfWork = unitOfWork; + _eventHub = eventHub; + } - /// - public CollectionController(IUnitOfWork unitOfWork, IEventHub eventHub) + /// + /// Return a list of all collection tags on the server + /// + /// + [HttpGet] + public async Task> GetAllTags() + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user); + if (isAdmin) { - _unitOfWork = unitOfWork; - _eventHub = eventHub; + return await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync(); } - /// - /// Return a list of all collection tags on the server - /// - /// - [HttpGet] - public async 
Task> GetAllTags() + return await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(user.Id); + } + + /// + /// Searches against the collection tags on the DB and returns matches that meet the search criteria. + /// Search strings will be cleaned of certain fields, like % + /// + /// Search term + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpGet("search")] + public async Task> SearchTags(string queryString) + { + queryString ??= ""; + queryString = queryString.Replace(@"%", string.Empty); + if (queryString.Length == 0) return await GetAllTags(); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + return await _unitOfWork.CollectionTagRepository.SearchTagDtosAsync(queryString, user.Id); + } + + /// + /// Updates an existing tag with a new title, promotion status, and summary. + /// UI does not contain controls to update title + /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("update")] + public async Task UpdateTagPromotion(CollectionTagDto updatedTag) + { + var existingTag = await _unitOfWork.CollectionTagRepository.GetTagAsync(updatedTag.Id); + if (existingTag == null) return BadRequest("This tag does not exist"); + + existingTag.Promoted = updatedTag.Promoted; + existingTag.Title = updatedTag.Title.Trim(); + existingTag.NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(updatedTag.Title).ToUpper(); + existingTag.Summary = updatedTag.Summary.Trim(); + + if (_unitOfWork.HasChanges()) { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); - var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user); - if (isAdmin) - { - return await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync(); - } - return await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(); - } - - /// - /// Searches against the collection tags on the DB and returns matches that meet the search criteria. 
- /// Search strings will be cleaned of certain fields, like % - /// - /// Search term - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpGet("search")] - public async Task> SearchTags(string queryString) - { - queryString ??= ""; - queryString = queryString.Replace(@"%", string.Empty); - if (queryString.Length == 0) return await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync(); - - return await _unitOfWork.CollectionTagRepository.SearchTagDtosAsync(queryString); - } - - /// - /// Updates an existing tag with a new title, promotion status, and summary. - /// UI does not contain controls to update title - /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("update")] - public async Task UpdateTagPromotion(CollectionTagDto updatedTag) - { - var existingTag = await _unitOfWork.CollectionTagRepository.GetTagAsync(updatedTag.Id); - if (existingTag == null) return BadRequest("This tag does not exist"); - - existingTag.Promoted = updatedTag.Promoted; - existingTag.Title = updatedTag.Title.Trim(); - existingTag.NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(updatedTag.Title).ToUpper(); - existingTag.Summary = updatedTag.Summary.Trim(); - - if (_unitOfWork.HasChanges()) - { - if (await _unitOfWork.CommitAsync()) - { - return Ok("Tag updated successfully"); - } - } - else + if (await _unitOfWork.CommitAsync()) { return Ok("Tag updated successfully"); } - - return BadRequest("Something went wrong, please try again"); + } + else + { + return Ok("Tag updated successfully"); } - /// - /// Adds a collection tag onto multiple Series. If tag id is 0, this will create a new tag. - /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("update-for-series")] - public async Task AddToMultipleSeries(CollectionTagBulkAddDto dto) + return BadRequest("Something went wrong, please try again"); + } + + /// + /// Adds a collection tag onto multiple Series. If tag id is 0, this will create a new tag. 
+ /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("update-for-series")] + public async Task AddToMultipleSeries(CollectionTagBulkAddDto dto) + { + var tag = await _unitOfWork.CollectionTagRepository.GetFullTagAsync(dto.CollectionTagId); + if (tag == null) { - var tag = await _unitOfWork.CollectionTagRepository.GetFullTagAsync(dto.CollectionTagId); - if (tag == null) + tag = DbFactory.CollectionTag(0, dto.CollectionTagTitle, String.Empty, false); + _unitOfWork.CollectionTagRepository.Add(tag); + } + + + var seriesMetadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(dto.SeriesIds); + foreach (var metadata in seriesMetadatas) + { + if (!metadata.CollectionTags.Any(t => t.Title.Equals(tag.Title, StringComparison.InvariantCulture))) { - tag = DbFactory.CollectionTag(0, dto.CollectionTagTitle, String.Empty, false); - _unitOfWork.CollectionTagRepository.Add(tag); + metadata.CollectionTags.Add(tag); + _unitOfWork.SeriesMetadataRepository.Update(metadata); + } + } + + if (!_unitOfWork.HasChanges()) return Ok(); + if (await _unitOfWork.CommitAsync()) + { + return Ok(); + } + return BadRequest("There was an issue updating series with collection tag"); + } + + /// + /// For a given tag, update the summary if summary has changed and remove a set of series from the tag. 
+ /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("update-series")] + public async Task UpdateSeriesForTag(UpdateSeriesForTagDto updateSeriesForTagDto) + { + try + { + var tag = await _unitOfWork.CollectionTagRepository.GetFullTagAsync(updateSeriesForTagDto.Tag.Id); + if (tag == null) return BadRequest("Not a valid Tag"); + tag.SeriesMetadatas ??= new List(); + + // Check if Tag has updated (Summary) + if (tag.Summary == null || !tag.Summary.Equals(updateSeriesForTagDto.Tag.Summary)) + { + tag.Summary = updateSeriesForTagDto.Tag.Summary; + _unitOfWork.CollectionTagRepository.Update(tag); + } + + tag.CoverImageLocked = updateSeriesForTagDto.Tag.CoverImageLocked; + + if (!updateSeriesForTagDto.Tag.CoverImageLocked) + { + tag.CoverImageLocked = false; + tag.CoverImage = string.Empty; + await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, + MessageFactory.CoverUpdateEvent(tag.Id, MessageFactoryEntityTypes.CollectionTag), false); + _unitOfWork.CollectionTagRepository.Update(tag); + } + + foreach (var seriesIdToRemove in updateSeriesForTagDto.SeriesIdsToRemove) + { + tag.SeriesMetadatas.Remove(tag.SeriesMetadatas.Single(sm => sm.SeriesId == seriesIdToRemove)); } - var seriesMetadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIdsAsync(dto.SeriesIds); - foreach (var metadata in seriesMetadatas) + if (tag.SeriesMetadatas.Count == 0) { - if (!metadata.CollectionTags.Any(t => t.Title.Equals(tag.Title, StringComparison.InvariantCulture))) - { - metadata.CollectionTags.Add(tag); - _unitOfWork.SeriesMetadataRepository.Update(metadata); - } + _unitOfWork.CollectionTagRepository.Remove(tag); } - if (!_unitOfWork.HasChanges()) return Ok(); + if (!_unitOfWork.HasChanges()) return Ok("No updates"); + if (await _unitOfWork.CommitAsync()) { - return Ok(); + return Ok("Tag updated"); } - return BadRequest("There was an issue updating series with collection tag"); } - - /// - /// For a given tag, update the summary if summary has changed and 
remove a set of series from the tag. - /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("update-series")] - public async Task UpdateSeriesForTag(UpdateSeriesForTagDto updateSeriesForTagDto) + catch (Exception) { - try - { - var tag = await _unitOfWork.CollectionTagRepository.GetFullTagAsync(updateSeriesForTagDto.Tag.Id); - if (tag == null) return BadRequest("Not a valid Tag"); - tag.SeriesMetadatas ??= new List(); - - // Check if Tag has updated (Summary) - if (tag.Summary == null || !tag.Summary.Equals(updateSeriesForTagDto.Tag.Summary)) - { - tag.Summary = updateSeriesForTagDto.Tag.Summary; - _unitOfWork.CollectionTagRepository.Update(tag); - } - - tag.CoverImageLocked = updateSeriesForTagDto.Tag.CoverImageLocked; - - if (!updateSeriesForTagDto.Tag.CoverImageLocked) - { - tag.CoverImageLocked = false; - tag.CoverImage = string.Empty; - await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, - MessageFactory.CoverUpdateEvent(tag.Id, MessageFactoryEntityTypes.CollectionTag), false); - _unitOfWork.CollectionTagRepository.Update(tag); - } - - foreach (var seriesIdToRemove in updateSeriesForTagDto.SeriesIdsToRemove) - { - tag.SeriesMetadatas.Remove(tag.SeriesMetadatas.Single(sm => sm.SeriesId == seriesIdToRemove)); - } - - - if (tag.SeriesMetadatas.Count == 0) - { - _unitOfWork.CollectionTagRepository.Remove(tag); - } - - if (!_unitOfWork.HasChanges()) return Ok("No updates"); - - if (await _unitOfWork.CommitAsync()) - { - return Ok("Tag updated"); - } - } - catch (Exception) - { - await _unitOfWork.RollbackAsync(); - } - - - return BadRequest("Something went wrong. Please try again."); + await _unitOfWork.RollbackAsync(); } + + + return BadRequest("Something went wrong. 
Please try again."); } } diff --git a/API/Controllers/DeviceController.cs b/API/Controllers/DeviceController.cs new file mode 100644 index 000000000..3d67d2d7f --- /dev/null +++ b/API/Controllers/DeviceController.cs @@ -0,0 +1,114 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using API.Data; +using API.Data.Repositories; +using API.DTOs.Device; +using API.Extensions; +using API.Services; +using API.SignalR; +using ExCSS; +using Kavita.Common; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; + +namespace API.Controllers; + +/// +/// Responsible interacting and creating Devices +/// +public class DeviceController : BaseApiController +{ + private readonly IUnitOfWork _unitOfWork; + private readonly IDeviceService _deviceService; + private readonly IEmailService _emailService; + private readonly IEventHub _eventHub; + + public DeviceController(IUnitOfWork unitOfWork, IDeviceService deviceService, IEmailService emailService, IEventHub eventHub) + { + _unitOfWork = unitOfWork; + _deviceService = deviceService; + _emailService = emailService; + _eventHub = eventHub; + } + + + [HttpPost("create")] + public async Task CreateOrUpdateDevice(CreateDeviceDto dto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Devices); + var device = await _deviceService.Create(dto, user); + + if (device == null) return BadRequest("There was an error when creating the device"); + + return Ok(); + } + + [HttpPost("update")] + public async Task UpdateDevice(UpdateDeviceDto dto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Devices); + var device = await _deviceService.Update(dto, user); + + if (device == null) return BadRequest("There was an error when updating the device"); + + return Ok(); + } + + /// + /// Deletes the device from the user + /// + /// + /// + [HttpDelete] 
+ public async Task DeleteDevice(int deviceId) + { + if (deviceId <= 0) return BadRequest("Not a valid deviceId"); + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Devices); + if (await _deviceService.Delete(user, deviceId)) return Ok(); + + return BadRequest("Could not delete device"); + } + + [HttpGet] + public async Task>> GetDevices() + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return Ok(await _unitOfWork.DeviceRepository.GetDevicesForUserAsync(userId)); + } + + [HttpPost("send-to")] + public async Task SendToDevice(SendToDeviceDto dto) + { + if (dto.ChapterIds.Any(i => i < 0)) return BadRequest("ChapterIds must be greater than 0"); + if (dto.DeviceId < 0) return BadRequest("DeviceId must be greater than 0"); + + if (await _emailService.IsDefaultEmailService()) + return BadRequest("Send to device cannot be used with Kavita's email service. Please configure your own."); + + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + await _eventHub.SendMessageToAsync(MessageFactory.NotificationProgress, MessageFactory.SendingToDeviceEvent($"Transferring files to your device", "started"), userId); + try + { + var success = await _deviceService.SendTo(dto.ChapterIds, dto.DeviceId); + if (success) return Ok(); + } + catch (KavitaException ex) + { + return BadRequest(ex.Message); + } + finally + { + await _eventHub.SendMessageToAsync(MessageFactory.SendingToDevice, MessageFactory.SendingToDeviceEvent($"Transferring files to your device", "ended"), userId); + } + + return BadRequest("There was an error sending the file to the device"); + } + + + +} + + diff --git a/API/Controllers/DownloadController.cs b/API/Controllers/DownloadController.cs index 8753202f8..a2fae1b9c 100644 --- a/API/Controllers/DownloadController.cs +++ b/API/Controllers/DownloadController.cs @@ -16,207 +16,209 @@ using Microsoft.AspNetCore.Identity; 
using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Logging; -namespace API.Controllers +namespace API.Controllers; + +/// +/// All APIs related to downloading entities from the system. Requires Download Role or Admin Role. +/// +[Authorize(Policy="RequireDownloadRole")] +public class DownloadController : BaseApiController { - /// - /// All APIs related to downloading entities from the system. Requires Download Role or Admin Role. - /// - [Authorize(Policy="RequireDownloadRole")] - public class DownloadController : BaseApiController + private readonly IUnitOfWork _unitOfWork; + private readonly IArchiveService _archiveService; + private readonly IDirectoryService _directoryService; + private readonly IDownloadService _downloadService; + private readonly IEventHub _eventHub; + private readonly ILogger _logger; + private readonly IBookmarkService _bookmarkService; + private readonly IAccountService _accountService; + private const string DefaultContentType = "application/octet-stream"; + + public DownloadController(IUnitOfWork unitOfWork, IArchiveService archiveService, IDirectoryService directoryService, + IDownloadService downloadService, IEventHub eventHub, ILogger logger, IBookmarkService bookmarkService, + IAccountService accountService) { - private readonly IUnitOfWork _unitOfWork; - private readonly IArchiveService _archiveService; - private readonly IDirectoryService _directoryService; - private readonly IDownloadService _downloadService; - private readonly IEventHub _eventHub; - private readonly ILogger _logger; - private readonly IBookmarkService _bookmarkService; - private const string DefaultContentType = "application/octet-stream"; - - public DownloadController(IUnitOfWork unitOfWork, IArchiveService archiveService, IDirectoryService directoryService, - IDownloadService downloadService, IEventHub eventHub, ILogger logger, IBookmarkService bookmarkService) - { - _unitOfWork = unitOfWork; - _archiveService = archiveService; - _directoryService = 
directoryService; - _downloadService = downloadService; - _eventHub = eventHub; - _logger = logger; - _bookmarkService = bookmarkService; - } - - /// - /// For a given volume, return the size in bytes - /// - /// - /// - [HttpGet("volume-size")] - public async Task> GetVolumeSize(int volumeId) - { - var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId); - return Ok(_directoryService.GetTotalSize(files.Select(c => c.FilePath))); - } - - /// - /// For a given chapter, return the size in bytes - /// - /// - /// - [HttpGet("chapter-size")] - public async Task> GetChapterSize(int chapterId) - { - var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId); - return Ok(_directoryService.GetTotalSize(files.Select(c => c.FilePath))); - } - - /// - /// For a series, return the size in bytes - /// - /// - /// - [HttpGet("series-size")] - public async Task> GetSeriesSize(int seriesId) - { - var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId); - return Ok(_directoryService.GetTotalSize(files.Select(c => c.FilePath))); - } - - - /// - /// Downloads all chapters within a volume. If the chapters are multiple zips, they will all be zipped up. 
- /// - /// - /// - [Authorize(Policy="RequireDownloadRole")] - [HttpGet("volume")] - public async Task DownloadVolume(int volumeId) - { - if (!await HasDownloadPermission()) return BadRequest("You do not have permission"); - - var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId); - var volume = await _unitOfWork.VolumeRepository.GetVolumeByIdAsync(volumeId); - var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId); - try - { - return await DownloadFiles(files, $"download_{User.GetUsername()}_v{volumeId}", $"{series.Name} - Volume {volume.Number}.zip"); - } - catch (KavitaException ex) - { - return BadRequest(ex.Message); - } - } - - private async Task HasDownloadPermission() - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); - return await _downloadService.HasDownloadPermission(user); - } - - private ActionResult GetFirstFileDownload(IEnumerable files) - { - var (zipFile, contentType, fileDownloadName) = _downloadService.GetFirstFileDownload(files); - return PhysicalFile(zipFile, contentType, fileDownloadName, true); - } - - /// - /// Returns the zip for a single chapter. If the chapter contains multiple files, they will be zipped. 
- /// - /// - /// - [HttpGet("chapter")] - public async Task DownloadChapter(int chapterId) - { - if (!await HasDownloadPermission()) return BadRequest("You do not have permission"); - var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId); - var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId); - var volume = await _unitOfWork.VolumeRepository.GetVolumeByIdAsync(chapter.VolumeId); - var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId); - try - { - return await DownloadFiles(files, $"download_{User.GetUsername()}_c{chapterId}", $"{series.Name} - Chapter {chapter.Number}.zip"); - } - catch (KavitaException ex) - { - return BadRequest(ex.Message); - } - } - - private async Task DownloadFiles(ICollection files, string tempFolder, string downloadName) - { - try - { - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, - MessageFactory.DownloadProgressEvent(User.GetUsername(), - Path.GetFileNameWithoutExtension(downloadName), 0F, "started")); - if (files.Count == 1) - { - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, - MessageFactory.DownloadProgressEvent(User.GetUsername(), - Path.GetFileNameWithoutExtension(downloadName), 1F, "ended")); - return GetFirstFileDownload(files); - } - - var filePath = _archiveService.CreateZipForDownload(files.Select(c => c.FilePath), tempFolder); - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, - MessageFactory.DownloadProgressEvent(User.GetUsername(), - Path.GetFileNameWithoutExtension(downloadName), 1F, "ended")); - return PhysicalFile(filePath, DefaultContentType, downloadName, true); - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an exception when trying to download files"); - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, - MessageFactory.DownloadProgressEvent(User.GetUsername(), - Path.GetFileNameWithoutExtension(downloadName), 1F, "ended")); - 
throw; - } - } - - [HttpGet("series")] - public async Task DownloadSeries(int seriesId) - { - if (!await HasDownloadPermission()) return BadRequest("You do not have permission"); - var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId); - var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId); - try - { - return await DownloadFiles(files, $"download_{User.GetUsername()}_s{seriesId}", $"{series.Name}.zip"); - } - catch (KavitaException ex) - { - return BadRequest(ex.Message); - } - } - - /// - /// Downloads all bookmarks in a zip for - /// - /// - /// - [HttpPost("bookmarks")] - public async Task DownloadBookmarkPages(DownloadBookmarkDto downloadBookmarkDto) - { - if (!await HasDownloadPermission()) return BadRequest("You do not have permission"); - if (!downloadBookmarkDto.Bookmarks.Any()) return BadRequest("Bookmarks cannot be empty"); - - // We know that all bookmarks will be for one single seriesId - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); - var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(downloadBookmarkDto.Bookmarks.First().SeriesId); - - var files = await _bookmarkService.GetBookmarkFilesById(downloadBookmarkDto.Bookmarks.Select(b => b.Id)); - - var filename = $"{series.Name} - Bookmarks.zip"; - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, - MessageFactory.DownloadProgressEvent(User.GetUsername(), Path.GetFileNameWithoutExtension(filename), 0F)); - var seriesIds = string.Join("_", downloadBookmarkDto.Bookmarks.Select(b => b.SeriesId).Distinct()); - var filePath = _archiveService.CreateZipForDownload(files, - $"download_{user.Id}_{seriesIds}_bookmarks"); - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, - MessageFactory.DownloadProgressEvent(User.GetUsername(), Path.GetFileNameWithoutExtension(filename), 1F)); - - - return PhysicalFile(filePath, DefaultContentType, filename, true); - } - + _unitOfWork = 
unitOfWork; + _archiveService = archiveService; + _directoryService = directoryService; + _downloadService = downloadService; + _eventHub = eventHub; + _logger = logger; + _bookmarkService = bookmarkService; + _accountService = accountService; } + + /// + /// For a given volume, return the size in bytes + /// + /// + /// + [HttpGet("volume-size")] + public async Task> GetVolumeSize(int volumeId) + { + var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId); + return Ok(_directoryService.GetTotalSize(files.Select(c => c.FilePath))); + } + + /// + /// For a given chapter, return the size in bytes + /// + /// + /// + [HttpGet("chapter-size")] + public async Task> GetChapterSize(int chapterId) + { + var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId); + return Ok(_directoryService.GetTotalSize(files.Select(c => c.FilePath))); + } + + /// + /// For a series, return the size in bytes + /// + /// + /// + [HttpGet("series-size")] + public async Task> GetSeriesSize(int seriesId) + { + var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId); + return Ok(_directoryService.GetTotalSize(files.Select(c => c.FilePath))); + } + + + /// + /// Downloads all chapters within a volume. If the chapters are multiple zips, they will all be zipped up. 
+ /// + /// + /// + [Authorize(Policy="RequireDownloadRole")] + [HttpGet("volume")] + public async Task DownloadVolume(int volumeId) + { + if (!await HasDownloadPermission()) return BadRequest("You do not have permission"); + + var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId); + var volume = await _unitOfWork.VolumeRepository.GetVolumeByIdAsync(volumeId); + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId); + try + { + return await DownloadFiles(files, $"download_{User.GetUsername()}_v{volumeId}", $"{series.Name} - Volume {volume.Number}.zip"); + } + catch (KavitaException ex) + { + return BadRequest(ex.Message); + } + } + + private async Task HasDownloadPermission() + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + return await _accountService.HasDownloadPermission(user); + } + + private ActionResult GetFirstFileDownload(IEnumerable files) + { + var (zipFile, contentType, fileDownloadName) = _downloadService.GetFirstFileDownload(files); + return PhysicalFile(zipFile, contentType, fileDownloadName, true); + } + + /// + /// Returns the zip for a single chapter. If the chapter contains multiple files, they will be zipped. 
+ /// + /// + /// + [HttpGet("chapter")] + public async Task DownloadChapter(int chapterId) + { + if (!await HasDownloadPermission()) return BadRequest("You do not have permission"); + var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId); + var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId); + var volume = await _unitOfWork.VolumeRepository.GetVolumeByIdAsync(chapter.VolumeId); + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId); + try + { + return await DownloadFiles(files, $"download_{User.GetUsername()}_c{chapterId}", $"{series.Name} - Chapter {chapter.Number}.zip"); + } + catch (KavitaException ex) + { + return BadRequest(ex.Message); + } + } + + private async Task DownloadFiles(ICollection files, string tempFolder, string downloadName) + { + try + { + await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, + MessageFactory.DownloadProgressEvent(User.GetUsername(), + Path.GetFileNameWithoutExtension(downloadName), 0F, "started")); + if (files.Count == 1) + { + await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, + MessageFactory.DownloadProgressEvent(User.GetUsername(), + Path.GetFileNameWithoutExtension(downloadName), 1F, "ended")); + return GetFirstFileDownload(files); + } + + var filePath = _archiveService.CreateZipForDownload(files.Select(c => c.FilePath), tempFolder); + await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, + MessageFactory.DownloadProgressEvent(User.GetUsername(), + Path.GetFileNameWithoutExtension(downloadName), 1F, "ended")); + return PhysicalFile(filePath, DefaultContentType, downloadName, true); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an exception when trying to download files"); + await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, + MessageFactory.DownloadProgressEvent(User.GetUsername(), + Path.GetFileNameWithoutExtension(downloadName), 1F, "ended")); + 
throw; + } + } + + [HttpGet("series")] + public async Task DownloadSeries(int seriesId) + { + if (!await HasDownloadPermission()) return BadRequest("You do not have permission"); + var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId); + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId); + try + { + return await DownloadFiles(files, $"download_{User.GetUsername()}_s{seriesId}", $"{series.Name}.zip"); + } + catch (KavitaException ex) + { + return BadRequest(ex.Message); + } + } + + /// + /// Downloads all bookmarks in a zip for + /// + /// + /// + [HttpPost("bookmarks")] + public async Task DownloadBookmarkPages(DownloadBookmarkDto downloadBookmarkDto) + { + if (!await HasDownloadPermission()) return BadRequest("You do not have permission"); + if (!downloadBookmarkDto.Bookmarks.Any()) return BadRequest("Bookmarks cannot be empty"); + + // We know that all bookmarks will be for one single seriesId + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(downloadBookmarkDto.Bookmarks.First().SeriesId); + + var files = await _bookmarkService.GetBookmarkFilesById(downloadBookmarkDto.Bookmarks.Select(b => b.Id)); + + var filename = $"{series.Name} - Bookmarks.zip"; + await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, + MessageFactory.DownloadProgressEvent(User.GetUsername(), Path.GetFileNameWithoutExtension(filename), 0F)); + var seriesIds = string.Join("_", downloadBookmarkDto.Bookmarks.Select(b => b.SeriesId).Distinct()); + var filePath = _archiveService.CreateZipForDownload(files, + $"download_{user.Id}_{seriesIds}_bookmarks"); + await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, + MessageFactory.DownloadProgressEvent(User.GetUsername(), Path.GetFileNameWithoutExtension(filename), 1F)); + + + return PhysicalFile(filePath, DefaultContentType, filename, true); + } + } diff --git 
a/API/Controllers/FallbackController.cs b/API/Controllers/FallbackController.cs index a765269b8..2f5d7fceb 100644 --- a/API/Controllers/FallbackController.cs +++ b/API/Controllers/FallbackController.cs @@ -1,7 +1,9 @@ -using System.IO; +using System; +using System.IO; using API.Services; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; namespace API.Controllers; diff --git a/API/Controllers/HealthController.cs b/API/Controllers/HealthController.cs index 8d588fb44..c0d44582f 100644 --- a/API/Controllers/HealthController.cs +++ b/API/Controllers/HealthController.cs @@ -9,7 +9,7 @@ namespace API.Controllers; public class HealthController : BaseApiController { - [HttpGet()] + [HttpGet] public ActionResult GetHealth() { return Ok("Ok"); diff --git a/API/Controllers/ImageController.cs b/API/Controllers/ImageController.cs index f83df2068..96c27ede7 100644 --- a/API/Controllers/ImageController.cs +++ b/API/Controllers/ImageController.cs @@ -7,147 +7,146 @@ using API.Services; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; -namespace API.Controllers +namespace API.Controllers; + +/// +/// Responsible for servicing up images stored in Kavita for entities +/// +[AllowAnonymous] +public class ImageController : BaseApiController { - /// - /// Responsible for servicing up images stored in Kavita for entities - /// - [AllowAnonymous] - public class ImageController : BaseApiController + private readonly IUnitOfWork _unitOfWork; + private readonly IDirectoryService _directoryService; + + /// + public ImageController(IUnitOfWork unitOfWork, IDirectoryService directoryService) { - private readonly IUnitOfWork _unitOfWork; - private readonly IDirectoryService _directoryService; + _unitOfWork = unitOfWork; + _directoryService = directoryService; + } - /// - public ImageController(IUnitOfWork unitOfWork, IDirectoryService directoryService) - { - _unitOfWork = unitOfWork; - _directoryService = 
directoryService; - } + /// + /// Returns cover image for Chapter + /// + /// + /// + [HttpGet("chapter-cover")] + [ResponseCache(CacheProfileName = "Images")] + public async Task GetChapterCoverImage(int chapterId) + { + var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.ChapterRepository.GetChapterCoverImageAsync(chapterId)); + if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); + var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); - /// - /// Returns cover image for Chapter - /// - /// - /// - [HttpGet("chapter-cover")] - [ResponseCache(CacheProfileName = "Images")] - public async Task GetChapterCoverImage(int chapterId) - { - var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.ChapterRepository.GetChapterCoverImageAsync(chapterId)); - if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); - var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); + return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); + } - return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); - } + /// + /// Returns cover image for Volume + /// + /// + /// + [HttpGet("volume-cover")] + [ResponseCache(CacheProfileName = "Images")] + public async Task GetVolumeCoverImage(int volumeId) + { + var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.VolumeRepository.GetVolumeCoverImageAsync(volumeId)); + if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); + var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); - /// - /// Returns cover image for Volume - /// - /// - /// - [HttpGet("volume-cover")] - [ResponseCache(CacheProfileName = "Images")] - 
public async Task GetVolumeCoverImage(int volumeId) - { - var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.VolumeRepository.GetVolumeCoverImageAsync(volumeId)); - if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); - var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); + return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); + } - return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); - } + /// + /// Returns cover image for Series + /// + /// Id of Series + /// + [ResponseCache(CacheProfileName = "Images")] + [HttpGet("series-cover")] + public async Task GetSeriesCoverImage(int seriesId) + { + var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.SeriesRepository.GetSeriesCoverImageAsync(seriesId)); + if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); + var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); - /// - /// Returns cover image for Series - /// - /// Id of Series - /// - [ResponseCache(CacheProfileName = "Images")] - [HttpGet("series-cover")] - public async Task GetSeriesCoverImage(int seriesId) - { - var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.SeriesRepository.GetSeriesCoverImageAsync(seriesId)); - if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); - var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); + Response.AddCacheHeader(path); - Response.AddCacheHeader(path); + return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); + } - return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); - } + /// + /// 
Returns cover image for Collection Tag + /// + /// + /// + [HttpGet("collection-cover")] + [ResponseCache(CacheProfileName = "Images")] + public async Task GetCollectionCoverImage(int collectionTagId) + { + var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.CollectionTagRepository.GetCoverImageAsync(collectionTagId)); + if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); + var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); - /// - /// Returns cover image for Collection Tag - /// - /// - /// - [HttpGet("collection-cover")] - [ResponseCache(CacheProfileName = "Images")] - public async Task GetCollectionCoverImage(int collectionTagId) - { - var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.CollectionTagRepository.GetCoverImageAsync(collectionTagId)); - if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); - var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); + return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); + } - return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); - } + /// + /// Returns cover image for a Reading List + /// + /// + /// + [HttpGet("readinglist-cover")] + [ResponseCache(CacheProfileName = "Images")] + public async Task GetReadingListCoverImage(int readingListId) + { + var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.ReadingListRepository.GetCoverImageAsync(readingListId)); + if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); + var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); - /// - /// Returns cover image for a Reading List - /// - /// - /// - [HttpGet("readinglist-cover")] 
- [ResponseCache(CacheProfileName = "Images")] - public async Task GetReadingListCoverImage(int readingListId) - { - var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.ReadingListRepository.GetCoverImageAsync(readingListId)); - if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); - var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); + return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); + } - return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); - } + /// + /// Returns image for a given bookmark page + /// + /// This request is served unauthenticated, but user must be passed via api key to validate + /// + /// Starts at 0 + /// API Key for user. Needed to authenticate request + /// + [HttpGet("bookmark")] + [ResponseCache(CacheProfileName = "Images")] + public async Task GetBookmarkImage(int chapterId, int pageNum, string apiKey) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey); + var bookmark = await _unitOfWork.UserRepository.GetBookmarkForPage(pageNum, chapterId, userId); + if (bookmark == null) return BadRequest("Bookmark does not exist"); - /// - /// Returns image for a given bookmark page - /// - /// This request is served unauthenticated, but user must be passed via api key to validate - /// - /// Starts at 0 - /// API Key for user. 
Needed to authenticate request - /// - [HttpGet("bookmark")] - [ResponseCache(CacheProfileName = "Images")] - public async Task GetBookmarkImage(int chapterId, int pageNum, string apiKey) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey); - var bookmark = await _unitOfWork.UserRepository.GetBookmarkForPage(pageNum, chapterId, userId); - if (bookmark == null) return BadRequest("Bookmark does not exist"); + var bookmarkDirectory = + (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BookmarkDirectory)).Value; + var file = new FileInfo(Path.Join(bookmarkDirectory, bookmark.FileName)); + var format = Path.GetExtension(file.FullName).Replace(".", ""); - var bookmarkDirectory = - (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BookmarkDirectory)).Value; - var file = new FileInfo(Path.Join(bookmarkDirectory, bookmark.FileName)); - var format = Path.GetExtension(file.FullName).Replace(".", ""); + return PhysicalFile(file.FullName, "image/" + format, Path.GetFileName(file.FullName)); + } - return PhysicalFile(file.FullName, "image/" + format, Path.GetFileName(file.FullName)); - } + /// + /// Returns a temp coverupload image + /// + /// Filename of file. This is used with upload/upload-by-url + /// + [Authorize(Policy="RequireAdminRole")] + [HttpGet("cover-upload")] + [ResponseCache(CacheProfileName = "Images")] + public ActionResult GetCoverUploadImage(string filename) + { + if (filename.Contains("..")) return BadRequest("Invalid Filename"); - /// - /// Returns a temp coverupload image - /// - /// Filename of file. 
This is used with upload/upload-by-url - /// - [Authorize(Policy="RequireAdminRole")] - [HttpGet("cover-upload")] - [ResponseCache(CacheProfileName = "Images")] - public ActionResult GetCoverUploadImage(string filename) - { - if (filename.Contains("..")) return BadRequest("Invalid Filename"); + var path = Path.Join(_directoryService.TempDirectory, filename); + if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"File does not exist"); + var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); - var path = Path.Join(_directoryService.TempDirectory, filename); - if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"File does not exist"); - var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); - - return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); - } + return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); } } diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs index 3a387d83e..202d6b2cb 100644 --- a/API/Controllers/LibraryController.cs +++ b/API/Controllers/LibraryController.cs @@ -11,6 +11,7 @@ using API.DTOs.Search; using API.DTOs.System; using API.Entities; using API.Entities.Enums; +using API.Entities.Metadata; using API.Extensions; using API.Services; using API.Services.Tasks.Scanner; @@ -22,323 +23,315 @@ using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Logging; using TaskScheduler = API.Services.TaskScheduler; -namespace API.Controllers +namespace API.Controllers; + +[Authorize] +public class LibraryController : BaseApiController { - [Authorize] - public class LibraryController : BaseApiController + private readonly IDirectoryService _directoryService; + private readonly ILogger _logger; + private readonly IMapper _mapper; + private readonly ITaskScheduler 
_taskScheduler; + private readonly IUnitOfWork _unitOfWork; + private readonly IEventHub _eventHub; + private readonly ILibraryWatcher _libraryWatcher; + + public LibraryController(IDirectoryService directoryService, + ILogger logger, IMapper mapper, ITaskScheduler taskScheduler, + IUnitOfWork unitOfWork, IEventHub eventHub, ILibraryWatcher libraryWatcher) { - private readonly IDirectoryService _directoryService; - private readonly ILogger _logger; - private readonly IMapper _mapper; - private readonly ITaskScheduler _taskScheduler; - private readonly IUnitOfWork _unitOfWork; - private readonly IEventHub _eventHub; - private readonly ILibraryWatcher _libraryWatcher; + _directoryService = directoryService; + _logger = logger; + _mapper = mapper; + _taskScheduler = taskScheduler; + _unitOfWork = unitOfWork; + _eventHub = eventHub; + _libraryWatcher = libraryWatcher; + } - public LibraryController(IDirectoryService directoryService, - ILogger logger, IMapper mapper, ITaskScheduler taskScheduler, - IUnitOfWork unitOfWork, IEventHub eventHub, ILibraryWatcher libraryWatcher) + /// + /// Creates a new Library. Upon library creation, adds new library to all Admin accounts. + /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("create")] + public async Task AddLibrary(CreateLibraryDto createLibraryDto) + { + if (await _unitOfWork.LibraryRepository.LibraryExists(createLibraryDto.Name)) { - _directoryService = directoryService; - _logger = logger; - _mapper = mapper; - _taskScheduler = taskScheduler; - _unitOfWork = unitOfWork; - _eventHub = eventHub; - _libraryWatcher = libraryWatcher; + return BadRequest("Library name already exists. Please choose a unique name to the server."); } - /// - /// Creates a new Library. Upon library creation, adds new library to all Admin accounts. 
- /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("create")] - public async Task AddLibrary(CreateLibraryDto createLibraryDto) + var library = new Library { - if (await _unitOfWork.LibraryRepository.LibraryExists(createLibraryDto.Name)) + Name = createLibraryDto.Name, + Type = createLibraryDto.Type, + Folders = createLibraryDto.Folders.Select(x => new FolderPath {Path = x}).ToList() + }; + + _unitOfWork.LibraryRepository.Add(library); + + var admins = (await _unitOfWork.UserRepository.GetAdminUsersAsync()).ToList(); + foreach (var admin in admins) + { + admin.Libraries ??= new List(); + admin.Libraries.Add(library); + } + + + if (!await _unitOfWork.CommitAsync()) return BadRequest("There was a critical issue. Please try again."); + + _logger.LogInformation("Created a new library: {LibraryName}", library.Name); + await _libraryWatcher.RestartWatching(); + _taskScheduler.ScanLibrary(library.Id); + await _eventHub.SendMessageAsync(MessageFactory.LibraryModified, + MessageFactory.LibraryModifiedEvent(library.Id, "create"), false); + return Ok(); + } + + /// + /// Returns a list of directories for a given path. If path is empty, returns root drives. + /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpGet("list")] + public ActionResult> GetDirectories(string path) + { + if (string.IsNullOrEmpty(path)) + { + return Ok(Directory.GetLogicalDrives().Select(d => new DirectoryDto() { - return BadRequest("Library name already exists. 
Please choose a unique name to the server."); + Name = d, + FullPath = d + })); + } + + if (!Directory.Exists(path)) return BadRequest("This is not a valid path"); + + return Ok(_directoryService.ListDirectory(path)); + } + + + [HttpGet] + public async Task>> GetLibraries() + { + return Ok(await _unitOfWork.LibraryRepository.GetLibraryDtosForUsernameAsync(User.GetUsername())); + } + + [HttpGet("jump-bar")] + public async Task>> GetJumpBar(int libraryId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + if (!await _unitOfWork.UserRepository.HasAccessToLibrary(libraryId, userId)) return BadRequest("User does not have access to library"); + + return Ok(_unitOfWork.LibraryRepository.GetJumpBarAsync(libraryId)); + } + + + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("grant-access")] + public async Task> UpdateUserLibraries(UpdateLibraryForUserDto updateLibraryForUserDto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(updateLibraryForUserDto.Username); + if (user == null) return BadRequest("Could not validate user"); + + var libraryString = string.Join(",", updateLibraryForUserDto.SelectedLibraries.Select(x => x.Name)); + _logger.LogInformation("Granting user {UserName} access to: {Libraries}", updateLibraryForUserDto.Username, libraryString); + + var allLibraries = await _unitOfWork.LibraryRepository.GetLibrariesAsync(); + foreach (var library in allLibraries) + { + library.AppUsers ??= new List(); + var libraryContainsUser = library.AppUsers.Any(u => u.UserName == user.UserName); + var libraryIsSelected = updateLibraryForUserDto.SelectedLibraries.Any(l => l.Id == library.Id); + if (libraryContainsUser && !libraryIsSelected) + { + // Remove + library.AppUsers.Remove(user); + } + else if (!libraryContainsUser && libraryIsSelected) + { + library.AppUsers.Add(user); } - var library = new Library - { - Name = createLibraryDto.Name, - Type = createLibraryDto.Type, - Folders = 
createLibraryDto.Folders.Select(x => new FolderPath {Path = x}).ToList() - }; + } - _unitOfWork.LibraryRepository.Add(library); + if (!_unitOfWork.HasChanges()) + { + _logger.LogInformation("Added: {SelectedLibraries} to {Username}",libraryString, updateLibraryForUserDto.Username); + return Ok(_mapper.Map(user)); + } - var admins = (await _unitOfWork.UserRepository.GetAdminUsersAsync()).ToList(); - foreach (var admin in admins) + if (await _unitOfWork.CommitAsync()) + { + _logger.LogInformation("Added: {SelectedLibraries} to {Username}",libraryString, updateLibraryForUserDto.Username); + return Ok(_mapper.Map(user)); + } + + + return BadRequest("There was a critical issue. Please try again."); + } + + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("scan")] + public ActionResult Scan(int libraryId, bool force = false) + { + _taskScheduler.ScanLibrary(libraryId, force); + return Ok(); + } + + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("refresh-metadata")] + public ActionResult RefreshMetadata(int libraryId, bool force = true) + { + _taskScheduler.RefreshMetadata(libraryId, force); + return Ok(); + } + + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("analyze")] + public ActionResult Analyze(int libraryId) + { + _taskScheduler.AnalyzeFilesForLibrary(libraryId, true); + return Ok(); + } + + /// + /// Given a valid path, will invoke either a Scan Series or Scan Library. 
If the folder does not exist within Kavita, the request will be ignored + /// + /// + /// + [AllowAnonymous] + [HttpPost("scan-folder")] + public async Task ScanFolder(ScanFolderDto dto) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(dto.ApiKey); + var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); + + // Validate user has Admin privileges + var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user); + if (!isAdmin) return BadRequest("API key must belong to an admin"); + + if (dto.FolderPath.Contains("..")) return BadRequest("Invalid Path"); + + dto.FolderPath = Services.Tasks.Scanner.Parser.Parser.NormalizePath(dto.FolderPath); + + var libraryFolder = (await _unitOfWork.LibraryRepository.GetLibraryDtosAsync()) + .SelectMany(l => l.Folders) + .Distinct() + .Select(Services.Tasks.Scanner.Parser.Parser.NormalizePath); + + var seriesFolder = _directoryService.FindHighestDirectoriesFromFiles(libraryFolder, + new List() {dto.FolderPath}); + + _taskScheduler.ScanFolder(seriesFolder.Keys.Count == 1 ? 
seriesFolder.Keys.First() : dto.FolderPath); + + return Ok(); + } + + [Authorize(Policy = "RequireAdminRole")] + [HttpDelete("delete")] + public async Task> DeleteLibrary(int libraryId) + { + var username = User.GetUsername(); + _logger.LogInformation("Library {LibraryId} is being deleted by {UserName}", libraryId, username); + var series = await _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId); + var seriesIds = series.Select(x => x.Id).ToArray(); + var chapterIds = + await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(seriesIds); + + try + { + var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None); + if (TaskScheduler.HasScanTaskRunningForLibrary(libraryId)) { - admin.Libraries ??= new List(); - admin.Libraries.Add(library); + // TODO: Figure out how to cancel a job + + _logger.LogInformation("User is attempting to delete a library while a scan is in progress"); + return BadRequest( + "You cannot delete a library while a scan is in progress. Please wait for scan to continue then try to delete"); } - - if (!await _unitOfWork.CommitAsync()) return BadRequest("There was a critical issue. Please try again."); - - _logger.LogInformation("Created a new library: {LibraryName}", library.Name); - await _libraryWatcher.RestartWatching(); - _taskScheduler.ScanLibrary(library.Id); - await _eventHub.SendMessageAsync(MessageFactory.LibraryModified, - MessageFactory.LibraryModifiedEvent(library.Id, "create"), false); - return Ok(); - } - - /// - /// Returns a list of directories for a given path. If path is empty, returns root drives. 
- /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpGet("list")] - public ActionResult> GetDirectories(string path) - { - if (string.IsNullOrEmpty(path)) + // Due to a bad schema that I can't figure out how to fix, we need to erase all RelatedSeries before we delete the library + // Aka SeriesRelation has an invalid foreign key + foreach (var s in await _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(library.Id, + SeriesIncludes.Related)) { - return Ok(Directory.GetLogicalDrives().Select(d => new DirectoryDto() - { - Name = d, - FullPath = d - })); + s.Relations = new List(); + _unitOfWork.SeriesRepository.Update(s); } + await _unitOfWork.CommitAsync(); - if (!Directory.Exists(path)) return BadRequest("This is not a valid path"); + _unitOfWork.LibraryRepository.Delete(library); - return Ok(_directoryService.ListDirectory(path)); - } + await _unitOfWork.CommitAsync(); - [HttpGet] - public async Task>> GetLibraries() - { - return Ok(await _unitOfWork.LibraryRepository.GetLibraryDtosAsync()); - } - - [HttpGet("jump-bar")] - public async Task>> GetJumpBar(int libraryId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - if (!await _unitOfWork.UserRepository.HasAccessToLibrary(libraryId, userId)) return BadRequest("User does not have access to library"); - - return Ok(_unitOfWork.LibraryRepository.GetJumpBarAsync(libraryId)); - } - - - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("grant-access")] - public async Task> UpdateUserLibraries(UpdateLibraryForUserDto updateLibraryForUserDto) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(updateLibraryForUserDto.Username); - if (user == null) return BadRequest("Could not validate user"); - - var libraryString = string.Join(",", updateLibraryForUserDto.SelectedLibraries.Select(x => x.Name)); - _logger.LogInformation("Granting user {UserName} access to: {Libraries}", updateLibraryForUserDto.Username, libraryString); - - var 
allLibraries = await _unitOfWork.LibraryRepository.GetLibrariesAsync(); - foreach (var library in allLibraries) + if (chapterIds.Any()) { - library.AppUsers ??= new List(); - var libraryContainsUser = library.AppUsers.Any(u => u.UserName == user.UserName); - var libraryIsSelected = updateLibraryForUserDto.SelectedLibraries.Any(l => l.Id == library.Id); - if (libraryContainsUser && !libraryIsSelected) - { - // Remove - library.AppUsers.Remove(user); - } - else if (!libraryContainsUser && libraryIsSelected) - { - library.AppUsers.Add(user); - } - - } - - if (!_unitOfWork.HasChanges()) - { - _logger.LogInformation("Added: {SelectedLibraries} to {Username}",libraryString, updateLibraryForUserDto.Username); - return Ok(_mapper.Map(user)); - } - - if (await _unitOfWork.CommitAsync()) - { - _logger.LogInformation("Added: {SelectedLibraries} to {Username}",libraryString, updateLibraryForUserDto.Username); - return Ok(_mapper.Map(user)); - } - - - return BadRequest("There was a critical issue. Please try again."); - } - - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("scan")] - public ActionResult Scan(int libraryId, bool force = false) - { - _taskScheduler.ScanLibrary(libraryId, force); - return Ok(); - } - - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("refresh-metadata")] - public ActionResult RefreshMetadata(int libraryId, bool force = true) - { - _taskScheduler.RefreshMetadata(libraryId, force); - return Ok(); - } - - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("analyze")] - public ActionResult Analyze(int libraryId) - { - _taskScheduler.AnalyzeFilesForLibrary(libraryId, true); - return Ok(); - } - - [HttpGet("libraries")] - public async Task>> GetLibrariesForUser() - { - return Ok(await _unitOfWork.LibraryRepository.GetLibraryDtosForUsernameAsync(User.GetUsername())); - } - - /// - /// Given a valid path, will invoke either a Scan Series or Scan Library. 
If the folder does not exist within Kavita, the request will be ignored - /// - /// - /// - [AllowAnonymous] - [HttpPost("scan-folder")] - public async Task ScanFolder(ScanFolderDto dto) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(dto.ApiKey); - var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); - // Validate user has Admin privileges - var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user); - if (!isAdmin) return BadRequest("API key must belong to an admin"); - if (dto.FolderPath.Contains("..")) return BadRequest("Invalid Path"); - - dto.FolderPath = Services.Tasks.Scanner.Parser.Parser.NormalizePath(dto.FolderPath); - - var libraryFolder = (await _unitOfWork.LibraryRepository.GetLibraryDtosAsync()) - .SelectMany(l => l.Folders) - .Distinct() - .Select(Services.Tasks.Scanner.Parser.Parser.NormalizePath); - - var seriesFolder = _directoryService.FindHighestDirectoriesFromFiles(libraryFolder, - new List() {dto.FolderPath}); - - _taskScheduler.ScanFolder(seriesFolder.Keys.Count == 1 ? 
seriesFolder.Keys.First() : dto.FolderPath); - - return Ok(); - } - - [Authorize(Policy = "RequireAdminRole")] - [HttpDelete("delete")] - public async Task> DeleteLibrary(int libraryId) - { - var username = User.GetUsername(); - _logger.LogInformation("Library {LibraryId} is being deleted by {UserName}", libraryId, username); - var series = await _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId); - var seriesIds = series.Select(x => x.Id).ToArray(); - var chapterIds = - await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(seriesIds); - - try - { - var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None); - if (TaskScheduler.HasScanTaskRunningForLibrary(libraryId)) - { - // TODO: Figure out how to cancel a job - _logger.LogInformation("User is attempting to delete a library while a scan is in progress"); - return BadRequest( - "You cannot delete a library while a scan is in progress. Please wait for scan to continue then try to delete"); - } - _unitOfWork.LibraryRepository.Delete(library); + await _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters(); await _unitOfWork.CommitAsync(); - - if (chapterIds.Any()) - { - await _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters(); - await _unitOfWork.CommitAsync(); - _taskScheduler.CleanupChapters(chapterIds); - } - - await _libraryWatcher.RestartWatching(); - - foreach (var seriesId in seriesIds) - { - await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved, - MessageFactory.SeriesRemovedEvent(seriesId, string.Empty, libraryId), false); - } - - await _eventHub.SendMessageAsync(MessageFactory.LibraryModified, - MessageFactory.LibraryModifiedEvent(libraryId, "delete"), false); - return Ok(true); + _taskScheduler.CleanupChapters(chapterIds); } - catch (Exception ex) + + await _libraryWatcher.RestartWatching(); + + foreach (var seriesId in seriesIds) { - _logger.LogError(ex, "There was a critical error trying to delete the 
library"); - await _unitOfWork.RollbackAsync(); - return Ok(false); - } - } - - /// - /// Updates an existing Library with new name, folders, and/or type. - /// - /// Any folder or type change will invoke a scan. - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("update")] - public async Task UpdateLibrary(UpdateLibraryDto libraryForUserDto) - { - var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryForUserDto.Id, LibraryIncludes.Folders); - - var originalFolders = library.Folders.Select(x => x.Path).ToList(); - - library.Name = libraryForUserDto.Name; - library.Folders = libraryForUserDto.Folders.Select(s => new FolderPath() {Path = s}).ToList(); - - var typeUpdate = library.Type != libraryForUserDto.Type; - library.Type = libraryForUserDto.Type; - - _unitOfWork.LibraryRepository.Update(library); - - if (!await _unitOfWork.CommitAsync()) return BadRequest("There was a critical issue updating the library."); - if (originalFolders.Count != libraryForUserDto.Folders.Count() || typeUpdate) - { - await _libraryWatcher.RestartWatching(); - _taskScheduler.ScanLibrary(library.Id); + await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved, + MessageFactory.SeriesRemovedEvent(seriesId, string.Empty, libraryId), false); } - return Ok(); - + await _eventHub.SendMessageAsync(MessageFactory.LibraryModified, + MessageFactory.LibraryModifiedEvent(libraryId, "delete"), false); + return Ok(true); } - - [HttpGet("search")] - public async Task> Search(string queryString) + catch (Exception ex) { - queryString = Uri.UnescapeDataString(queryString).Trim().Replace(@"%", string.Empty).Replace(":", string.Empty); - - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); - // Get libraries user has access to - var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id)).ToList(); - - if (!libraries.Any()) return BadRequest("User does not have access to any libraries"); - if 
(!libraries.Any()) return BadRequest("User does not have access to any libraries"); - var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user); - - var series = await _unitOfWork.SeriesRepository.SearchSeries(user.Id, isAdmin, libraries.Select(l => l.Id).ToArray(), queryString); - - return Ok(series); - } - - [HttpGet("type")] - public async Task> GetLibraryType(int libraryId) - { - return Ok(await _unitOfWork.LibraryRepository.GetLibraryTypeAsync(libraryId)); + _logger.LogError(ex, "There was a critical error trying to delete the library"); + await _unitOfWork.RollbackAsync(); + return Ok(false); } } + + /// + /// Updates an existing Library with new name, folders, and/or type. + /// + /// Any folder or type change will invoke a scan. + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("update")] + public async Task UpdateLibrary(UpdateLibraryDto libraryForUserDto) + { + var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryForUserDto.Id, LibraryIncludes.Folders); + + var originalFolders = library.Folders.Select(x => x.Path).ToList(); + + library.Name = libraryForUserDto.Name; + library.Folders = libraryForUserDto.Folders.Select(s => new FolderPath() {Path = s}).ToList(); + + var typeUpdate = library.Type != libraryForUserDto.Type; + library.Type = libraryForUserDto.Type; + + _unitOfWork.LibraryRepository.Update(library); + + if (!await _unitOfWork.CommitAsync()) return BadRequest("There was a critical issue updating the library."); + if (originalFolders.Count != libraryForUserDto.Folders.Count() || typeUpdate) + { + await _libraryWatcher.RestartWatching(); + _taskScheduler.ScanLibrary(library.Id); + } + + return Ok(); + + } + + + [HttpGet("type")] + public async Task> GetLibraryType(int libraryId) + { + return Ok(await _unitOfWork.LibraryRepository.GetLibraryTypeAsync(libraryId)); + } } diff --git a/API/Controllers/MetadataController.cs b/API/Controllers/MetadataController.cs index 7aee25c30..b0c9b62be 100644 
--- a/API/Controllers/MetadataController.cs +++ b/API/Controllers/MetadataController.cs @@ -8,6 +8,7 @@ using API.DTOs; using API.DTOs.Filtering; using API.DTOs.Metadata; using API.Entities.Enums; +using API.Extensions; using Kavita.Common.Extensions; using Microsoft.AspNetCore.Mvc; @@ -31,15 +32,18 @@ public class MetadataController : BaseApiController [HttpGet("genres")] public async Task>> GetAllGenres(string? libraryIds) { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); var ids = libraryIds?.Split(",").Select(int.Parse).ToList(); if (ids != null && ids.Count > 0) { - return Ok(await _unitOfWork.GenreRepository.GetAllGenreDtosForLibrariesAsync(ids)); + return Ok(await _unitOfWork.GenreRepository.GetAllGenreDtosForLibrariesAsync(ids, userId)); } - return Ok(await _unitOfWork.GenreRepository.GetAllGenreDtosAsync()); + return Ok(await _unitOfWork.GenreRepository.GetAllGenreDtosAsync(userId)); } + + /// /// Fetches people from the instance /// @@ -48,12 +52,13 @@ public class MetadataController : BaseApiController [HttpGet("people")] public async Task>> GetAllPeople(string? libraryIds) { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); var ids = libraryIds?.Split(",").Select(int.Parse).ToList(); if (ids != null && ids.Count > 0) { - return Ok(await _unitOfWork.PersonRepository.GetAllPeopleDtosForLibrariesAsync(ids)); + return Ok(await _unitOfWork.PersonRepository.GetAllPeopleDtosForLibrariesAsync(ids, userId)); } - return Ok(await _unitOfWork.PersonRepository.GetAllPeople()); + return Ok(await _unitOfWork.PersonRepository.GetAllPersonDtosAsync(userId)); } /// @@ -64,19 +69,22 @@ public class MetadataController : BaseApiController [HttpGet("tags")] public async Task>> GetAllTags(string? 
libraryIds) { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); var ids = libraryIds?.Split(",").Select(int.Parse).ToList(); if (ids != null && ids.Count > 0) { - return Ok(await _unitOfWork.TagRepository.GetAllTagDtosForLibrariesAsync(ids)); + return Ok(await _unitOfWork.TagRepository.GetAllTagDtosForLibrariesAsync(ids, userId)); } - return Ok(await _unitOfWork.TagRepository.GetAllTagDtosAsync()); + return Ok(await _unitOfWork.TagRepository.GetAllTagDtosAsync(userId)); } /// /// Fetches all age ratings from the instance /// /// String separated libraryIds or null for all ratings + /// This API is cached for 1 hour, varying by libraryIds /// + [ResponseCache(CacheProfileName = "5Minute", VaryByQueryKeys = new [] {"libraryIds"})] [HttpGet("age-ratings")] public async Task>> GetAllAgeRatings(string? libraryIds) { @@ -90,14 +98,16 @@ public class MetadataController : BaseApiController { Title = t.ToDescription(), Value = t - })); + }).Where(r => r.Value > AgeRating.NotApplicable)); } /// /// Fetches all publication status' from the instance /// /// String separated libraryIds or null for all publication status + /// This API is cached for 1 hour, varying by libraryIds /// + [ResponseCache(CacheProfileName = "5Minute", VaryByQueryKeys = new [] {"libraryIds"})] [HttpGet("publication-status")] public ActionResult> GetAllPublicationStatus(string? 
libraryIds) { @@ -115,8 +125,9 @@ public class MetadataController : BaseApiController } /// - /// Fetches all age ratings from the instance + /// Fetches all age languages from the libraries passed (or if none passed, all in the server) /// + /// This does not perform RBS for the user if they have Library access due to the non-sensitive nature of languages /// String separated libraryIds or null for all ratings /// [HttpGet("languages")] @@ -128,15 +139,8 @@ public class MetadataController : BaseApiController return Ok(await _unitOfWork.LibraryRepository.GetAllLanguagesForLibrariesAsync(ids)); } - var englishTag = CultureInfo.GetCultureInfo("en"); - return Ok(new List() - { - new () - { - Title = englishTag.DisplayName, - IsoCode = englishTag.IetfLanguageTag - } - }); + + return Ok(await _unitOfWork.LibraryRepository.GetAllLanguagesForLibrariesAsync()); } [HttpGet("all-languages")] diff --git a/API/Controllers/OPDSController.cs b/API/Controllers/OPDSController.cs index e5165ae42..c13a99079 100644 --- a/API/Controllers/OPDSController.cs +++ b/API/Controllers/OPDSController.cs @@ -32,6 +32,7 @@ public class OpdsController : BaseApiController private readonly ICacheService _cacheService; private readonly IReaderService _readerService; private readonly ISeriesService _seriesService; + private readonly IAccountService _accountService; private readonly XmlSerializer _xmlSerializer; @@ -65,7 +66,8 @@ public class OpdsController : BaseApiController public OpdsController(IUnitOfWork unitOfWork, IDownloadService downloadService, IDirectoryService directoryService, ICacheService cacheService, - IReaderService readerService, ISeriesService seriesService) + IReaderService readerService, ISeriesService seriesService, + IAccountService accountService) { _unitOfWork = unitOfWork; _downloadService = downloadService; @@ -73,6 +75,7 @@ public class OpdsController : BaseApiController _cacheService = cacheService; _readerService = readerService; _seriesService = seriesService; + 
_accountService = accountService; _xmlSerializer = new XmlSerializer(typeof(Feed)); _xmlOpenSearchSerializer = new XmlSerializer(typeof(OpenSearchDescription)); @@ -193,8 +196,8 @@ public class OpdsController : BaseApiController var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user); - IList tags = isAdmin ? (await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync()).ToList() - : (await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync()).ToList(); + IEnumerable tags = isAdmin ? (await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync()) + : (await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(userId)); var feed = CreateFeed("All Collections", $"{apiKey}/collections", apiKey); @@ -236,7 +239,7 @@ public class OpdsController : BaseApiController } else { - tags = await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(); + tags = await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(userId); } var tag = tags.SingleOrDefault(t => t.Id == collectionId); @@ -619,7 +622,7 @@ public class OpdsController : BaseApiController if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) return BadRequest("OPDS is not enabled on this server"); var user = await _unitOfWork.UserRepository.GetUserByIdAsync(await GetUser(apiKey)); - if (!await _downloadService.HasDownloadPermission(user)) + if (!await _accountService.HasDownloadPermission(user)) { return BadRequest("User does not have download permissions"); } diff --git a/API/Controllers/PluginController.cs b/API/Controllers/PluginController.cs index 4a1209710..39f396985 100644 --- a/API/Controllers/PluginController.cs +++ b/API/Controllers/PluginController.cs @@ -7,44 +7,43 @@ using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Logging; -namespace API.Controllers +namespace API.Controllers; + +public class 
PluginController : BaseApiController { - public class PluginController : BaseApiController + private readonly IUnitOfWork _unitOfWork; + private readonly ITokenService _tokenService; + private readonly ILogger _logger; + + public PluginController(IUnitOfWork unitOfWork, ITokenService tokenService, ILogger logger) { - private readonly IUnitOfWork _unitOfWork; - private readonly ITokenService _tokenService; - private readonly ILogger _logger; + _unitOfWork = unitOfWork; + _tokenService = tokenService; + _logger = logger; + } - public PluginController(IUnitOfWork unitOfWork, ITokenService tokenService, ILogger logger) + /// + /// Authenticate with the Server given an apiKey. This will log you in by returning the user object and the JWT token. + /// + /// This API is not fully built out and may require more information in later releases + /// API key which will be used to authenticate and return a valid user token back + /// Name of the Plugin + /// + [AllowAnonymous] + [HttpPost("authenticate")] + public async Task> Authenticate([Required] string apiKey, [Required] string pluginName) + { + // NOTE: In order to log information about plugins, we need some Plugin Description information for each request + // Should log into access table so we can tell the user + var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey); + if (userId <= 0) return Unauthorized(); + var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); + _logger.LogInformation("Plugin {PluginName} has authenticated with {UserName} ({UserId})'s API Key", pluginName, user.UserName, userId); + return new UserDto { - _unitOfWork = unitOfWork; - _tokenService = tokenService; - _logger = logger; - } - - /// - /// Authenticate with the Server given an apiKey. This will log you in by returning the user object and the JWT token. 
- /// - /// This API is not fully built out and may require more information in later releases - /// API key which will be used to authenticate and return a valid user token back - /// Name of the Plugin - /// - [AllowAnonymous] - [HttpPost("authenticate")] - public async Task> Authenticate([Required] string apiKey, [Required] string pluginName) - { - // NOTE: In order to log information about plugins, we need some Plugin Description information for each request - // Should log into access table so we can tell the user - var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey); - if (userId <= 0) return Unauthorized(); - var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); - _logger.LogInformation("Plugin {PluginName} has authenticated with {UserName} ({UserId})'s API Key", pluginName, user.UserName, userId); - return new UserDto - { - Username = user.UserName, - Token = await _tokenService.CreateToken(user), - ApiKey = user.ApiKey, - }; - } + Username = user.UserName, + Token = await _tokenService.CreateToken(user), + ApiKey = user.ApiKey, + }; } } diff --git a/API/Controllers/ReaderController.cs b/API/Controllers/ReaderController.cs index 5569fb9f8..db5db71bb 100644 --- a/API/Controllers/ReaderController.cs +++ b/API/Controllers/ReaderController.cs @@ -6,755 +6,763 @@ using System.Threading.Tasks; using API.Data; using API.Data.Repositories; using API.DTOs; +using API.DTOs.Filtering; using API.DTOs.Reader; using API.Entities; using API.Entities.Enums; using API.Extensions; using API.Services; using API.Services.Tasks; +using API.SignalR; using Hangfire; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Logging; +using Microsoft.IdentityModel.Tokens; -namespace API.Controllers +namespace API.Controllers; + +/// +/// For all things regarding reading, mainly focusing on non-Book related entities +/// +public class ReaderController : BaseApiController { - /// - /// For all things 
regarding reading, mainly focusing on non-Book related entities - /// - public class ReaderController : BaseApiController + private readonly ICacheService _cacheService; + private readonly IUnitOfWork _unitOfWork; + private readonly ILogger _logger; + private readonly IReaderService _readerService; + private readonly IBookmarkService _bookmarkService; + private readonly IAccountService _accountService; + private readonly IEventHub _eventHub; + + /// + public ReaderController(ICacheService cacheService, + IUnitOfWork unitOfWork, ILogger logger, + IReaderService readerService, IBookmarkService bookmarkService, + IAccountService accountService, IEventHub eventHub) { - private readonly ICacheService _cacheService; - private readonly IUnitOfWork _unitOfWork; - private readonly ILogger _logger; - private readonly IReaderService _readerService; - private readonly IBookmarkService _bookmarkService; + _cacheService = cacheService; + _unitOfWork = unitOfWork; + _logger = logger; + _readerService = readerService; + _bookmarkService = bookmarkService; + _accountService = accountService; + _eventHub = eventHub; + } - /// - public ReaderController(ICacheService cacheService, - IUnitOfWork unitOfWork, ILogger logger, - IReaderService readerService, IBookmarkService bookmarkService) + /// + /// Returns the PDF for the chapterId. 
+ /// + /// + /// + [HttpGet("pdf")] + [ResponseCache(CacheProfileName = "Hour")] + public async Task GetPdf(int chapterId) + { + var chapter = await _cacheService.Ensure(chapterId); + if (chapter == null) return BadRequest("There was an issue finding pdf file for reading"); + + // Validate the user has access to the PDF + var series = await _unitOfWork.SeriesRepository.GetSeriesForChapter(chapter.Id, + await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername())); + if (series == null) return BadRequest("Invalid Access"); + + try { - _cacheService = cacheService; - _unitOfWork = unitOfWork; - _logger = logger; - _readerService = readerService; - _bookmarkService = bookmarkService; + + var path = _cacheService.GetCachedFile(chapter); + if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"Pdf doesn't exist when it should."); + + return PhysicalFile(path, "application/pdf", Path.GetFileName(path), true); + } + catch (Exception) + { + _cacheService.CleanupChapters(new []{ chapterId }); + throw; + } + } + + /// + /// Returns an image for a given chapter. Side effect: This will cache the chapter images for reading. + /// + /// + /// + /// + [HttpGet("image")] + [ResponseCache(CacheProfileName = "Hour")] + [AllowAnonymous] + public async Task GetImage(int chapterId, int page) + { + if (page < 0) page = 0; + var chapter = await _cacheService.Ensure(chapterId); + if (chapter == null) return BadRequest("There was an issue finding image file for reading"); + + try + { + var path = _cacheService.GetCachedPagePath(chapter, page); + if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {page}. 
Try refreshing to allow re-cache."); + var format = Path.GetExtension(path).Replace(".", ""); + + return PhysicalFile(path, "image/" + format, Path.GetFileName(path), true); + } + catch (Exception) + { + _cacheService.CleanupChapters(new []{ chapterId }); + throw; + } + } + + /// + /// Returns an image for a given bookmark series. Side effect: This will cache the bookmark images for reading. + /// + /// + /// Api key for the user the bookmarks are on + /// + /// We must use api key as bookmarks could be leaked to other users via the API + /// + [HttpGet("bookmark-image")] + [ResponseCache(CacheProfileName = "Hour")] + [AllowAnonymous] + public async Task GetBookmarkImage(int seriesId, string apiKey, int page) + { + if (page < 0) page = 0; + var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey); + + // NOTE: I'm not sure why I need this flow here + var totalPages = await _cacheService.CacheBookmarkForSeries(userId, seriesId); + if (page > totalPages) + { + page = totalPages; } - /// - /// Returns the PDF for the chapterId. 
- /// - /// - /// - [HttpGet("pdf")] - [ResponseCache(CacheProfileName = "Hour")] - public async Task GetPdf(int chapterId) + try { - var chapter = await _cacheService.Ensure(chapterId); - if (chapter == null) return BadRequest("There was an issue finding pdf file for reading"); + var path = _cacheService.GetCachedBookmarkPagePath(seriesId, page); + if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {page}"); + var format = Path.GetExtension(path).Replace(".", ""); - // Validate the user has access to the PDF - var series = await _unitOfWork.SeriesRepository.GetSeriesForChapter(chapter.Id, - await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername())); - if (series == null) return BadRequest("Invalid Access"); + return PhysicalFile(path, "image/" + format, Path.GetFileName(path)); + } + catch (Exception) + { + _cacheService.CleanupBookmarks(new []{ seriesId }); + throw; + } + } - try + /// + /// Returns various information about a Chapter. Side effect: This will cache the chapter images for reading. 
+ /// + /// + /// + [HttpGet("chapter-info")] + public async Task> GetChapterInfo(int chapterId) + { + if (chapterId <= 0) return null; // This can happen occasionally from UI, we should just ignore + var chapter = await _cacheService.Ensure(chapterId); + if (chapter == null) return BadRequest("Could not find Chapter"); + + var dto = await _unitOfWork.ChapterRepository.GetChapterInfoDtoAsync(chapterId); + if (dto == null) return BadRequest("Please perform a scan on this series or library and try again"); + var mangaFile = (await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId)).First(); + + var info = new ChapterInfoDto() + { + ChapterNumber = dto.ChapterNumber, + VolumeNumber = dto.VolumeNumber, + VolumeId = dto.VolumeId, + FileName = Path.GetFileName(mangaFile.FilePath), + SeriesName = dto.SeriesName, + SeriesFormat = dto.SeriesFormat, + SeriesId = dto.SeriesId, + LibraryId = dto.LibraryId, + IsSpecial = dto.IsSpecial, + Pages = dto.Pages, + ChapterTitle = dto.ChapterTitle ?? 
string.Empty, + Subtitle = string.Empty, + Title = dto.SeriesName + }; + + if (info.ChapterTitle is {Length: > 0}) { + info.Title += " - " + info.ChapterTitle; + } + + if (info.IsSpecial && dto.VolumeNumber.Equals(Services.Tasks.Scanner.Parser.Parser.DefaultVolume)) + { + info.Subtitle = info.FileName; + } else if (!info.IsSpecial && info.VolumeNumber.Equals(Services.Tasks.Scanner.Parser.Parser.DefaultVolume)) + { + info.Subtitle = _readerService.FormatChapterName(info.LibraryType, true, true) + info.ChapterNumber; + } + else + { + info.Subtitle = "Volume " + info.VolumeNumber; + if (!info.ChapterNumber.Equals(Services.Tasks.Scanner.Parser.Parser.DefaultChapter)) { - - var path = _cacheService.GetCachedFile(chapter); - if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"Pdf doesn't exist when it should."); - - return PhysicalFile(path, "application/pdf", Path.GetFileName(path), true); - } - catch (Exception) - { - _cacheService.CleanupChapters(new []{ chapterId }); - throw; + info.Subtitle += " " + _readerService.FormatChapterName(info.LibraryType, true, true) + + info.ChapterNumber; } } - /// - /// Returns an image for a given chapter. Side effect: This will cache the chapter images for reading. - /// - /// - /// - /// - [HttpGet("image")] - [ResponseCache(CacheProfileName = "Hour")] - [AllowAnonymous] - public async Task GetImage(int chapterId, int page) + return Ok(info); + } + + /// + /// Returns various information about all bookmark files for a Series. Side effect: This will cache the bookmark images for reading. 
+ /// + /// Series Id for all bookmarks + /// + [HttpGet("bookmark-info")] + public async Task> GetBookmarkInfo(int seriesId) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + var totalPages = await _cacheService.CacheBookmarkForSeries(user.Id, seriesId); + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId, SeriesIncludes.None); + + return Ok(new BookmarkInfoDto() { - if (page < 0) page = 0; - var chapter = await _cacheService.Ensure(chapterId); - if (chapter == null) return BadRequest("There was an issue finding image file for reading"); + SeriesName = series.Name, + SeriesFormat = series.Format, + SeriesId = series.Id, + LibraryId = series.LibraryId, + Pages = totalPages, + }); + } - try - { - var path = _cacheService.GetCachedPagePath(chapter, page); - if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {page}. Try refreshing to allow re-cache."); - var format = Path.GetExtension(path).Replace(".", ""); - return PhysicalFile(path, "image/" + format, Path.GetFileName(path), true); - } - catch (Exception) - { - _cacheService.CleanupChapters(new []{ chapterId }); - throw; - } + /// + /// Marks a Series as read. All volumes and chapters will be marked as read during this process. + /// + /// + /// + [HttpPost("mark-read")] + public async Task MarkRead(MarkReadDto markReadDto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); + await _readerService.MarkSeriesAsRead(user, markReadDto.SeriesId); + + if (!await _unitOfWork.CommitAsync()) return BadRequest("There was an issue saving progress"); + + return Ok(); + } + + + /// + /// Marks a Series as Unread. All volumes and chapters will be marked as unread during this process. 
+ /// + /// + /// + [HttpPost("mark-unread")] + public async Task MarkUnread(MarkReadDto markReadDto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); + await _readerService.MarkSeriesAsUnread(user, markReadDto.SeriesId); + + if (!await _unitOfWork.CommitAsync()) return BadRequest("There was an issue saving progress"); + + return Ok(); + } + + /// + /// Marks all chapters within a volume as unread + /// + /// + /// + [HttpPost("mark-volume-unread")] + public async Task MarkVolumeAsUnread(MarkVolumeReadDto markVolumeReadDto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); + + var chapters = await _unitOfWork.ChapterRepository.GetChaptersAsync(markVolumeReadDto.VolumeId); + await _readerService.MarkChaptersAsUnread(user, markVolumeReadDto.SeriesId, chapters); + + if (await _unitOfWork.CommitAsync()) + { + return Ok(); } - /// - /// Returns an image for a given bookmark series. Side effect: This will cache the bookmark images for reading. 
- /// - /// - /// Api key for the user the bookmarks are on - /// - /// We must use api key as bookmarks could be leaked to other users via the API - /// - [HttpGet("bookmark-image")] - [ResponseCache(CacheProfileName = "Hour")] - [AllowAnonymous] - public async Task GetBookmarkImage(int seriesId, string apiKey, int page) + return BadRequest("Could not save progress"); + } + + /// + /// Marks all chapters within a volume as Read + /// + /// + /// + [HttpPost("mark-volume-read")] + public async Task MarkVolumeAsRead(MarkVolumeReadDto markVolumeReadDto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); + + var chapters = await _unitOfWork.ChapterRepository.GetChaptersAsync(markVolumeReadDto.VolumeId); + await _readerService.MarkChaptersAsRead(user, markVolumeReadDto.SeriesId, chapters); + await _eventHub.SendMessageAsync(MessageFactory.UserProgressUpdate, + MessageFactory.UserProgressUpdateEvent(user.Id, user.UserName, markVolumeReadDto.SeriesId, + markVolumeReadDto.VolumeId, 0, chapters.Sum(c => c.Pages))); + + if (await _unitOfWork.CommitAsync()) { - if (page < 0) page = 0; - var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey); - - // NOTE: I'm not sure why I need this flow here - var totalPages = await _cacheService.CacheBookmarkForSeries(userId, seriesId); - if (page > totalPages) - { - page = totalPages; - } - - try - { - var path = _cacheService.GetCachedBookmarkPagePath(seriesId, page); - if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {page}"); - var format = Path.GetExtension(path).Replace(".", ""); - - return PhysicalFile(path, "image/" + format, Path.GetFileName(path)); - } - catch (Exception) - { - _cacheService.CleanupBookmarks(new []{ seriesId }); - throw; - } + return Ok(); } - /// - /// Returns various information about a Chapter. Side effect: This will cache the chapter images for reading. 
- /// - /// - /// - [HttpGet("chapter-info")] - public async Task> GetChapterInfo(int chapterId) + return BadRequest("Could not save progress"); + } + + + /// + /// Marks all chapters within a list of volumes as Read. All volumes must belong to the same Series. + /// + /// + /// + [HttpPost("mark-multiple-read")] + public async Task MarkMultipleAsRead(MarkVolumesReadDto dto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); + user.Progresses ??= new List(); + + var chapterIds = await _unitOfWork.VolumeRepository.GetChapterIdsByVolumeIds(dto.VolumeIds); + foreach (var chapterId in dto.ChapterIds) { - if (chapterId <= 0) return null; // This can happen occasionally from UI, we should just ignore - var chapter = await _cacheService.Ensure(chapterId); - if (chapter == null) return BadRequest("Could not find Chapter"); - - var dto = await _unitOfWork.ChapterRepository.GetChapterInfoDtoAsync(chapterId); - if (dto == null) return BadRequest("Please perform a scan on this series or library and try again"); - var mangaFile = (await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId)).First(); - - var info = new ChapterInfoDto() - { - ChapterNumber = dto.ChapterNumber, - VolumeNumber = dto.VolumeNumber, - VolumeId = dto.VolumeId, - FileName = Path.GetFileName(mangaFile.FilePath), - SeriesName = dto.SeriesName, - SeriesFormat = dto.SeriesFormat, - SeriesId = dto.SeriesId, - LibraryId = dto.LibraryId, - IsSpecial = dto.IsSpecial, - Pages = dto.Pages, - ChapterTitle = dto.ChapterTitle ?? 
string.Empty, - Subtitle = string.Empty, - Title = dto.SeriesName - }; - - if (info.ChapterTitle is {Length: > 0}) { - info.Title += " - " + info.ChapterTitle; - } - - if (info.IsSpecial && dto.VolumeNumber.Equals(Services.Tasks.Scanner.Parser.Parser.DefaultVolume)) - { - info.Subtitle = info.FileName; - } else if (!info.IsSpecial && info.VolumeNumber.Equals(Services.Tasks.Scanner.Parser.Parser.DefaultVolume)) - { - info.Subtitle = _readerService.FormatChapterName(info.LibraryType, true, true) + info.ChapterNumber; - } - else - { - info.Subtitle = "Volume " + info.VolumeNumber; - if (!info.ChapterNumber.Equals(Services.Tasks.Scanner.Parser.Parser.DefaultChapter)) - { - info.Subtitle += " " + _readerService.FormatChapterName(info.LibraryType, true, true) + - info.ChapterNumber; - } - } - - return Ok(info); + chapterIds.Add(chapterId); } + var chapters = await _unitOfWork.ChapterRepository.GetChaptersByIdsAsync(chapterIds); + await _readerService.MarkChaptersAsRead(user, dto.SeriesId, chapters.ToList()); - /// - /// Returns various information about all bookmark files for a Series. Side effect: This will cache the bookmark images for reading. - /// - /// Series Id for all bookmarks - /// - [HttpGet("bookmark-info")] - public async Task> GetBookmarkInfo(int seriesId) + if (await _unitOfWork.CommitAsync()) { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); - var totalPages = await _cacheService.CacheBookmarkForSeries(user.Id, seriesId); - var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId, SeriesIncludes.None); - - return Ok(new BookmarkInfoDto() - { - SeriesName = series.Name, - SeriesFormat = series.Format, - SeriesId = series.Id, - LibraryId = series.LibraryId, - Pages = totalPages, - }); - } - - - /// - /// Marks a Series as read. All volumes and chapters will be marked as read during this process. 
- /// - /// - /// - [HttpPost("mark-read")] - public async Task MarkRead(MarkReadDto markReadDto) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); - await _readerService.MarkSeriesAsRead(user, markReadDto.SeriesId); - - if (!await _unitOfWork.CommitAsync()) return BadRequest("There was an issue saving progress"); - return Ok(); } - /// - /// Marks a Series as Unread. All volumes and chapters will be marked as unread during this process. - /// - /// - /// - [HttpPost("mark-unread")] - public async Task MarkUnread(MarkReadDto markReadDto) + return BadRequest("Could not save progress"); + } + + /// + /// Marks all chapters within a list of volumes as Unread. All volumes must belong to the same Series. + /// + /// + /// + [HttpPost("mark-multiple-unread")] + public async Task MarkMultipleAsUnread(MarkVolumesReadDto dto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); + user.Progresses ??= new List(); + + var chapterIds = await _unitOfWork.VolumeRepository.GetChapterIdsByVolumeIds(dto.VolumeIds); + foreach (var chapterId in dto.ChapterIds) { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); - await _readerService.MarkSeriesAsUnread(user, markReadDto.SeriesId); - - if (!await _unitOfWork.CommitAsync()) return BadRequest("There was an issue saving progress"); + chapterIds.Add(chapterId); + } + var chapters = await _unitOfWork.ChapterRepository.GetChaptersByIdsAsync(chapterIds); + await _readerService.MarkChaptersAsUnread(user, dto.SeriesId, chapters.ToList()); + if (await _unitOfWork.CommitAsync()) + { return Ok(); } - /// - /// Marks all chapters within a volume as unread - /// - /// - /// - [HttpPost("mark-volume-unread")] - public async Task MarkVolumeAsUnread(MarkVolumeReadDto markVolumeReadDto) + return BadRequest("Could not save progress"); + } + + /// + /// 
Marks all chapters within a list of series as Read. + /// + /// + /// + [HttpPost("mark-multiple-series-read")] + public async Task MarkMultipleSeriesAsRead(MarkMultipleSeriesAsReadDto dto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); + user.Progresses ??= new List(); + + var volumes = await _unitOfWork.VolumeRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true); + foreach (var volume in volumes) { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); + await _readerService.MarkChaptersAsRead(user, volume.SeriesId, volume.Chapters); + } - var chapters = await _unitOfWork.ChapterRepository.GetChaptersAsync(markVolumeReadDto.VolumeId); - await _readerService.MarkChaptersAsUnread(user, markVolumeReadDto.SeriesId, chapters); + if (await _unitOfWork.CommitAsync()) + { + return Ok(); + } + return BadRequest("Could not save progress"); + } + + /// + /// Marks all chapters within a list of series as Unread. 
+ /// + /// + /// + [HttpPost("mark-multiple-series-unread")] + public async Task MarkMultipleSeriesAsUnread(MarkMultipleSeriesAsReadDto dto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); + user.Progresses ??= new List(); + + var volumes = await _unitOfWork.VolumeRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true); + foreach (var volume in volumes) + { + await _readerService.MarkChaptersAsUnread(user, volume.SeriesId, volume.Chapters); + } + + if (await _unitOfWork.CommitAsync()) + { + return Ok(); + } + + return BadRequest("Could not save progress"); + } + + /// + /// Returns Progress (page number) for a chapter for the logged in user + /// + /// + /// + [HttpGet("get-progress")] + public async Task> GetProgress(int chapterId) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); + var progressBookmark = new ProgressDto() + { + PageNum = 0, + ChapterId = chapterId, + VolumeId = 0, + SeriesId = 0 + }; + if (user.Progresses == null) return Ok(progressBookmark); + var progress = user.Progresses.FirstOrDefault(x => x.AppUserId == user.Id && x.ChapterId == chapterId); + + if (progress != null) + { + progressBookmark.SeriesId = progress.SeriesId; + progressBookmark.VolumeId = progress.VolumeId; + progressBookmark.PageNum = progress.PagesRead; + progressBookmark.BookScrollId = progress.BookScrollId; + } + return Ok(progressBookmark); + } + + /// + /// Save page against Chapter for logged in user + /// + /// + /// + [HttpPost("progress")] + public async Task BookmarkProgress(ProgressDto progressDto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + + if (await _readerService.SaveReadingProgress(progressDto, user.Id)) return Ok(true); + + return BadRequest("Could not save progress"); + } + + /// + /// Continue point is the chapter which you should start reading again from. 
If there is no progress on a series, then the first chapter will be returned (non-special unless only specials). + /// Otherwise, loop through the chapters and volumes in order to find the next chapter which has progress. + /// + /// + [HttpGet("continue-point")] + public async Task> GetContinuePoint(int seriesId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + + return Ok(await _readerService.GetContinuePoint(seriesId, userId)); + } + + /// + /// Returns if the user has reading progress on the Series + /// + /// + /// + [HttpGet("has-progress")] + public async Task> HasProgress(int seriesId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return Ok(await _unitOfWork.AppUserProgressRepository.HasAnyProgressOnSeriesAsync(seriesId, userId)); + } + + /// + /// Marks every chapter that is sorted below the passed number as Read. This will not mark any specials as read. + /// + /// This is built for Tachiyomi and is not expected to be called by any other place + /// + [Obsolete("Deprecated. Use 'Tachiyomi/mark-chapter-until-as-read'")] + [HttpPost("mark-chapter-until-as-read")] + public async Task> MarkChaptersUntilAsRead(int seriesId, float chapterNumber) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); + user.Progresses ??= new List(); + + // Tachiyomi sends chapter 0.0f when there's no chapters read. + // Due to the encoding for volumes this marks all chapters in volume 0 (loose chapters) as read so we ignore it + if (chapterNumber == 0.0f) return true; + + if (chapterNumber < 1.0f) + { + // This is a hack to track volume number. 
We need to map it back by x100 + var volumeNumber = int.Parse($"{chapterNumber * 100f}"); + await _readerService.MarkVolumesUntilAsRead(user, seriesId, volumeNumber); + } + else + { + await _readerService.MarkChaptersUntilAsRead(user, seriesId, chapterNumber); + } + + + _unitOfWork.UserRepository.Update(user); + + if (!_unitOfWork.HasChanges()) return Ok(true); + if (await _unitOfWork.CommitAsync()) return Ok(true); + + await _unitOfWork.RollbackAsync(); + return Ok(false); + } + + + /// + /// Returns a list of bookmarked pages for a given Chapter + /// + /// + /// + [HttpGet("chapter-bookmarks")] + public async Task>> GetBookmarks(int chapterId) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); + if (user.Bookmarks == null) return Ok(Array.Empty()); + return Ok(await _unitOfWork.UserRepository.GetBookmarkDtosForChapter(user.Id, chapterId)); + } + + /// + /// Returns a list of all bookmarked pages for a User + /// + /// Only supports SeriesNameQuery + /// + [HttpPost("all-bookmarks")] + public async Task>> GetAllBookmarks(FilterDto filterDto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); + if (user.Bookmarks == null) return Ok(Array.Empty()); + + return Ok(await _unitOfWork.UserRepository.GetAllBookmarkDtos(user.Id, filterDto)); + } + + /// + /// Removes all bookmarks for all chapters linked to a Series + /// + /// + /// + [HttpPost("remove-bookmarks")] + public async Task RemoveBookmarks(RemoveBookmarkForSeriesDto dto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); + if (user.Bookmarks == null) return Ok("Nothing to remove"); + + try + { + var bookmarksToRemove = user.Bookmarks.Where(bmk => bmk.SeriesId == dto.SeriesId).ToList(); + user.Bookmarks = user.Bookmarks.Where(bmk => bmk.SeriesId != dto.SeriesId).ToList(); 
_unitOfWork.UserRepository.Update(user); - if (await _unitOfWork.CommitAsync()) + if (!_unitOfWork.HasChanges() || await _unitOfWork.CommitAsync()) { - return Ok(); - } - - return BadRequest("Could not save progress"); - } - - /// - /// Marks all chapters within a volume as Read - /// - /// - /// - [HttpPost("mark-volume-read")] - public async Task MarkVolumeAsRead(MarkVolumeReadDto markVolumeReadDto) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); - - var chapters = await _unitOfWork.ChapterRepository.GetChaptersAsync(markVolumeReadDto.VolumeId); - await _readerService.MarkChaptersAsRead(user, markVolumeReadDto.SeriesId, chapters); - - _unitOfWork.UserRepository.Update(user); - - if (await _unitOfWork.CommitAsync()) - { - return Ok(); - } - - return BadRequest("Could not save progress"); - } - - - /// - /// Marks all chapters within a list of volumes as Read. All volumes must belong to the same Series. - /// - /// - /// - [HttpPost("mark-multiple-read")] - public async Task MarkMultipleAsRead(MarkVolumesReadDto dto) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); - user.Progresses ??= new List(); - - var chapterIds = await _unitOfWork.VolumeRepository.GetChapterIdsByVolumeIds(dto.VolumeIds); - foreach (var chapterId in dto.ChapterIds) - { - chapterIds.Add(chapterId); - } - var chapters = await _unitOfWork.ChapterRepository.GetChaptersByIdsAsync(chapterIds); - await _readerService.MarkChaptersAsRead(user, dto.SeriesId, chapters); - - _unitOfWork.UserRepository.Update(user); - - if (await _unitOfWork.CommitAsync()) - { - return Ok(); - } - - return BadRequest("Could not save progress"); - } - - /// - /// Marks all chapters within a list of volumes as Unread. All volumes must belong to the same Series. 
- /// - /// - /// - [HttpPost("mark-multiple-unread")] - public async Task MarkMultipleAsUnread(MarkVolumesReadDto dto) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); - user.Progresses ??= new List(); - - var chapterIds = await _unitOfWork.VolumeRepository.GetChapterIdsByVolumeIds(dto.VolumeIds); - foreach (var chapterId in dto.ChapterIds) - { - chapterIds.Add(chapterId); - } - var chapters = await _unitOfWork.ChapterRepository.GetChaptersByIdsAsync(chapterIds); - await _readerService.MarkChaptersAsUnread(user, dto.SeriesId, chapters); - - _unitOfWork.UserRepository.Update(user); - - if (await _unitOfWork.CommitAsync()) - { - return Ok(); - } - - return BadRequest("Could not save progress"); - } - - /// - /// Marks all chapters within a list of series as Read. - /// - /// - /// - [HttpPost("mark-multiple-series-read")] - public async Task MarkMultipleSeriesAsRead(MarkMultipleSeriesAsReadDto dto) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); - user.Progresses ??= new List(); - - var volumes = await _unitOfWork.VolumeRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true); - foreach (var volume in volumes) - { - await _readerService.MarkChaptersAsRead(user, volume.SeriesId, volume.Chapters); - } - - _unitOfWork.UserRepository.Update(user); - - if (await _unitOfWork.CommitAsync()) - { - return Ok(); - } - - return BadRequest("Could not save progress"); - } - - /// - /// Marks all chapters within a list of series as Unread. 
- /// - /// - /// - [HttpPost("mark-multiple-series-unread")] - public async Task MarkMultipleSeriesAsUnread(MarkMultipleSeriesAsReadDto dto) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); - user.Progresses ??= new List(); - - var volumes = await _unitOfWork.VolumeRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true); - foreach (var volume in volumes) - { - await _readerService.MarkChaptersAsUnread(user, volume.SeriesId, volume.Chapters); - } - - _unitOfWork.UserRepository.Update(user); - - if (await _unitOfWork.CommitAsync()) - { - return Ok(); - } - - return BadRequest("Could not save progress"); - } - - /// - /// Returns Progress (page number) for a chapter for the logged in user - /// - /// - /// - [HttpGet("get-progress")] - public async Task> GetProgress(int chapterId) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); - var progressBookmark = new ProgressDto() - { - PageNum = 0, - ChapterId = chapterId, - VolumeId = 0, - SeriesId = 0 - }; - if (user.Progresses == null) return Ok(progressBookmark); - var progress = user.Progresses.FirstOrDefault(x => x.AppUserId == user.Id && x.ChapterId == chapterId); - - if (progress != null) - { - progressBookmark.SeriesId = progress.SeriesId; - progressBookmark.VolumeId = progress.VolumeId; - progressBookmark.PageNum = progress.PagesRead; - progressBookmark.BookScrollId = progress.BookScrollId; - } - return Ok(progressBookmark); - } - - /// - /// Save page against Chapter for logged in user - /// - /// - /// - [HttpPost("progress")] - public async Task BookmarkProgress(ProgressDto progressDto) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); - - if (await _readerService.SaveReadingProgress(progressDto, user.Id)) return Ok(true); - - return BadRequest("Could not save progress"); - } - - /// - /// Continue point is the chapter 
which you should start reading again from. If there is no progress on a series, then the first chapter will be returned (non-special unless only specials). - /// Otherwise, loop through the chapters and volumes in order to find the next chapter which has progress. - /// - /// - [HttpGet("continue-point")] - public async Task> GetContinuePoint(int seriesId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - - return Ok(await _readerService.GetContinuePoint(seriesId, userId)); - } - - /// - /// Returns if the user has reading progress on the Series - /// - /// - /// - [HttpGet("has-progress")] - public async Task> HasProgress(int seriesId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - return Ok(await _unitOfWork.AppUserProgressRepository.HasAnyProgressOnSeriesAsync(seriesId, userId)); - } - - /// - /// Marks every chapter that is sorted below the passed number as Read. This will not mark any specials as read. - /// - /// This is built for Tachiyomi and is not expected to be called by any other place - /// - [Obsolete("Deprecated. Use 'Tachiyomi/mark-chapter-until-as-read'")] - [HttpPost("mark-chapter-until-as-read")] - public async Task> MarkChaptersUntilAsRead(int seriesId, float chapterNumber) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); - user.Progresses ??= new List(); - - // Tachiyomi sends chapter 0.0f when there's no chapters read. - // Due to the encoding for volumes this marks all chapters in volume 0 (loose chapters) as read so we ignore it - if (chapterNumber == 0.0f) return true; - - if (chapterNumber < 1.0f) - { - // This is a hack to track volume number. 
We need to map it back by x100 - var volumeNumber = int.Parse($"{chapterNumber * 100f}"); - await _readerService.MarkVolumesUntilAsRead(user, seriesId, volumeNumber); - } - else - { - await _readerService.MarkChaptersUntilAsRead(user, seriesId, chapterNumber); - } - - - _unitOfWork.UserRepository.Update(user); - - if (!_unitOfWork.HasChanges()) return Ok(true); - if (await _unitOfWork.CommitAsync()) return Ok(true); - - await _unitOfWork.RollbackAsync(); - return Ok(false); - } - - - /// - /// Returns a list of bookmarked pages for a given Chapter - /// - /// - /// - [HttpGet("get-bookmarks")] - public async Task>> GetBookmarks(int chapterId) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); - if (user.Bookmarks == null) return Ok(Array.Empty()); - return Ok(await _unitOfWork.UserRepository.GetBookmarkDtosForChapter(user.Id, chapterId)); - } - - /// - /// Returns a list of all bookmarked pages for a User - /// - /// - [HttpGet("get-all-bookmarks")] - public async Task>> GetAllBookmarks() - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); - if (user.Bookmarks == null) return Ok(Array.Empty()); - return Ok(await _unitOfWork.UserRepository.GetAllBookmarkDtos(user.Id)); - } - - /// - /// Removes all bookmarks for all chapters linked to a Series - /// - /// - /// - [HttpPost("remove-bookmarks")] - public async Task RemoveBookmarks(RemoveBookmarkForSeriesDto dto) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); - if (user.Bookmarks == null) return Ok("Nothing to remove"); - - try - { - var bookmarksToRemove = user.Bookmarks.Where(bmk => bmk.SeriesId == dto.SeriesId).ToList(); - user.Bookmarks = user.Bookmarks.Where(bmk => bmk.SeriesId != dto.SeriesId).ToList(); - _unitOfWork.UserRepository.Update(user); - - if (!_unitOfWork.HasChanges() || await 
_unitOfWork.CommitAsync()) + try { - try - { - await _bookmarkService.DeleteBookmarkFiles(bookmarksToRemove); - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an issue cleaning up old bookmarks"); - } - return Ok(); - } - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an exception when trying to clear bookmarks"); - await _unitOfWork.RollbackAsync(); - } - - return BadRequest("Could not clear bookmarks"); - } - - /// - /// Removes all bookmarks for all chapters linked to a Series - /// - /// - /// - [HttpPost("bulk-remove-bookmarks")] - public async Task BulkRemoveBookmarks(BulkRemoveBookmarkForSeriesDto dto) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); - if (user.Bookmarks == null) return Ok("Nothing to remove"); - - try - { - foreach (var seriesId in dto.SeriesIds) - { - var bookmarksToRemove = user.Bookmarks.Where(bmk => bmk.SeriesId == seriesId).ToList(); - user.Bookmarks = user.Bookmarks.Where(bmk => bmk.SeriesId != seriesId).ToList(); - _unitOfWork.UserRepository.Update(user); await _bookmarkService.DeleteBookmarkFiles(bookmarksToRemove); } - - - if (!_unitOfWork.HasChanges() || await _unitOfWork.CommitAsync()) + catch (Exception ex) { - return Ok(); + _logger.LogError(ex, "There was an issue cleaning up old bookmarks"); } - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an exception when trying to clear bookmarks"); - await _unitOfWork.RollbackAsync(); - } - - return BadRequest("Could not clear bookmarks"); - } - - /// - /// Returns all bookmarked pages for a given volume - /// - /// - /// - [HttpGet("get-volume-bookmarks")] - public async Task>> GetBookmarksForVolume(int volumeId) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); - if (user.Bookmarks == null) return Ok(Array.Empty()); - return Ok(await _unitOfWork.UserRepository.GetBookmarkDtosForVolume(user.Id, 
volumeId)); - } - - /// - /// Returns all bookmarked pages for a given series - /// - /// - /// - [HttpGet("get-series-bookmarks")] - public async Task>> GetBookmarksForSeries(int seriesId) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); - if (user.Bookmarks == null) return Ok(Array.Empty()); - - return Ok(await _unitOfWork.UserRepository.GetBookmarkDtosForSeries(user.Id, seriesId)); - } - - /// - /// Bookmarks a page against a Chapter - /// - /// - /// - [HttpPost("bookmark")] - public async Task BookmarkPage(BookmarkDto bookmarkDto) - { - // Don't let user save past total pages. - bookmarkDto.Page = await _readerService.CapPageToChapter(bookmarkDto.ChapterId, bookmarkDto.Page); - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); - var chapter = await _cacheService.Ensure(bookmarkDto.ChapterId); - if (chapter == null) return BadRequest("Could not find cached image. Reload and try again."); - var path = _cacheService.GetCachedPagePath(chapter, bookmarkDto.Page); - - if (await _bookmarkService.BookmarkPage(user, bookmarkDto, path)) - { - BackgroundJob.Enqueue(() => _cacheService.CleanupBookmarkCache(bookmarkDto.SeriesId)); return Ok(); } - - return BadRequest("Could not save bookmark"); } - - /// - /// Removes a bookmarked page for a Chapter - /// - /// - /// - [HttpPost("unbookmark")] - public async Task UnBookmarkPage(BookmarkDto bookmarkDto) + catch (Exception ex) { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); - if (user.Bookmarks == null) return Ok(); - - if (await _bookmarkService.RemoveBookmarkPage(user, bookmarkDto)) - { - BackgroundJob.Enqueue(() => _cacheService.CleanupBookmarkCache(bookmarkDto.SeriesId)); - return Ok(); - } - - return BadRequest("Could not remove bookmark"); - } - - /// - /// Returns the next logical chapter from the series. 
- /// - /// - /// V1 → V2 → V3 chapter 0 → V3 chapter 10 → SP 01 → SP 02 - /// - /// - /// - /// - /// chapter id for next manga - [HttpGet("next-chapter")] - public async Task> GetNextChapter(int seriesId, int volumeId, int currentChapterId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - return await _readerService.GetNextChapterIdAsync(seriesId, volumeId, currentChapterId, userId); - } - - - /// - /// Returns the previous logical chapter from the series. - /// - /// - /// V1 ← V2 ← V3 chapter 0 ← V3 chapter 10 ← SP 01 ← SP 02 - /// - /// - /// - /// - /// chapter id for next manga - [HttpGet("prev-chapter")] - public async Task> GetPreviousChapter(int seriesId, int volumeId, int currentChapterId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - return await _readerService.GetPrevChapterIdAsync(seriesId, volumeId, currentChapterId, userId); - } - - /// - /// For the current user, returns an estimate on how long it would take to finish reading the series. - /// - /// For Epubs, this does not check words inside a chapter due to overhead so may not work in all cases. - /// - /// - [HttpGet("time-left")] - public async Task> GetEstimateToCompletion(int seriesId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); - - // Get all sum of all chapters with progress that is complete then subtract from series. 
Multiply by modifiers - var progress = await _unitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(seriesId, userId); - if (series.Format == MangaFormat.Epub) - { - var chapters = - await _unitOfWork.ChapterRepository.GetChaptersByIdsAsync(progress.Select(p => p.ChapterId).ToList()); - // Word count - var progressCount = chapters.Sum(c => c.WordCount); - var wordsLeft = series.WordCount - progressCount; - return _readerService.GetTimeEstimate(wordsLeft, 0, true); - } - - var progressPageCount = progress.Sum(p => p.PagesRead); - var pagesLeft = series.Pages - progressPageCount; - return _readerService.GetTimeEstimate(0, pagesLeft, false); + _logger.LogError(ex, "There was an exception when trying to clear bookmarks"); + await _unitOfWork.RollbackAsync(); } + return BadRequest("Could not clear bookmarks"); } + + /// + /// Removes all bookmarks for all chapters linked to a Series + /// + /// + /// + [HttpPost("bulk-remove-bookmarks")] + public async Task BulkRemoveBookmarks(BulkRemoveBookmarkForSeriesDto dto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); + if (user.Bookmarks == null) return Ok("Nothing to remove"); + + try + { + foreach (var seriesId in dto.SeriesIds) + { + var bookmarksToRemove = user.Bookmarks.Where(bmk => bmk.SeriesId == seriesId).ToList(); + user.Bookmarks = user.Bookmarks.Where(bmk => bmk.SeriesId != seriesId).ToList(); + _unitOfWork.UserRepository.Update(user); + await _bookmarkService.DeleteBookmarkFiles(bookmarksToRemove); + } + + + if (!_unitOfWork.HasChanges() || await _unitOfWork.CommitAsync()) + { + return Ok(); + } + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an exception when trying to clear bookmarks"); + await _unitOfWork.RollbackAsync(); + } + + return BadRequest("Could not clear bookmarks"); + } + + /// + /// Returns all bookmarked pages for a given volume + /// + /// + /// + [HttpGet("volume-bookmarks")] + public async Task>> 
GetBookmarksForVolume(int volumeId) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); + if (user.Bookmarks == null) return Ok(Array.Empty()); + return Ok(await _unitOfWork.UserRepository.GetBookmarkDtosForVolume(user.Id, volumeId)); + } + + /// + /// Returns all bookmarked pages for a given series + /// + /// + /// + [HttpGet("series-bookmarks")] + public async Task>> GetBookmarksForSeries(int seriesId) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); + if (user.Bookmarks == null) return Ok(Array.Empty()); + + return Ok(await _unitOfWork.UserRepository.GetBookmarkDtosForSeries(user.Id, seriesId)); + } + + /// + /// Bookmarks a page against a Chapter + /// + /// This has a side effect of caching the chapter files to disk + /// + /// + [HttpPost("bookmark")] + public async Task BookmarkPage(BookmarkDto bookmarkDto) + { + // Don't let user save past total pages. + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); + if (user == null) return new UnauthorizedResult(); + + if (!await _accountService.HasBookmarkPermission(user)) + return BadRequest("You do not have permission to bookmark"); + + var chapter = await _cacheService.Ensure(bookmarkDto.ChapterId); + if (chapter == null) return BadRequest("Could not find cached image. 
Reload and try again."); + + bookmarkDto.Page = _readerService.CapPageToChapter(chapter, bookmarkDto.Page); + var path = _cacheService.GetCachedPagePath(chapter, bookmarkDto.Page); + + if (!await _bookmarkService.BookmarkPage(user, bookmarkDto, path)) return BadRequest("Could not save bookmark"); + + BackgroundJob.Enqueue(() => _cacheService.CleanupBookmarkCache(bookmarkDto.SeriesId)); + return Ok(); + } + + /// + /// Removes a bookmarked page for a Chapter + /// + /// + /// + [HttpPost("unbookmark")] + public async Task UnBookmarkPage(BookmarkDto bookmarkDto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Bookmarks); + if (user == null) return new UnauthorizedResult(); + if (user.Bookmarks.IsNullOrEmpty()) return Ok(); + + if (!await _accountService.HasBookmarkPermission(user)) + return BadRequest("You do not have permission to unbookmark"); + + if (!await _bookmarkService.RemoveBookmarkPage(user, bookmarkDto)) + return BadRequest("Could not remove bookmark"); + BackgroundJob.Enqueue(() => _cacheService.CleanupBookmarkCache(bookmarkDto.SeriesId)); + return Ok(); + } + + /// + /// Returns the next logical chapter from the series. + /// + /// + /// V1 → V2 → V3 chapter 0 → V3 chapter 10 → SP 01 → SP 02 + /// + /// + /// + /// + /// chapter id for next manga + [ResponseCache(CacheProfileName = "Hour", VaryByQueryKeys = new string[] { "seriesId", "volumeId", "currentChapterId"})] + [HttpGet("next-chapter")] + public async Task> GetNextChapter(int seriesId, int volumeId, int currentChapterId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return await _readerService.GetNextChapterIdAsync(seriesId, volumeId, currentChapterId, userId); + } + + + /// + /// Returns the previous logical chapter from the series. 
+ /// + /// + /// V1 ← V2 ← V3 chapter 0 ← V3 chapter 10 ← SP 01 ← SP 02 + /// + /// + /// + /// + /// chapter id for next manga + [ResponseCache(CacheProfileName = "Hour", VaryByQueryKeys = new string[] { "seriesId", "volumeId", "currentChapterId"})] + [HttpGet("prev-chapter")] + public async Task> GetPreviousChapter(int seriesId, int volumeId, int currentChapterId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return await _readerService.GetPrevChapterIdAsync(seriesId, volumeId, currentChapterId, userId); + } + + /// + /// For the current user, returns an estimate on how long it would take to finish reading the series. + /// + /// For Epubs, this does not check words inside a chapter due to overhead so may not work in all cases. + /// + /// + [HttpGet("time-left")] + public async Task> GetEstimateToCompletion(int seriesId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); + + // Get all sum of all chapters with progress that is complete then subtract from series. 
Multiply by modifiers + var progress = await _unitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(seriesId, userId); + if (series.Format == MangaFormat.Epub) + { + var chapters = + await _unitOfWork.ChapterRepository.GetChaptersByIdsAsync(progress.Select(p => p.ChapterId).ToList()); + // Word count + var progressCount = chapters.Sum(c => c.WordCount); + var wordsLeft = series.WordCount - progressCount; + return _readerService.GetTimeEstimate(wordsLeft, 0, true); + } + + var progressPageCount = progress.Sum(p => p.PagesRead); + var pagesLeft = series.Pages - progressPageCount; + return _readerService.GetTimeEstimate(0, pagesLeft, false); + } + } diff --git a/API/Controllers/ReadingListController.cs b/API/Controllers/ReadingListController.cs index 5f2b61ff0..1428e81f9 100644 --- a/API/Controllers/ReadingListController.cs +++ b/API/Controllers/ReadingListController.cs @@ -13,483 +13,488 @@ using API.SignalR; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; -namespace API.Controllers +namespace API.Controllers; + +[Authorize] +public class ReadingListController : BaseApiController { - [Authorize] - public class ReadingListController : BaseApiController + private readonly IUnitOfWork _unitOfWork; + private readonly IEventHub _eventHub; + private readonly IReadingListService _readingListService; + + public ReadingListController(IUnitOfWork unitOfWork, IEventHub eventHub, IReadingListService readingListService) { - private readonly IUnitOfWork _unitOfWork; - private readonly IEventHub _eventHub; - private readonly IReadingListService _readingListService; - private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst(); + _unitOfWork = unitOfWork; + _eventHub = eventHub; + _readingListService = readingListService; + } - public ReadingListController(IUnitOfWork unitOfWork, IEventHub eventHub, IReadingListService readingListService) + /// + /// Fetches a single Reading List + 
/// + /// + /// + [HttpGet] + public async Task>> GetList(int readingListId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return Ok(await _unitOfWork.ReadingListRepository.GetReadingListDtoByIdAsync(readingListId, userId)); + } + + /// + /// Returns reading lists (paginated) for a given user. + /// + /// Include Promoted Reading Lists along with user's Reading Lists. Defaults to true + /// Pagination parameters + /// + [HttpPost("lists")] + public async Task>> GetListsForUser([FromQuery] UserParams userParams, bool includePromoted = true) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + var items = await _unitOfWork.ReadingListRepository.GetReadingListDtosForUserAsync(userId, includePromoted, + userParams); + Response.AddPaginationHeader(items.CurrentPage, items.PageSize, items.TotalCount, items.TotalPages); + + return Ok(items); + } + + /// + /// Returns all Reading Lists the user has access to that have a series within it. 
+ /// + /// + /// + [HttpGet("lists-for-series")] + public async Task>> GetListsForSeries(int seriesId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + var items = await _unitOfWork.ReadingListRepository.GetReadingListDtosForSeriesAndUserAsync(userId, seriesId, true); + + return Ok(items); + } + + /// + /// Fetches all reading list items for a given list including rich metadata around series, volume, chapters, and progress + /// + /// This call is expensive + /// + /// + [HttpGet("items")] + public async Task>> GetListForUser(int readingListId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + var items = await _unitOfWork.ReadingListRepository.GetReadingListItemDtosByIdAsync(readingListId, userId); + return Ok(items); + } + + + /// + /// Updates an items position + /// + /// + /// + [HttpPost("update-position")] + public async Task UpdateListItemPosition(UpdateReadingListPosition dto) + { + // Make sure UI buffers events + var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, User.GetUsername()); + if (user == null) { - _unitOfWork = unitOfWork; - _eventHub = eventHub; - _readingListService = readingListService; + return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); } - /// - /// Fetches a single Reading List - /// - /// - /// - [HttpGet] - public async Task>> GetList(int readingListId) + if (await _readingListService.UpdateReadingListItemPosition(dto)) return Ok("Updated"); + + + return BadRequest("Couldn't update position"); + } + + /// + /// Deletes a list item from the list. 
Will reorder all item positions afterwards + /// + /// + /// + [HttpPost("delete-item")] + public async Task DeleteListItem(UpdateReadingListPosition dto) + { + var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, User.GetUsername()); + if (user == null) { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - return Ok(await _unitOfWork.ReadingListRepository.GetReadingListDtoByIdAsync(readingListId, userId)); + return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); } - /// - /// Returns reading lists (paginated) for a given user. - /// - /// Defaults to true - /// - [HttpPost("lists")] - public async Task>> GetListsForUser([FromQuery] UserParams userParams, [FromQuery] bool includePromoted = true) + if (await _readingListService.DeleteReadingListItem(dto)) { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - var items = await _unitOfWork.ReadingListRepository.GetReadingListDtosForUserAsync(userId, includePromoted, - userParams); - Response.AddPaginationHeader(items.CurrentPage, items.PageSize, items.TotalCount, items.TotalPages); - - return Ok(items); + return Ok("Updated"); } - /// - /// Returns all Reading Lists the user has access to that have a series within it. 
- /// - /// - /// - [HttpGet("lists-for-series")] - public async Task>> GetListsForSeries(int seriesId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - var items = await _unitOfWork.ReadingListRepository.GetReadingListDtosForSeriesAndUserAsync(userId, seriesId, true); + return BadRequest("Couldn't delete item"); + } - return Ok(items); + /// + /// Removes all entries that are fully read from the reading list + /// + /// + /// + [HttpPost("remove-read")] + public async Task DeleteReadFromList([FromQuery] int readingListId) + { + var user = await _readingListService.UserHasReadingListAccess(readingListId, User.GetUsername()); + if (user == null) + { + return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); } - /// - /// Fetches all reading list items for a given list including rich metadata around series, volume, chapters, and progress - /// - /// This call is expensive - /// - /// - [HttpGet("items")] - public async Task>> GetListForUser(int readingListId) + if (await _readingListService.RemoveFullyReadItems(readingListId, user)) { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - var items = await _unitOfWork.ReadingListRepository.GetReadingListItemDtosByIdAsync(readingListId, userId); - return Ok(items); + return Ok("Updated"); } + return BadRequest("Could not remove read items"); + } - /// - /// Updates an items position - /// - /// - /// - [HttpPost("update-position")] - public async Task UpdateListItemPosition(UpdateReadingListPosition dto) + /// + /// Deletes a reading list + /// + /// + /// + [HttpDelete] + public async Task DeleteList([FromQuery] int readingListId) + { + var user = await _readingListService.UserHasReadingListAccess(readingListId, User.GetUsername()); + if (user == null) { - // Make sure UI buffers events - var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, 
User.GetUsername()); - if (user == null) + return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); + } + + if (await _readingListService.DeleteReadingList(readingListId, user)) return Ok("List was deleted"); + + return BadRequest("There was an issue deleting reading list"); + } + + /// + /// Creates a new List with a unique title. Returns the new ReadingList back + /// + /// + /// + [HttpPost("create")] + public async Task> CreateList(CreateReadingListDto dto) + { + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.ReadingListsWithItems); + + // When creating, we need to make sure Title is unique + var hasExisting = user.ReadingLists.Any(l => l.Title.Equals(dto.Title)); + if (hasExisting) + { + return BadRequest("A list of this name already exists"); + } + + var readingList = DbFactory.ReadingList(dto.Title, string.Empty, false); + user.ReadingLists.Add(readingList); + + if (!_unitOfWork.HasChanges()) return BadRequest("There was a problem creating list"); + + await _unitOfWork.CommitAsync(); + + return Ok(await _unitOfWork.ReadingListRepository.GetReadingListDtoByTitleAsync(user.Id, dto.Title)); + } + + /// + /// Update the properties (title, summary) of a reading list + /// + /// + /// + [HttpPost("update")] + public async Task UpdateList(UpdateReadingListDto dto) + { + var readingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(dto.ReadingListId); + if (readingList == null) return BadRequest("List does not exist"); + + var user = await _readingListService.UserHasReadingListAccess(readingList.Id, User.GetUsername()); + if (user == null) + { + return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); + } + + dto.Title = dto.Title.Trim(); + if (!string.IsNullOrEmpty(dto.Title)) + { + readingList.Summary = dto.Summary; + + if (!readingList.Title.Equals(dto.Title)) { - return BadRequest("You do not have 
permissions on this reading list or the list doesn't exist"); - } - - if (await _readingListService.UpdateReadingListItemPosition(dto)) return Ok("Updated"); - - - return BadRequest("Couldn't update position"); - } - - /// - /// Deletes a list item from the list. Will reorder all item positions afterwards - /// - /// - /// - [HttpPost("delete-item")] - public async Task DeleteListItem(UpdateReadingListPosition dto) - { - var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, User.GetUsername()); - if (user == null) - { - return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); - } - - if (await _readingListService.DeleteReadingListItem(dto)) - { - return Ok("Updated"); - } - - return BadRequest("Couldn't delete item"); - } - - /// - /// Removes all entries that are fully read from the reading list - /// - /// - /// - [HttpPost("remove-read")] - public async Task DeleteReadFromList([FromQuery] int readingListId) - { - var user = await _readingListService.UserHasReadingListAccess(readingListId, User.GetUsername()); - if (user == null) - { - return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); - } - - if (await _readingListService.RemoveFullyReadItems(readingListId, user)) - { - return Ok("Updated"); - } - - return BadRequest("Could not remove read items"); - } - - /// - /// Deletes a reading list - /// - /// - /// - [HttpDelete] - public async Task DeleteList([FromQuery] int readingListId) - { - var user = await _readingListService.UserHasReadingListAccess(readingListId, User.GetUsername()); - if (user == null) - { - return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); - } - - if (await _readingListService.DeleteReadingList(readingListId, user)) return Ok("List was deleted"); - - return BadRequest("There was an issue deleting reading list"); - } - - /// - /// Creates a new List with a unique title. 
Returns the new ReadingList back - /// - /// - /// - [HttpPost("create")] - public async Task> CreateList(CreateReadingListDto dto) - { - - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.ReadingListsWithItems); - - // When creating, we need to make sure Title is unique - var hasExisting = user.ReadingLists.Any(l => l.Title.Equals(dto.Title)); - if (hasExisting) - { - return BadRequest("A list of this name already exists"); - } - - var readingList = DbFactory.ReadingList(dto.Title, string.Empty, false); - user.ReadingLists.Add(readingList); - - if (!_unitOfWork.HasChanges()) return BadRequest("There was a problem creating list"); - - await _unitOfWork.CommitAsync(); - - return Ok(await _unitOfWork.ReadingListRepository.GetReadingListDtoByTitleAsync(user.Id, dto.Title)); - } - - /// - /// Update the properties (title, summary) of a reading list - /// - /// - /// - [HttpPost("update")] - public async Task UpdateList(UpdateReadingListDto dto) - { - var readingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(dto.ReadingListId); - if (readingList == null) return BadRequest("List does not exist"); - - var user = await _readingListService.UserHasReadingListAccess(readingList.Id, User.GetUsername()); - if (user == null) - { - return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); - } - - if (!string.IsNullOrEmpty(dto.Title)) - { - readingList.Title = dto.Title; // Should I check if this is unique? 
+ var hasExisting = user.ReadingLists.Any(l => l.Title.Equals(dto.Title)); + if (hasExisting) + { + return BadRequest("A list of this name already exists"); + } + readingList.Title = dto.Title; readingList.NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(readingList.Title); } - if (!string.IsNullOrEmpty(dto.Title)) - { - readingList.Summary = dto.Summary; - } - - readingList.Promoted = dto.Promoted; - - readingList.CoverImageLocked = dto.CoverImageLocked; - - if (!dto.CoverImageLocked) - { - readingList.CoverImageLocked = false; - readingList.CoverImage = string.Empty; - await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, - MessageFactory.CoverUpdateEvent(readingList.Id, MessageFactoryEntityTypes.ReadingList), false); - _unitOfWork.ReadingListRepository.Update(readingList); - } - + } + readingList.Promoted = dto.Promoted; + readingList.CoverImageLocked = dto.CoverImageLocked; + if (!dto.CoverImageLocked) + { + readingList.CoverImageLocked = false; + readingList.CoverImage = string.Empty; + await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, + MessageFactory.CoverUpdateEvent(readingList.Id, MessageFactoryEntityTypes.ReadingList), false); _unitOfWork.ReadingListRepository.Update(readingList); + } - if (await _unitOfWork.CommitAsync()) + + + _unitOfWork.ReadingListRepository.Update(readingList); + + if (await _unitOfWork.CommitAsync()) + { + return Ok("Updated"); + } + return BadRequest("Could not update reading list"); + } + + /// + /// Adds all chapters from a Series to a reading list + /// + /// + /// + [HttpPost("update-by-series")] + public async Task UpdateListBySeries(UpdateReadingListBySeriesDto dto) + { + var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, User.GetUsername()); + if (user == null) + { + return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); + } + + var readingList = user.ReadingLists.SingleOrDefault(l => l.Id == dto.ReadingListId); + if 
(readingList == null) return BadRequest("Reading List does not exist"); + var chapterIdsForSeries = + await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new [] {dto.SeriesId}); + + // If there are adds, tell tracking this has been modified + if (await _readingListService.AddChaptersToReadingList(dto.SeriesId, chapterIdsForSeries, readingList)) + { + _unitOfWork.ReadingListRepository.Update(readingList); + } + + try + { + if (_unitOfWork.HasChanges()) { + await _unitOfWork.CommitAsync(); return Ok("Updated"); } - return BadRequest("Could not update reading list"); + } + catch + { + await _unitOfWork.RollbackAsync(); } - /// - /// Adds all chapters from a Series to a reading list - /// - /// - /// - [HttpPost("update-by-series")] - public async Task UpdateListBySeries(UpdateReadingListBySeriesDto dto) + return Ok("Nothing to do"); + } + + + /// + /// Adds all chapters from a list of volumes and chapters to a reading list + /// + /// + /// + [HttpPost("update-by-multiple")] + public async Task UpdateListByMultiple(UpdateReadingListByMultipleDto dto) + { + var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, User.GetUsername()); + if (user == null) { - var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, User.GetUsername()); - if (user == null) + return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); + } + var readingList = user.ReadingLists.SingleOrDefault(l => l.Id == dto.ReadingListId); + if (readingList == null) return BadRequest("Reading List does not exist"); + + var chapterIds = await _unitOfWork.VolumeRepository.GetChapterIdsByVolumeIds(dto.VolumeIds); + foreach (var chapterId in dto.ChapterIds) + { + chapterIds.Add(chapterId); + } + + // If there are adds, tell tracking this has been modified + if (await _readingListService.AddChaptersToReadingList(dto.SeriesId, chapterIds, readingList)) + { + _unitOfWork.ReadingListRepository.Update(readingList); + } 
+ + try + { + if (_unitOfWork.HasChanges()) { - return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); + await _unitOfWork.CommitAsync(); + return Ok("Updated"); } + } + catch + { + await _unitOfWork.RollbackAsync(); + } - var readingList = user.ReadingLists.SingleOrDefault(l => l.Id == dto.ReadingListId); - if (readingList == null) return BadRequest("Reading List does not exist"); - var chapterIdsForSeries = - await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new [] {dto.SeriesId}); + return Ok("Nothing to do"); + } + /// + /// Adds all chapters from a list of series to a reading list + /// + /// + /// + [HttpPost("update-by-multiple-series")] + public async Task UpdateListByMultipleSeries(UpdateReadingListByMultipleSeriesDto dto) + { + var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, User.GetUsername()); + if (user == null) + { + return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); + } + var readingList = user.ReadingLists.SingleOrDefault(l => l.Id == dto.ReadingListId); + if (readingList == null) return BadRequest("Reading List does not exist"); + + var ids = await _unitOfWork.SeriesRepository.GetChapterIdWithSeriesIdForSeriesAsync(dto.SeriesIds.ToArray()); + + foreach (var seriesId in ids.Keys) + { // If there are adds, tell tracking this has been modified - if (await _readingListService.AddChaptersToReadingList(dto.SeriesId, chapterIdsForSeries, readingList)) + if (await _readingListService.AddChaptersToReadingList(seriesId, ids[seriesId], readingList)) { _unitOfWork.ReadingListRepository.Update(readingList); } - - try - { - if (_unitOfWork.HasChanges()) - { - await _unitOfWork.CommitAsync(); - return Ok("Updated"); - } - } - catch - { - await _unitOfWork.RollbackAsync(); - } - - return Ok("Nothing to do"); } - - /// - /// Adds all chapters from a list of volumes and chapters to a reading list - /// - /// - /// - 
[HttpPost("update-by-multiple")] - public async Task UpdateListByMultiple(UpdateReadingListByMultipleDto dto) + try { - var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, User.GetUsername()); - if (user == null) + if (_unitOfWork.HasChanges()) { - return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); + await _unitOfWork.CommitAsync(); + return Ok("Updated"); } - var readingList = user.ReadingLists.SingleOrDefault(l => l.Id == dto.ReadingListId); - if (readingList == null) return BadRequest("Reading List does not exist"); - - var chapterIds = await _unitOfWork.VolumeRepository.GetChapterIdsByVolumeIds(dto.VolumeIds); - foreach (var chapterId in dto.ChapterIds) - { - chapterIds.Add(chapterId); - } - - // If there are adds, tell tracking this has been modified - if (await _readingListService.AddChaptersToReadingList(dto.SeriesId, chapterIds, readingList)) - { - _unitOfWork.ReadingListRepository.Update(readingList); - } - - try - { - if (_unitOfWork.HasChanges()) - { - await _unitOfWork.CommitAsync(); - return Ok("Updated"); - } - } - catch - { - await _unitOfWork.RollbackAsync(); - } - - return Ok("Nothing to do"); } - - /// - /// Adds all chapters from a list of series to a reading list - /// - /// - /// - [HttpPost("update-by-multiple-series")] - public async Task UpdateListByMultipleSeries(UpdateReadingListByMultipleSeriesDto dto) + catch { - var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, User.GetUsername()); - if (user == null) - { - return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); - } - var readingList = user.ReadingLists.SingleOrDefault(l => l.Id == dto.ReadingListId); - if (readingList == null) return BadRequest("Reading List does not exist"); - - var ids = await _unitOfWork.SeriesRepository.GetChapterIdWithSeriesIdForSeriesAsync(dto.SeriesIds.ToArray()); - - foreach (var seriesId in ids.Keys) - { - // If 
there are adds, tell tracking this has been modified - if (await _readingListService.AddChaptersToReadingList(seriesId, ids[seriesId], readingList)) - { - _unitOfWork.ReadingListRepository.Update(readingList); - } - } - - try - { - if (_unitOfWork.HasChanges()) - { - await _unitOfWork.CommitAsync(); - return Ok("Updated"); - } - } - catch - { - await _unitOfWork.RollbackAsync(); - } - - return Ok("Nothing to do"); + await _unitOfWork.RollbackAsync(); } - [HttpPost("update-by-volume")] - public async Task UpdateListByVolume(UpdateReadingListByVolumeDto dto) + return Ok("Nothing to do"); + } + + [HttpPost("update-by-volume")] + public async Task UpdateListByVolume(UpdateReadingListByVolumeDto dto) + { + var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, User.GetUsername()); + if (user == null) { - var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, User.GetUsername()); - if (user == null) - { - return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); - } - var readingList = user.ReadingLists.SingleOrDefault(l => l.Id == dto.ReadingListId); - if (readingList == null) return BadRequest("Reading List does not exist"); - - var chapterIdsForVolume = - (await _unitOfWork.ChapterRepository.GetChaptersAsync(dto.VolumeId)).Select(c => c.Id).ToList(); - - // If there are adds, tell tracking this has been modified - if (await _readingListService.AddChaptersToReadingList(dto.SeriesId, chapterIdsForVolume, readingList)) - { - _unitOfWork.ReadingListRepository.Update(readingList); - } - - try - { - if (_unitOfWork.HasChanges()) - { - await _unitOfWork.CommitAsync(); - return Ok("Updated"); - } - } - catch - { - await _unitOfWork.RollbackAsync(); - } - - return Ok("Nothing to do"); + return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); } + var readingList = user.ReadingLists.SingleOrDefault(l => l.Id == dto.ReadingListId); + if (readingList 
== null) return BadRequest("Reading List does not exist"); - [HttpPost("update-by-chapter")] - public async Task UpdateListByChapter(UpdateReadingListByChapterDto dto) + var chapterIdsForVolume = + (await _unitOfWork.ChapterRepository.GetChaptersAsync(dto.VolumeId)).Select(c => c.Id).ToList(); + + // If there are adds, tell tracking this has been modified + if (await _readingListService.AddChaptersToReadingList(dto.SeriesId, chapterIdsForVolume, readingList)) { - var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, User.GetUsername()); - if (user == null) - { - return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); - } - var readingList = user.ReadingLists.SingleOrDefault(l => l.Id == dto.ReadingListId); - if (readingList == null) return BadRequest("Reading List does not exist"); - - // If there are adds, tell tracking this has been modified - if (await _readingListService.AddChaptersToReadingList(dto.SeriesId, new List() { dto.ChapterId }, readingList)) - { - _unitOfWork.ReadingListRepository.Update(readingList); - } - - try - { - if (_unitOfWork.HasChanges()) - { - await _unitOfWork.CommitAsync(); - return Ok("Updated"); - } - } - catch - { - await _unitOfWork.RollbackAsync(); - } - - return Ok("Nothing to do"); + _unitOfWork.ReadingListRepository.Update(readingList); } - - - /// - /// Returns the next chapter within the reading list - /// - /// - /// - /// Chapter Id for next item, -1 if nothing exists - [HttpGet("next-chapter")] - public async Task> GetNextChapter(int currentChapterId, int readingListId) + try { - var items = (await _unitOfWork.ReadingListRepository.GetReadingListItemsByIdAsync(readingListId)).ToList(); - var readingListItem = items.SingleOrDefault(rl => rl.ChapterId == currentChapterId); - if (readingListItem == null) return BadRequest("Id does not exist"); - var index = items.IndexOf(readingListItem) + 1; - if (items.Count > index) + if (_unitOfWork.HasChanges()) { - return 
items[index].ChapterId; + await _unitOfWork.CommitAsync(); + return Ok("Updated"); } - - return Ok(-1); } - - /// - /// Returns the prev chapter within the reading list - /// - /// - /// - /// Chapter Id for next item, -1 if nothing exists - [HttpGet("prev-chapter")] - public async Task> GetPrevChapter(int currentChapterId, int readingListId) + catch { - var items = (await _unitOfWork.ReadingListRepository.GetReadingListItemsByIdAsync(readingListId)).ToList(); - var readingListItem = items.SingleOrDefault(rl => rl.ChapterId == currentChapterId); - if (readingListItem == null) return BadRequest("Id does not exist"); - var index = items.IndexOf(readingListItem) - 1; - if (0 <= index) - { - return items[index].ChapterId; - } - - return Ok(-1); + await _unitOfWork.RollbackAsync(); } + + return Ok("Nothing to do"); + } + + [HttpPost("update-by-chapter")] + public async Task UpdateListByChapter(UpdateReadingListByChapterDto dto) + { + var user = await _readingListService.UserHasReadingListAccess(dto.ReadingListId, User.GetUsername()); + if (user == null) + { + return BadRequest("You do not have permissions on this reading list or the list doesn't exist"); + } + var readingList = user.ReadingLists.SingleOrDefault(l => l.Id == dto.ReadingListId); + if (readingList == null) return BadRequest("Reading List does not exist"); + + // If there are adds, tell tracking this has been modified + if (await _readingListService.AddChaptersToReadingList(dto.SeriesId, new List() { dto.ChapterId }, readingList)) + { + _unitOfWork.ReadingListRepository.Update(readingList); + } + + try + { + if (_unitOfWork.HasChanges()) + { + await _unitOfWork.CommitAsync(); + return Ok("Updated"); + } + } + catch + { + await _unitOfWork.RollbackAsync(); + } + + return Ok("Nothing to do"); + } + + + + /// + /// Returns the next chapter within the reading list + /// + /// + /// + /// Chapter Id for next item, -1 if nothing exists + [HttpGet("next-chapter")] + public async Task> GetNextChapter(int 
currentChapterId, int readingListId) + { + var items = (await _unitOfWork.ReadingListRepository.GetReadingListItemsByIdAsync(readingListId)).ToList(); + var readingListItem = items.SingleOrDefault(rl => rl.ChapterId == currentChapterId); + if (readingListItem == null) return BadRequest("Id does not exist"); + var index = items.IndexOf(readingListItem) + 1; + if (items.Count > index) + { + return items[index].ChapterId; + } + + return Ok(-1); + } + + /// + /// Returns the prev chapter within the reading list + /// + /// + /// + /// Chapter Id for next item, -1 if nothing exists + [HttpGet("prev-chapter")] + public async Task> GetPrevChapter(int currentChapterId, int readingListId) + { + var items = (await _unitOfWork.ReadingListRepository.GetReadingListItemsByIdAsync(readingListId)).ToList(); + var readingListItem = items.SingleOrDefault(rl => rl.ChapterId == currentChapterId); + if (readingListItem == null) return BadRequest("Id does not exist"); + var index = items.IndexOf(readingListItem) - 1; + if (0 <= index) + { + return items[index].ChapterId; + } + + return Ok(-1); } } diff --git a/API/Controllers/RecommendedController.cs b/API/Controllers/RecommendedController.cs index 215b55397..893cb852a 100644 --- a/API/Controllers/RecommendedController.cs +++ b/API/Controllers/RecommendedController.cs @@ -1,5 +1,4 @@ -using System.Collections.Generic; -using System.Threading.Tasks; +using System.Threading.Tasks; using API.Data; using API.DTOs; using API.Extensions; diff --git a/API/Controllers/SearchController.cs b/API/Controllers/SearchController.cs new file mode 100644 index 000000000..722a3b310 --- /dev/null +++ b/API/Controllers/SearchController.cs @@ -0,0 +1,67 @@ +using System; +using System.Linq; +using System.Threading.Tasks; +using API.Data; +using API.DTOs; +using API.DTOs.Search; +using API.Extensions; +using Microsoft.AspNetCore.Mvc; + +namespace API.Controllers; + +/// +/// Responsible for the Search interface from the UI +/// +public class SearchController 
: BaseApiController +{ + private readonly IUnitOfWork _unitOfWork; + + public SearchController(IUnitOfWork unitOfWork) + { + _unitOfWork = unitOfWork; + } + + /// + /// Returns the series for the MangaFile id. If the user does not have access (shouldn't happen by the UI), + /// then null is returned + /// + /// + /// + [HttpGet("series-for-mangafile")] + public async Task> GetSeriesForMangaFile(int mangaFileId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return Ok(await _unitOfWork.SeriesRepository.GetSeriesForMangaFile(mangaFileId, userId)); + } + + /// + /// Returns the series for the Chapter id. If the user does not have access (shouldn't happen by the UI), + /// then null is returned + /// + /// + /// + [HttpGet("series-for-chapter")] + public async Task> GetSeriesForChapter(int chapterId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return Ok(await _unitOfWork.SeriesRepository.GetSeriesForChapter(chapterId, userId)); + } + + [HttpGet("search")] + public async Task> Search(string queryString) + { + queryString = Uri.UnescapeDataString(queryString).Trim().Replace(@"%", string.Empty).Replace(":", string.Empty); + + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + // Get libraries user has access to + var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id)).ToList(); + + if (!libraries.Any()) return BadRequest("User does not have access to any libraries"); + if (!libraries.Any()) return BadRequest("User does not have access to any libraries"); + var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user); + + var series = await _unitOfWork.SeriesRepository.SearchSeries(user.Id, isAdmin, libraries.Select(l => l.Id).ToArray(), queryString); + + return Ok(series); + } +} diff --git a/API/Controllers/SeriesController.cs b/API/Controllers/SeriesController.cs index 
6f458b6b8..4433ade21 100644 --- a/API/Controllers/SeriesController.cs +++ b/API/Controllers/SeriesController.cs @@ -19,480 +19,422 @@ using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Logging; -namespace API.Controllers +namespace API.Controllers; + +public class SeriesController : BaseApiController { - public class SeriesController : BaseApiController + private readonly ILogger _logger; + private readonly ITaskScheduler _taskScheduler; + private readonly IUnitOfWork _unitOfWork; + private readonly ISeriesService _seriesService; + + + public SeriesController(ILogger logger, ITaskScheduler taskScheduler, IUnitOfWork unitOfWork, ISeriesService seriesService) { - private readonly ILogger _logger; - private readonly ITaskScheduler _taskScheduler; - private readonly IUnitOfWork _unitOfWork; - private readonly ISeriesService _seriesService; - - - public SeriesController(ILogger logger, ITaskScheduler taskScheduler, IUnitOfWork unitOfWork, ISeriesService seriesService) - { - _logger = logger; - _taskScheduler = taskScheduler; - _unitOfWork = unitOfWork; - _seriesService = seriesService; - } - - [HttpPost] - public async Task>> GetSeriesForLibrary(int libraryId, [FromQuery] UserParams userParams, [FromBody] FilterDto filterDto) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - var series = - await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, userId, userParams, filterDto); - - // Apply progress/rating information (I can't work out how to do this in initial query) - if (series == null) return BadRequest("Could not get series for library"); - - await _unitOfWork.SeriesRepository.AddSeriesModifiers(userId, series); - - Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages); - - return Ok(series); - } - - /// - /// Fetches a Series for a given Id - /// - /// Series Id to fetch details for - /// - /// 
Throws an exception if the series Id does exist - [HttpGet("{seriesId:int}")] - public async Task> GetSeries(int seriesId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - try - { - return Ok(await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId)); - } - catch (Exception e) - { - _logger.LogError(e, "There was an issue fetching {SeriesId}", seriesId); - throw new KavitaException("This series does not exist"); - } - - } - - [Authorize(Policy = "RequireAdminRole")] - [HttpDelete("{seriesId}")] - public async Task> DeleteSeries(int seriesId) - { - var username = User.GetUsername(); - _logger.LogInformation("Series {SeriesId} is being deleted by {UserName}", seriesId, username); - - return Ok(await _seriesService.DeleteMultipleSeries(new[] {seriesId})); - } - - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("delete-multiple")] - public async Task DeleteMultipleSeries(DeleteSeriesDto dto) - { - var username = User.GetUsername(); - _logger.LogInformation("Series {SeriesId} is being deleted by {UserName}", dto.SeriesIds, username); - - if (await _seriesService.DeleteMultipleSeries(dto.SeriesIds)) return Ok(); - - return BadRequest("There was an issue deleting the series requested"); - } - - /// - /// Returns All volumes for a series with progress information and Chapters - /// - /// - /// - [HttpGet("volumes")] - public async Task>> GetVolumes(int seriesId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - return Ok(await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)); - } - - [HttpGet("volume")] - public async Task> GetVolume(int volumeId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - return Ok(await _unitOfWork.VolumeRepository.GetVolumeDtoAsync(volumeId, userId)); - } - - [HttpGet("chapter")] - public async Task> GetChapter(int chapterId) - { - return Ok(await 
_unitOfWork.ChapterRepository.GetChapterDtoAsync(chapterId)); - } - - [HttpGet("chapter-metadata")] - public async Task> GetChapterMetadata(int chapterId) - { - return Ok(await _unitOfWork.ChapterRepository.GetChapterMetadataDtoAsync(chapterId)); - } - - - [HttpPost("update-rating")] - public async Task UpdateSeriesRating(UpdateSeriesRatingDto updateSeriesRatingDto) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Ratings); - if (!await _seriesService.UpdateRating(user, updateSeriesRatingDto)) return BadRequest("There was a critical error."); - return Ok(); - } - - [HttpPost("update")] - public async Task UpdateSeries(UpdateSeriesDto updateSeries) - { - _logger.LogInformation("{UserName} is updating Series {SeriesName}", User.GetUsername(), updateSeries.Name); - - var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(updateSeries.Id); - - if (series == null) return BadRequest("Series does not exist"); - - var seriesExists = - await _unitOfWork.SeriesRepository.DoesSeriesNameExistInLibrary(updateSeries.Name.Trim(), series.LibraryId, - series.Format); - if (series.Name != updateSeries.Name && seriesExists) - { - return BadRequest("A series already exists in this library with this name. 
Series Names must be unique to a library."); - } - - series.Name = updateSeries.Name.Trim(); - series.NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(series.Name); - if (!string.IsNullOrEmpty(updateSeries.SortName.Trim())) - { - series.SortName = updateSeries.SortName.Trim(); - } - - series.LocalizedName = updateSeries.LocalizedName.Trim(); - series.NormalizedLocalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(series.LocalizedName); - - series.NameLocked = updateSeries.NameLocked; - series.SortNameLocked = updateSeries.SortNameLocked; - series.LocalizedNameLocked = updateSeries.LocalizedNameLocked; - - - var needsRefreshMetadata = false; - // This is when you hit Reset - if (series.CoverImageLocked && !updateSeries.CoverImageLocked) - { - // Trigger a refresh when we are moving from a locked image to a non-locked - needsRefreshMetadata = true; - series.CoverImage = string.Empty; - series.CoverImageLocked = updateSeries.CoverImageLocked; - } - - _unitOfWork.SeriesRepository.Update(series); - - if (await _unitOfWork.CommitAsync()) - { - if (needsRefreshMetadata) - { - _taskScheduler.RefreshSeriesMetadata(series.LibraryId, series.Id); - } - return Ok(); - } - - return BadRequest("There was an error with updating the series"); - } - - [HttpPost("recently-added")] - public async Task>> GetRecentlyAdded(FilterDto filterDto, [FromQuery] UserParams userParams, [FromQuery] int libraryId = 0) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - var series = - await _unitOfWork.SeriesRepository.GetRecentlyAdded(libraryId, userId, userParams, filterDto); - - // Apply progress/rating information (I can't work out how to do this in initial query) - if (series == null) return BadRequest("Could not get series"); - - await _unitOfWork.SeriesRepository.AddSeriesModifiers(userId, series); - - Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages); - - return 
Ok(series); - } - - [HttpPost("recently-updated-series")] - public async Task>> GetRecentlyAddedChapters() - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - return Ok(await _unitOfWork.SeriesRepository.GetRecentlyUpdatedSeries(userId)); - } - - [HttpPost("all")] - public async Task>> GetAllSeries(FilterDto filterDto, [FromQuery] UserParams userParams, [FromQuery] int libraryId = 0) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - var series = - await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, userId, userParams, filterDto); - - // Apply progress/rating information (I can't work out how to do this in initial query) - if (series == null) return BadRequest("Could not get series"); - - await _unitOfWork.SeriesRepository.AddSeriesModifiers(userId, series); - - Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages); - - return Ok(series); - } - - /// - /// Fetches series that are on deck aka have progress on them. 
- /// - /// - /// - /// Default of 0 meaning all libraries - /// - [HttpPost("on-deck")] - public async Task>> GetOnDeck(FilterDto filterDto, [FromQuery] UserParams userParams, [FromQuery] int libraryId = 0) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - var pagedList = await _unitOfWork.SeriesRepository.GetOnDeck(userId, libraryId, userParams, filterDto); - - await _unitOfWork.SeriesRepository.AddSeriesModifiers(userId, pagedList); - - Response.AddPaginationHeader(pagedList.CurrentPage, pagedList.PageSize, pagedList.TotalCount, pagedList.TotalPages); - - return Ok(pagedList); - } - - /// - /// Runs a Cover Image Generation task - /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("refresh-metadata")] - public ActionResult RefreshSeriesMetadata(RefreshSeriesDto refreshSeriesDto) - { - _taskScheduler.RefreshSeriesMetadata(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId, refreshSeriesDto.ForceUpdate); - return Ok(); - } - - /// - /// Scan a series and force each file to be updated. This should be invoked via the User, hence why we force. - /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("scan")] - public ActionResult ScanSeries(RefreshSeriesDto refreshSeriesDto) - { - _taskScheduler.ScanSeries(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId, refreshSeriesDto.ForceUpdate); - return Ok(); - } - - /// - /// Run a file analysis on the series. 
- /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("analyze")] - public ActionResult AnalyzeSeries(RefreshSeriesDto refreshSeriesDto) - { - _taskScheduler.AnalyzeFilesForSeries(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId, refreshSeriesDto.ForceUpdate); - return Ok(); - } - - /// - /// Returns metadata for a given series - /// - /// - /// - [HttpGet("metadata")] - public async Task> GetSeriesMetadata(int seriesId) - { - var metadata = await _unitOfWork.SeriesRepository.GetSeriesMetadata(seriesId); - return Ok(metadata); - } - - /// - /// Update series metadata - /// - /// - /// - [HttpPost("metadata")] - public async Task UpdateSeriesMetadata(UpdateSeriesMetadataDto updateSeriesMetadataDto) - { - if (await _seriesService.UpdateSeriesMetadata(updateSeriesMetadataDto)) - { - return Ok("Successfully updated"); - } - - return BadRequest("Could not update metadata"); - } - - /// - /// Returns all Series grouped by the passed Collection Id with Pagination. - /// - /// Collection Id to pull series from - /// Pagination information - /// - [HttpGet("series-by-collection")] - public async Task>> GetSeriesByCollectionTag(int collectionId, [FromQuery] UserParams userParams) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - var series = - await _unitOfWork.SeriesRepository.GetSeriesDtoForCollectionAsync(collectionId, userId, userParams); - - // Apply progress/rating information (I can't work out how to do this in initial query) - if (series == null) return BadRequest("Could not get series for collection"); - - await _unitOfWork.SeriesRepository.AddSeriesModifiers(userId, series); - - Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages); - - return Ok(series); - } - - /// - /// Fetches Series for a set of Ids. This will check User for permission access and filter out any Ids that don't exist or - /// the user does not have access to. 
- /// - /// - [HttpPost("series-by-ids")] - public async Task>> GetAllSeriesById(SeriesByIdsDto dto) - { - if (dto.SeriesIds == null) return BadRequest("Must pass seriesIds"); - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - return Ok(await _unitOfWork.SeriesRepository.GetSeriesDtoForIdsAsync(dto.SeriesIds, userId)); - } - - /// - /// Get the age rating for the enum value - /// - /// - /// - [HttpGet("age-rating")] - public ActionResult GetAgeRating(int ageRating) - { - var val = (AgeRating) ageRating; - - return Ok(val.ToDescription()); - } - - /// - /// Get a special DTO for Series Detail page. - /// - /// - /// - /// Do not rely on this API externally. May change without hesitation. - [HttpGet("series-detail")] - public async Task> GetSeriesDetailBreakdown(int seriesId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - return await _seriesService.GetSeriesDetail(seriesId, userId); - } - - /// - /// Returns the series for the MangaFile id. If the user does not have access (shouldn't happen by the UI), - /// then null is returned - /// - /// - /// - [HttpGet("series-for-mangafile")] - public async Task> GetSeriesForMangaFile(int mangaFileId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - return Ok(await _unitOfWork.SeriesRepository.GetSeriesForMangaFile(mangaFileId, userId)); - } - - /// - /// Returns the series for the Chapter id. 
If the user does not have access (shouldn't happen by the UI), - /// then null is returned - /// - /// - /// - [HttpGet("series-for-chapter")] - public async Task> GetSeriesForChapter(int chapterId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - return Ok(await _unitOfWork.SeriesRepository.GetSeriesForChapter(chapterId, userId)); - } - - /// - /// Fetches the related series for a given series - /// - /// - /// Type of Relationship to pull back - /// - [HttpGet("related")] - public async Task>> GetRelatedSeries(int seriesId, RelationKind relation) - { - // Send back a custom DTO with each type or maybe sorted in some way - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - return Ok(await _unitOfWork.SeriesRepository.GetSeriesForRelationKind(userId, seriesId, relation)); - } - - /// - /// Returns all related series against the passed series Id - /// - /// - /// - [HttpGet("all-related")] - public async Task> GetAllRelatedSeries(int seriesId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - return Ok(await _unitOfWork.SeriesRepository.GetRelatedSeries(userId, seriesId)); - } - - - /// - /// Update the relations attached to the Series. Does not generate associated Sequel/Prequel pairs on target series. 
- /// - /// - /// - [Authorize(Policy="RequireAdminRole")] - [HttpPost("update-related")] - public async Task UpdateRelatedSeries(UpdateRelatedSeriesDto dto) - { - var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(dto.SeriesId, SeriesIncludes.Related); - - UpdateRelationForKind(dto.Adaptations, series.Relations.Where(r => r.RelationKind == RelationKind.Adaptation).ToList(), series, RelationKind.Adaptation); - UpdateRelationForKind(dto.Characters, series.Relations.Where(r => r.RelationKind == RelationKind.Character).ToList(), series, RelationKind.Character); - UpdateRelationForKind(dto.Contains, series.Relations.Where(r => r.RelationKind == RelationKind.Contains).ToList(), series, RelationKind.Contains); - UpdateRelationForKind(dto.Others, series.Relations.Where(r => r.RelationKind == RelationKind.Other).ToList(), series, RelationKind.Other); - UpdateRelationForKind(dto.SideStories, series.Relations.Where(r => r.RelationKind == RelationKind.SideStory).ToList(), series, RelationKind.SideStory); - UpdateRelationForKind(dto.SpinOffs, series.Relations.Where(r => r.RelationKind == RelationKind.SpinOff).ToList(), series, RelationKind.SpinOff); - UpdateRelationForKind(dto.AlternativeSettings, series.Relations.Where(r => r.RelationKind == RelationKind.AlternativeSetting).ToList(), series, RelationKind.AlternativeSetting); - UpdateRelationForKind(dto.AlternativeVersions, series.Relations.Where(r => r.RelationKind == RelationKind.AlternativeVersion).ToList(), series, RelationKind.AlternativeVersion); - UpdateRelationForKind(dto.Doujinshis, series.Relations.Where(r => r.RelationKind == RelationKind.Doujinshi).ToList(), series, RelationKind.Doujinshi); - UpdateRelationForKind(dto.Prequels, series.Relations.Where(r => r.RelationKind == RelationKind.Prequel).ToList(), series, RelationKind.Prequel); - UpdateRelationForKind(dto.Sequels, series.Relations.Where(r => r.RelationKind == RelationKind.Sequel).ToList(), series, RelationKind.Sequel); - - if 
(!_unitOfWork.HasChanges()) return Ok(); - if (await _unitOfWork.CommitAsync()) return Ok(); - - - return BadRequest("There was an issue updating relationships"); - } - - // TODO: Move this to a Service and Unit Test it - private void UpdateRelationForKind(ICollection dtoTargetSeriesIds, IEnumerable adaptations, Series series, RelationKind kind) - { - foreach (var adaptation in adaptations.Where(adaptation => !dtoTargetSeriesIds.Contains(adaptation.TargetSeriesId))) - { - // If the seriesId isn't in dto, it means we've removed or reclassified - series.Relations.Remove(adaptation); - } - - // At this point, we only have things to add - foreach (var targetSeriesId in dtoTargetSeriesIds) - { - // This ensures we don't allow any duplicates to be added - if (series.Relations.SingleOrDefault(r => - r.RelationKind == kind && r.TargetSeriesId == targetSeriesId) != - null) continue; - - series.Relations.Add(new SeriesRelation() - { - Series = series, - SeriesId = series.Id, - TargetSeriesId = targetSeriesId, - RelationKind = kind - }); - _unitOfWork.SeriesRepository.Update(series); - } - } + _logger = logger; + _taskScheduler = taskScheduler; + _unitOfWork = unitOfWork; + _seriesService = seriesService; } + + [HttpPost] + public async Task>> GetSeriesForLibrary(int libraryId, [FromQuery] UserParams userParams, [FromBody] FilterDto filterDto) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + var series = + await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, userId, userParams, filterDto); + + // Apply progress/rating information (I can't work out how to do this in initial query) + if (series == null) return BadRequest("Could not get series for library"); + + await _unitOfWork.SeriesRepository.AddSeriesModifiers(userId, series); + + Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages); + + return Ok(series); + } + + /// + /// Fetches a Series for a 
given Id + /// + /// Series Id to fetch details for + /// + /// Throws an exception if the series Id does exist + [HttpGet("{seriesId:int}")] + public async Task> GetSeries(int seriesId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + try + { + return Ok(await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId)); + } + catch (Exception e) + { + _logger.LogError(e, "There was an issue fetching {SeriesId}", seriesId); + throw new KavitaException("This series does not exist"); + } + + } + + [Authorize(Policy = "RequireAdminRole")] + [HttpDelete("{seriesId}")] + public async Task> DeleteSeries(int seriesId) + { + var username = User.GetUsername(); + _logger.LogInformation("Series {SeriesId} is being deleted by {UserName}", seriesId, username); + + return Ok(await _seriesService.DeleteMultipleSeries(new[] {seriesId})); + } + + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("delete-multiple")] + public async Task DeleteMultipleSeries(DeleteSeriesDto dto) + { + var username = User.GetUsername(); + _logger.LogInformation("Series {SeriesId} is being deleted by {UserName}", dto.SeriesIds, username); + + if (await _seriesService.DeleteMultipleSeries(dto.SeriesIds)) return Ok(); + + return BadRequest("There was an issue deleting the series requested"); + } + + /// + /// Returns All volumes for a series with progress information and Chapters + /// + /// + /// + [HttpGet("volumes")] + public async Task>> GetVolumes(int seriesId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return Ok(await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)); + } + + [HttpGet("volume")] + public async Task> GetVolume(int volumeId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return Ok(await _unitOfWork.VolumeRepository.GetVolumeDtoAsync(volumeId, userId)); + } + + [HttpGet("chapter")] + public async 
Task> GetChapter(int chapterId) + { + return Ok(await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapterId)); + } + + [HttpGet("chapter-metadata")] + public async Task> GetChapterMetadata(int chapterId) + { + return Ok(await _unitOfWork.ChapterRepository.GetChapterMetadataDtoAsync(chapterId)); + } + + + [HttpPost("update-rating")] + public async Task UpdateSeriesRating(UpdateSeriesRatingDto updateSeriesRatingDto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Ratings); + if (!await _seriesService.UpdateRating(user, updateSeriesRatingDto)) return BadRequest("There was a critical error."); + return Ok(); + } + + [HttpPost("update")] + public async Task UpdateSeries(UpdateSeriesDto updateSeries) + { + _logger.LogInformation("{UserName} is updating Series {SeriesName}", User.GetUsername(), updateSeries.Name); + + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(updateSeries.Id); + + if (series == null) return BadRequest("Series does not exist"); + + var seriesExists = + await _unitOfWork.SeriesRepository.DoesSeriesNameExistInLibrary(updateSeries.Name.Trim(), series.LibraryId, + series.Format); + if (series.Name != updateSeries.Name && seriesExists) + { + return BadRequest("A series already exists in this library with this name. 
Series Names must be unique to a library."); + } + + series.Name = updateSeries.Name.Trim(); + series.NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(series.Name); + if (!string.IsNullOrEmpty(updateSeries.SortName.Trim())) + { + series.SortName = updateSeries.SortName.Trim(); + } + + series.LocalizedName = updateSeries.LocalizedName.Trim(); + series.NormalizedLocalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(series.LocalizedName); + + series.NameLocked = updateSeries.NameLocked; + series.SortNameLocked = updateSeries.SortNameLocked; + series.LocalizedNameLocked = updateSeries.LocalizedNameLocked; + + + var needsRefreshMetadata = false; + // This is when you hit Reset + if (series.CoverImageLocked && !updateSeries.CoverImageLocked) + { + // Trigger a refresh when we are moving from a locked image to a non-locked + needsRefreshMetadata = true; + series.CoverImage = string.Empty; + series.CoverImageLocked = updateSeries.CoverImageLocked; + } + + _unitOfWork.SeriesRepository.Update(series); + + if (await _unitOfWork.CommitAsync()) + { + if (needsRefreshMetadata) + { + _taskScheduler.RefreshSeriesMetadata(series.LibraryId, series.Id); + } + return Ok(); + } + + return BadRequest("There was an error with updating the series"); + } + + [ResponseCache(CacheProfileName = "Instant")] + [HttpPost("recently-added")] + public async Task>> GetRecentlyAdded(FilterDto filterDto, [FromQuery] UserParams userParams, [FromQuery] int libraryId = 0) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + var series = + await _unitOfWork.SeriesRepository.GetRecentlyAdded(libraryId, userId, userParams, filterDto); + + // Apply progress/rating information (I can't work out how to do this in initial query) + if (series == null) return BadRequest("Could not get series"); + + await _unitOfWork.SeriesRepository.AddSeriesModifiers(userId, series); + + Response.AddPaginationHeader(series.CurrentPage, series.PageSize, 
series.TotalCount, series.TotalPages); + + return Ok(series); + } + + [ResponseCache(CacheProfileName = "Instant")] + [HttpPost("recently-updated-series")] + public async Task>> GetRecentlyAddedChapters() + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return Ok(await _unitOfWork.SeriesRepository.GetRecentlyUpdatedSeries(userId)); + } + + [HttpPost("all")] + public async Task>> GetAllSeries(FilterDto filterDto, [FromQuery] UserParams userParams, [FromQuery] int libraryId = 0) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + var series = + await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, userId, userParams, filterDto); + + // Apply progress/rating information (I can't work out how to do this in initial query) + if (series == null) return BadRequest("Could not get series"); + + await _unitOfWork.SeriesRepository.AddSeriesModifiers(userId, series); + + Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages); + + return Ok(series); + } + + /// + /// Fetches series that are on deck aka have progress on them. 
+ /// + /// + /// + /// Default of 0 meaning all libraries + /// + [ResponseCache(CacheProfileName = "Instant")] + [HttpPost("on-deck")] + public async Task>> GetOnDeck(FilterDto filterDto, [FromQuery] UserParams userParams, [FromQuery] int libraryId = 0) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + var pagedList = await _unitOfWork.SeriesRepository.GetOnDeck(userId, libraryId, userParams, filterDto); + + await _unitOfWork.SeriesRepository.AddSeriesModifiers(userId, pagedList); + + Response.AddPaginationHeader(pagedList.CurrentPage, pagedList.PageSize, pagedList.TotalCount, pagedList.TotalPages); + + return Ok(pagedList); + } + + /// + /// Runs a Cover Image Generation task + /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("refresh-metadata")] + public ActionResult RefreshSeriesMetadata(RefreshSeriesDto refreshSeriesDto) + { + _taskScheduler.RefreshSeriesMetadata(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId, refreshSeriesDto.ForceUpdate); + return Ok(); + } + + /// + /// Scan a series and force each file to be updated. This should be invoked via the User, hence why we force. + /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("scan")] + public ActionResult ScanSeries(RefreshSeriesDto refreshSeriesDto) + { + _taskScheduler.ScanSeries(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId, refreshSeriesDto.ForceUpdate); + return Ok(); + } + + /// + /// Run a file analysis on the series. 
+ /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("analyze")] + public ActionResult AnalyzeSeries(RefreshSeriesDto refreshSeriesDto) + { + _taskScheduler.AnalyzeFilesForSeries(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId, refreshSeriesDto.ForceUpdate); + return Ok(); + } + + /// + /// Returns metadata for a given series + /// + /// + /// + [HttpGet("metadata")] + public async Task> GetSeriesMetadata(int seriesId) + { + var metadata = await _unitOfWork.SeriesRepository.GetSeriesMetadata(seriesId); + return Ok(metadata); + } + + /// + /// Update series metadata + /// + /// + /// + [HttpPost("metadata")] + public async Task UpdateSeriesMetadata(UpdateSeriesMetadataDto updateSeriesMetadataDto) + { + if (await _seriesService.UpdateSeriesMetadata(updateSeriesMetadataDto)) + { + return Ok("Successfully updated"); + } + + return BadRequest("Could not update metadata"); + } + + /// + /// Returns all Series grouped by the passed Collection Id with Pagination. + /// + /// Collection Id to pull series from + /// Pagination information + /// + [HttpGet("series-by-collection")] + public async Task>> GetSeriesByCollectionTag(int collectionId, [FromQuery] UserParams userParams) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + var series = + await _unitOfWork.SeriesRepository.GetSeriesDtoForCollectionAsync(collectionId, userId, userParams); + + // Apply progress/rating information (I can't work out how to do this in initial query) + if (series == null) return BadRequest("Could not get series for collection"); + + await _unitOfWork.SeriesRepository.AddSeriesModifiers(userId, series); + + Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages); + + return Ok(series); + } + + /// + /// Fetches Series for a set of Ids. This will check User for permission access and filter out any Ids that don't exist or + /// the user does not have access to. 
+ /// + /// + [HttpPost("series-by-ids")] + public async Task>> GetAllSeriesById(SeriesByIdsDto dto) + { + if (dto.SeriesIds == null) return BadRequest("Must pass seriesIds"); + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return Ok(await _unitOfWork.SeriesRepository.GetSeriesDtoForIdsAsync(dto.SeriesIds, userId)); + } + + /// + /// Get the age rating for the enum value + /// + /// + /// + /// This is cached for an hour + [ResponseCache(CacheProfileName = "Hour", VaryByQueryKeys = new [] {"ageRating"})] + [HttpGet("age-rating")] + public ActionResult GetAgeRating(int ageRating) + { + var val = (AgeRating) ageRating; + if (val == AgeRating.NotApplicable) return "No Restriction"; + + return Ok(val.ToDescription()); + } + + /// + /// Get a special DTO for Series Detail page. + /// + /// + /// + /// Do not rely on this API externally. May change without hesitation. + [ResponseCache(CacheProfileName = "5Minute", VaryByQueryKeys = new [] {"seriesId"})] + [HttpGet("series-detail")] + public async Task> GetSeriesDetailBreakdown(int seriesId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return await _seriesService.GetSeriesDetail(seriesId, userId); + } + + + + /// + /// Fetches the related series for a given series + /// + /// + /// Type of Relationship to pull back + /// + [HttpGet("related")] + public async Task>> GetRelatedSeries(int seriesId, RelationKind relation) + { + // Send back a custom DTO with each type or maybe sorted in some way + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return Ok(await _unitOfWork.SeriesRepository.GetSeriesForRelationKind(userId, seriesId, relation)); + } + + /// + /// Returns all related series against the passed series Id + /// + /// + /// + [HttpGet("all-related")] + public async Task> GetAllRelatedSeries(int seriesId) + { + var userId = await 
_unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + return Ok(await _seriesService.GetRelatedSeries(userId, seriesId)); + } + + + /// + /// Update the relations attached to the Series. Does not generate associated Sequel/Prequel pairs on target series. + /// + /// + /// + [Authorize(Policy="RequireAdminRole")] + [HttpPost("update-related")] + public async Task UpdateRelatedSeries(UpdateRelatedSeriesDto dto) + { + if (await _seriesService.UpdateRelatedSeries(dto)) + { + return Ok(); + } + + return BadRequest("There was an issue updating relationships"); + } + } diff --git a/API/Controllers/ServerController.cs b/API/Controllers/ServerController.cs index 161541a24..f43bcf271 100644 --- a/API/Controllers/ServerController.cs +++ b/API/Controllers/ServerController.cs @@ -8,6 +8,7 @@ using API.DTOs.Jobs; using API.DTOs.Stats; using API.DTOs.Update; using API.Extensions; +using API.Logging; using API.Services; using API.Services.Tasks; using Hangfire; @@ -20,143 +21,154 @@ using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using TaskScheduler = System.Threading.Tasks.TaskScheduler; -namespace API.Controllers +namespace API.Controllers; + +[Authorize(Policy = "RequireAdminRole")] +public class ServerController : BaseApiController { - [Authorize(Policy = "RequireAdminRole")] - public class ServerController : BaseApiController + private readonly IHostApplicationLifetime _applicationLifetime; + private readonly ILogger _logger; + private readonly IBackupService _backupService; + private readonly IArchiveService _archiveService; + private readonly IVersionUpdaterService _versionUpdaterService; + private readonly IStatsService _statsService; + private readonly ICleanupService _cleanupService; + private readonly IEmailService _emailService; + private readonly IBookmarkService _bookmarkService; + + public ServerController(IHostApplicationLifetime applicationLifetime, ILogger logger, + IBackupService backupService, IArchiveService 
archiveService, IVersionUpdaterService versionUpdaterService, IStatsService statsService, + ICleanupService cleanupService, IEmailService emailService, IBookmarkService bookmarkService) { - private readonly IHostApplicationLifetime _applicationLifetime; - private readonly ILogger _logger; - private readonly IConfiguration _config; - private readonly IBackupService _backupService; - private readonly IArchiveService _archiveService; - private readonly IVersionUpdaterService _versionUpdaterService; - private readonly IStatsService _statsService; - private readonly ICleanupService _cleanupService; - private readonly IEmailService _emailService; - private readonly IBookmarkService _bookmarkService; + _applicationLifetime = applicationLifetime; + _logger = logger; + _backupService = backupService; + _archiveService = archiveService; + _versionUpdaterService = versionUpdaterService; + _statsService = statsService; + _cleanupService = cleanupService; + _emailService = emailService; + _bookmarkService = bookmarkService; + } - public ServerController(IHostApplicationLifetime applicationLifetime, ILogger logger, IConfiguration config, - IBackupService backupService, IArchiveService archiveService, IVersionUpdaterService versionUpdaterService, IStatsService statsService, - ICleanupService cleanupService, IEmailService emailService, IBookmarkService bookmarkService) + /// + /// Attempts to Restart the server. Does not work, will shutdown the instance. 
+ /// + /// + [HttpPost("restart")] + public ActionResult RestartServer() + { + _logger.LogInformation("{UserName} is restarting server from admin dashboard", User.GetUsername()); + + _applicationLifetime.StopApplication(); + return Ok(); + } + + /// + /// Performs an ad-hoc cleanup of Cache + /// + /// + [HttpPost("clear-cache")] + public ActionResult ClearCache() + { + _logger.LogInformation("{UserName} is clearing cache of server from admin dashboard", User.GetUsername()); + _cleanupService.CleanupCacheAndTempDirectories(); + + return Ok(); + } + + /// + /// Performs an ad-hoc cleanup of Want To Read, by removing want to read series for users, where the series are fully read and in Completed publication status. + /// + /// + [HttpPost("cleanup-want-to-read")] + public ActionResult CleanupWantToRead() + { + _logger.LogInformation("{UserName} is clearing running want to read cleanup from admin dashboard", User.GetUsername()); + RecurringJob.TriggerJob(API.Services.TaskScheduler.RemoveFromWantToReadTaskId); + + return Ok(); + } + + /// + /// Performs an ad-hoc backup of the Database + /// + /// + [HttpPost("backup-db")] + public ActionResult BackupDatabase() + { + _logger.LogInformation("{UserName} is backing up database of server from admin dashboard", User.GetUsername()); + RecurringJob.TriggerJob(API.Services.TaskScheduler.BackupTaskId); + return Ok(); + } + + /// + /// Returns non-sensitive information about the current system + /// + /// + [HttpGet("server-info")] + public async Task> GetVersion() + { + return Ok(await _statsService.GetServerInfo()); + } + + /// + /// Triggers the scheduling of the convert bookmarks job. Only one job will run at a time. 
+ /// + /// + [HttpPost("convert-bookmarks")] + public ActionResult ScheduleConvertBookmarks() + { + BackgroundJob.Enqueue(() => _bookmarkService.ConvertAllBookmarkToWebP()); + return Ok(); + } + + [HttpGet("logs")] + public ActionResult GetLogs() + { + var files = _backupService.GetLogFiles(); + try { - _applicationLifetime = applicationLifetime; - _logger = logger; - _config = config; - _backupService = backupService; - _archiveService = archiveService; - _versionUpdaterService = versionUpdaterService; - _statsService = statsService; - _cleanupService = cleanupService; - _emailService = emailService; - _bookmarkService = bookmarkService; + var zipPath = _archiveService.CreateZipForDownload(files, "logs"); + return PhysicalFile(zipPath, "application/zip", Path.GetFileName(zipPath), true); } - - /// - /// Attempts to Restart the server. Does not work, will shutdown the instance. - /// - /// - [HttpPost("restart")] - public ActionResult RestartServer() + catch (KavitaException ex) { - _logger.LogInformation("{UserName} is restarting server from admin dashboard", User.GetUsername()); - - _applicationLifetime.StopApplication(); - return Ok(); + return BadRequest(ex.Message); } + } - /// - /// Performs an ad-hoc cleanup of Cache - /// - /// - [HttpPost("clear-cache")] - public ActionResult ClearCache() - { - _logger.LogInformation("{UserName} is clearing cache of server from admin dashboard", User.GetUsername()); - _cleanupService.CleanupCacheDirectory(); + /// + /// Checks for updates, if no updates that are > current version installed, returns null + /// + [HttpGet("check-update")] + public async Task> CheckForUpdates() + { + return Ok(await _versionUpdaterService.CheckForUpdate()); + } - return Ok(); - } + [HttpGet("changelog")] + public async Task>> GetChangelog() + { + return Ok(await _versionUpdaterService.GetAllReleases()); + } - /// - /// Performs an ad-hoc backup of the Database - /// - /// - [HttpPost("backup-db")] - public ActionResult BackupDatabase() - { - 
_logger.LogInformation("{UserName} is backing up database of server from admin dashboard", User.GetUsername()); - RecurringJob.Trigger("backup"); - return Ok(); - } + /// + /// Is this server accessible to the outside net + /// + /// + [HttpGet("accessible")] + [AllowAnonymous] + public async Task> IsServerAccessible() + { + return await _emailService.CheckIfAccessible(Request.Host.ToString()); + } - /// - /// Returns non-sensitive information about the current system - /// - /// - [HttpGet("server-info")] - public async Task> GetVersion() - { - return Ok(await _statsService.GetServerInfo()); - } - - /// - /// Triggers the scheduling of the convert bookmarks job. Only one job will run at a time. - /// - /// - [HttpPost("convert-bookmarks")] - public ActionResult ScheduleConvertBookmarks() - { - BackgroundJob.Enqueue(() => _bookmarkService.ConvertAllBookmarkToWebP()); - return Ok(); - } - - [HttpGet("logs")] - public ActionResult GetLogs() - { - var files = _backupService.GetLogFiles(_config.GetMaxRollingFiles(), _config.GetLoggingFileName()); - try - { - var zipPath = _archiveService.CreateZipForDownload(files, "logs"); - return PhysicalFile(zipPath, "application/zip", Path.GetFileName(zipPath), true); - } - catch (KavitaException ex) - { - return BadRequest(ex.Message); - } - } - - /// - /// Checks for updates, if no updates that are > current version installed, returns null - /// - [HttpGet("check-update")] - public async Task> CheckForUpdates() - { - return Ok(await _versionUpdaterService.CheckForUpdate()); - } - - [HttpGet("changelog")] - public async Task>> GetChangelog() - { - return Ok(await _versionUpdaterService.GetAllReleases()); - } - - /// - /// Is this server accessible to the outside net - /// - /// - [HttpGet("accessible")] - [AllowAnonymous] - public async Task> IsServerAccessible() - { - return await _emailService.CheckIfAccessible(Request.Host.ToString()); - } - - [HttpGet("jobs")] - public ActionResult> GetJobs() - { - var recurringJobs = 
Hangfire.JobStorage.Current.GetConnection().GetRecurringJobs().Select( - dto => + [HttpGet("jobs")] + public ActionResult> GetJobs() + { + var recurringJobs = JobStorage.Current.GetConnection().GetRecurringJobs().Select( + dto => new JobDto() { Id = dto.Id, Title = dto.Id.Replace('-', ' '), @@ -165,10 +177,9 @@ namespace API.Controllers LastExecution = dto.LastExecution, }); - // For now, let's just do something simple - //var enqueuedJobs = JobStorage.Current.GetMonitoringApi().EnqueuedJobs("default", 0, int.MaxValue); - return Ok(recurringJobs); + // For now, let's just do something simple + //var enqueuedJobs = JobStorage.Current.GetMonitoringApi().EnqueuedJobs("default", 0, int.MaxValue); + return Ok(recurringJobs); - } } } diff --git a/API/Controllers/SettingsController.cs b/API/Controllers/SettingsController.cs index e1a758775..739cb6e18 100644 --- a/API/Controllers/SettingsController.cs +++ b/API/Controllers/SettingsController.cs @@ -9,6 +9,7 @@ using API.DTOs.Settings; using API.Entities.Enums; using API.Extensions; using API.Helpers.Converters; +using API.Logging; using API.Services; using API.Services.Tasks.Scanner; using AutoMapper; @@ -20,285 +21,294 @@ using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Logging; -namespace API.Controllers +namespace API.Controllers; + +public class SettingsController : BaseApiController { - public class SettingsController : BaseApiController + private readonly ILogger _logger; + private readonly IUnitOfWork _unitOfWork; + private readonly ITaskScheduler _taskScheduler; + private readonly IDirectoryService _directoryService; + private readonly IMapper _mapper; + private readonly IEmailService _emailService; + private readonly ILibraryWatcher _libraryWatcher; + + public SettingsController(ILogger logger, IUnitOfWork unitOfWork, ITaskScheduler taskScheduler, + IDirectoryService directoryService, IMapper mapper, IEmailService emailService, ILibraryWatcher libraryWatcher) { - 
private readonly ILogger _logger; - private readonly IUnitOfWork _unitOfWork; - private readonly ITaskScheduler _taskScheduler; - private readonly IDirectoryService _directoryService; - private readonly IMapper _mapper; - private readonly IEmailService _emailService; - private readonly ILibraryWatcher _libraryWatcher; + _logger = logger; + _unitOfWork = unitOfWork; + _taskScheduler = taskScheduler; + _directoryService = directoryService; + _mapper = mapper; + _emailService = emailService; + _libraryWatcher = libraryWatcher; + } - public SettingsController(ILogger logger, IUnitOfWork unitOfWork, ITaskScheduler taskScheduler, - IDirectoryService directoryService, IMapper mapper, IEmailService emailService, ILibraryWatcher libraryWatcher) + [AllowAnonymous] + [HttpGet("base-url")] + public async Task> GetBaseUrl() + { + var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync(); + return Ok(settingsDto.BaseUrl); + } + + [Authorize(Policy = "RequireAdminRole")] + [HttpGet] + public async Task> GetSettings() + { + var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync(); + return Ok(settingsDto); + } + + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("reset")] + public async Task> ResetSettings() + { + _logger.LogInformation("{UserName} is resetting Server Settings", User.GetUsername()); + + return await UpdateSettings(_mapper.Map(Seed.DefaultSettings)); + } + + /// + /// Resets the email service url + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("reset-email-url")] + public async Task> ResetEmailServiceUrlSettings() + { + _logger.LogInformation("{UserName} is resetting Email Service Url Setting", User.GetUsername()); + var emailSetting = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.EmailServiceUrl); + emailSetting.Value = EmailService.DefaultApiUrl; + _unitOfWork.SettingsRepository.Update(emailSetting); + + if (!await _unitOfWork.CommitAsync()) { - _logger = logger; - _unitOfWork = 
unitOfWork; - _taskScheduler = taskScheduler; - _directoryService = directoryService; - _mapper = mapper; - _emailService = emailService; - _libraryWatcher = libraryWatcher; + await _unitOfWork.RollbackAsync(); } - [AllowAnonymous] - [HttpGet("base-url")] - public async Task> GetBaseUrl() + return Ok(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()); + } + + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("test-email-url")] + public async Task> TestEmailServiceUrl(TestEmailDto dto) + { + return Ok(await _emailService.TestConnectivity(dto.Url)); + } + + + + [Authorize(Policy = "RequireAdminRole")] + [HttpPost] + public async Task> UpdateSettings(ServerSettingDto updateSettingsDto) + { + _logger.LogInformation("{UserName} is updating Server Settings", User.GetUsername()); + + // We do not allow CacheDirectory changes, so we will ignore. + var currentSettings = await _unitOfWork.SettingsRepository.GetSettingsAsync(); + var updateBookmarks = false; + var originalBookmarkDirectory = _directoryService.BookmarkDirectory; + + var bookmarkDirectory = updateSettingsDto.BookmarksDirectory; + if (!updateSettingsDto.BookmarksDirectory.EndsWith("bookmarks") && + !updateSettingsDto.BookmarksDirectory.EndsWith("bookmarks/")) { - var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync(); - return Ok(settingsDto.BaseUrl); + bookmarkDirectory = _directoryService.FileSystem.Path.Join(updateSettingsDto.BookmarksDirectory, "bookmarks"); } - [Authorize(Policy = "RequireAdminRole")] - [HttpGet] - public async Task> GetSettings() + if (string.IsNullOrEmpty(updateSettingsDto.BookmarksDirectory)) { - var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync(); - return Ok(settingsDto); + bookmarkDirectory = _directoryService.BookmarkDirectory; } - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("reset")] - public async Task> ResetSettings() + foreach (var setting in currentSettings) { - _logger.LogInformation("{UserName} is resetting 
Server Settings", User.GetUsername()); - - return await UpdateSettings(_mapper.Map(Seed.DefaultSettings)); - } - - /// - /// Resets the email service url - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("reset-email-url")] - public async Task> ResetEmailServiceUrlSettings() - { - _logger.LogInformation("{UserName} is resetting Email Service Url Setting", User.GetUsername()); - var emailSetting = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.EmailServiceUrl); - emailSetting.Value = EmailService.DefaultApiUrl; - _unitOfWork.SettingsRepository.Update(emailSetting); - - if (!await _unitOfWork.CommitAsync()) + if (setting.Key == ServerSettingKey.TaskBackup && updateSettingsDto.TaskBackup != setting.Value) { - await _unitOfWork.RollbackAsync(); + setting.Value = updateSettingsDto.TaskBackup; + _unitOfWork.SettingsRepository.Update(setting); } - return Ok(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()); - } - - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("test-email-url")] - public async Task> TestEmailServiceUrl(TestEmailDto dto) - { - return Ok(await _emailService.TestConnectivity(dto.Url)); - } - - - - [Authorize(Policy = "RequireAdminRole")] - [HttpPost] - public async Task> UpdateSettings(ServerSettingDto updateSettingsDto) - { - _logger.LogInformation("{UserName} is updating Server Settings", User.GetUsername()); - - // We do not allow CacheDirectory changes, so we will ignore. 
- var currentSettings = await _unitOfWork.SettingsRepository.GetSettingsAsync(); - var updateBookmarks = false; - var originalBookmarkDirectory = _directoryService.BookmarkDirectory; - - var bookmarkDirectory = updateSettingsDto.BookmarksDirectory; - if (!updateSettingsDto.BookmarksDirectory.EndsWith("bookmarks") && - !updateSettingsDto.BookmarksDirectory.EndsWith("bookmarks/")) + if (setting.Key == ServerSettingKey.TaskScan && updateSettingsDto.TaskScan != setting.Value) { - bookmarkDirectory = _directoryService.FileSystem.Path.Join(updateSettingsDto.BookmarksDirectory, "bookmarks"); + setting.Value = updateSettingsDto.TaskScan; + _unitOfWork.SettingsRepository.Update(setting); } - if (string.IsNullOrEmpty(updateSettingsDto.BookmarksDirectory)) + if (setting.Key == ServerSettingKey.Port && updateSettingsDto.Port + string.Empty != setting.Value) { - bookmarkDirectory = _directoryService.BookmarkDirectory; + setting.Value = updateSettingsDto.Port + string.Empty; + // Port is managed in appSetting.json + Configuration.Port = updateSettingsDto.Port; + _unitOfWork.SettingsRepository.Update(setting); } - foreach (var setting in currentSettings) + if (setting.Key == ServerSettingKey.BaseUrl && updateSettingsDto.BaseUrl + string.Empty != setting.Value) { - if (setting.Key == ServerSettingKey.TaskBackup && updateSettingsDto.TaskBackup != setting.Value) - { - setting.Value = updateSettingsDto.TaskBackup; - _unitOfWork.SettingsRepository.Update(setting); - } - - if (setting.Key == ServerSettingKey.TaskScan && updateSettingsDto.TaskScan != setting.Value) - { - setting.Value = updateSettingsDto.TaskScan; - _unitOfWork.SettingsRepository.Update(setting); - } - - if (setting.Key == ServerSettingKey.Port && updateSettingsDto.Port + string.Empty != setting.Value) - { - setting.Value = updateSettingsDto.Port + string.Empty; - // Port is managed in appSetting.json - Configuration.Port = updateSettingsDto.Port; - _unitOfWork.SettingsRepository.Update(setting); - } - - if (setting.Key 
== ServerSettingKey.BaseUrl && updateSettingsDto.BaseUrl + string.Empty != setting.Value) - { - var path = !updateSettingsDto.BaseUrl.StartsWith("/") - ? $"/{updateSettingsDto.BaseUrl}" - : updateSettingsDto.BaseUrl; - path = !path.EndsWith("/") - ? $"{path}/" - : path; - setting.Value = path; - _unitOfWork.SettingsRepository.Update(setting); - } - - if (setting.Key == ServerSettingKey.LoggingLevel && updateSettingsDto.LoggingLevel + string.Empty != setting.Value) - { - setting.Value = updateSettingsDto.LoggingLevel + string.Empty; - Configuration.LogLevel = updateSettingsDto.LoggingLevel; - _unitOfWork.SettingsRepository.Update(setting); - } - - if (setting.Key == ServerSettingKey.EnableOpds && updateSettingsDto.EnableOpds + string.Empty != setting.Value) - { - setting.Value = updateSettingsDto.EnableOpds + string.Empty; - _unitOfWork.SettingsRepository.Update(setting); - } - - if (setting.Key == ServerSettingKey.ConvertBookmarkToWebP && updateSettingsDto.ConvertBookmarkToWebP + string.Empty != setting.Value) - { - setting.Value = updateSettingsDto.ConvertBookmarkToWebP + string.Empty; - _unitOfWork.SettingsRepository.Update(setting); - } - - - if (setting.Key == ServerSettingKey.BookmarkDirectory && bookmarkDirectory != setting.Value) - { - // Validate new directory can be used - if (!await _directoryService.CheckWriteAccess(bookmarkDirectory)) - { - return BadRequest("Bookmark Directory does not have correct permissions for Kavita to use"); - } - - originalBookmarkDirectory = setting.Value; - // Normalize the path deliminators. 
Just to look nice in DB, no functionality - setting.Value = _directoryService.FileSystem.Path.GetFullPath(bookmarkDirectory); - _unitOfWork.SettingsRepository.Update(setting); - updateBookmarks = true; - - } - - if (setting.Key == ServerSettingKey.AllowStatCollection && updateSettingsDto.AllowStatCollection + string.Empty != setting.Value) - { - setting.Value = updateSettingsDto.AllowStatCollection + string.Empty; - _unitOfWork.SettingsRepository.Update(setting); - if (!updateSettingsDto.AllowStatCollection) - { - _taskScheduler.CancelStatsTasks(); - } - else - { - await _taskScheduler.ScheduleStatsTasks(); - } - } - - if (setting.Key == ServerSettingKey.EnableSwaggerUi && updateSettingsDto.EnableSwaggerUi + string.Empty != setting.Value) - { - setting.Value = updateSettingsDto.EnableSwaggerUi + string.Empty; - _unitOfWork.SettingsRepository.Update(setting); - } - - if (setting.Key == ServerSettingKey.TotalBackups && updateSettingsDto.TotalBackups + string.Empty != setting.Value) - { - if (updateSettingsDto.TotalBackups > 30 || updateSettingsDto.TotalBackups < 1) - { - return BadRequest("Total Backups must be between 1 and 30"); - } - setting.Value = updateSettingsDto.TotalBackups + string.Empty; - _unitOfWork.SettingsRepository.Update(setting); - } - - if (setting.Key == ServerSettingKey.EmailServiceUrl && updateSettingsDto.EmailServiceUrl + string.Empty != setting.Value) - { - setting.Value = string.IsNullOrEmpty(updateSettingsDto.EmailServiceUrl) ? 
EmailService.DefaultApiUrl : updateSettingsDto.EmailServiceUrl; - FlurlHttp.ConfigureClient(setting.Value, cli => - cli.Settings.HttpClientFactory = new UntrustedCertClientFactory()); - - _unitOfWork.SettingsRepository.Update(setting); - } - - if (setting.Key == ServerSettingKey.EnableFolderWatching && updateSettingsDto.EnableFolderWatching + string.Empty != setting.Value) - { - setting.Value = updateSettingsDto.EnableFolderWatching + string.Empty; - _unitOfWork.SettingsRepository.Update(setting); - - if (updateSettingsDto.EnableFolderWatching) - { - await _libraryWatcher.StartWatching(); - } - else - { - _libraryWatcher.StopWatching(); - } - } + var path = !updateSettingsDto.BaseUrl.StartsWith("/") + ? $"/{updateSettingsDto.BaseUrl}" + : updateSettingsDto.BaseUrl; + path = !path.EndsWith("/") + ? $"{path}/" + : path; + setting.Value = path; + _unitOfWork.SettingsRepository.Update(setting); } - if (!_unitOfWork.HasChanges()) return Ok(updateSettingsDto); - - try + if (setting.Key == ServerSettingKey.LoggingLevel && updateSettingsDto.LoggingLevel + string.Empty != setting.Value) { - await _unitOfWork.CommitAsync(); - - if (updateBookmarks) - { - _directoryService.ExistOrCreate(bookmarkDirectory); - _directoryService.CopyDirectoryToDirectory(originalBookmarkDirectory, bookmarkDirectory); - _directoryService.ClearAndDeleteDirectory(originalBookmarkDirectory); - } + setting.Value = updateSettingsDto.LoggingLevel + string.Empty; + LogLevelOptions.SwitchLogLevel(updateSettingsDto.LoggingLevel); + _unitOfWork.SettingsRepository.Update(setting); } - catch (Exception ex) + + if (setting.Key == ServerSettingKey.EnableOpds && updateSettingsDto.EnableOpds + string.Empty != setting.Value) { - _logger.LogError(ex, "There was an exception when updating server settings"); - await _unitOfWork.RollbackAsync(); - return BadRequest("There was a critical issue. 
Please try again."); + setting.Value = updateSettingsDto.EnableOpds + string.Empty; + _unitOfWork.SettingsRepository.Update(setting); + } + + if (setting.Key == ServerSettingKey.ConvertBookmarkToWebP && updateSettingsDto.ConvertBookmarkToWebP + string.Empty != setting.Value) + { + setting.Value = updateSettingsDto.ConvertBookmarkToWebP + string.Empty; + _unitOfWork.SettingsRepository.Update(setting); } - _logger.LogInformation("Server Settings updated"); - await _taskScheduler.ScheduleTasks(); - return Ok(updateSettingsDto); + if (setting.Key == ServerSettingKey.BookmarkDirectory && bookmarkDirectory != setting.Value) + { + // Validate new directory can be used + if (!await _directoryService.CheckWriteAccess(bookmarkDirectory)) + { + return BadRequest("Bookmark Directory does not have correct permissions for Kavita to use"); + } + + originalBookmarkDirectory = setting.Value; + // Normalize the path deliminators. Just to look nice in DB, no functionality + setting.Value = _directoryService.FileSystem.Path.GetFullPath(bookmarkDirectory); + _unitOfWork.SettingsRepository.Update(setting); + updateBookmarks = true; + + } + + if (setting.Key == ServerSettingKey.AllowStatCollection && updateSettingsDto.AllowStatCollection + string.Empty != setting.Value) + { + setting.Value = updateSettingsDto.AllowStatCollection + string.Empty; + _unitOfWork.SettingsRepository.Update(setting); + if (!updateSettingsDto.AllowStatCollection) + { + _taskScheduler.CancelStatsTasks(); + } + else + { + await _taskScheduler.ScheduleStatsTasks(); + } + } + + if (setting.Key == ServerSettingKey.EnableSwaggerUi && updateSettingsDto.EnableSwaggerUi + string.Empty != setting.Value) + { + setting.Value = updateSettingsDto.EnableSwaggerUi + string.Empty; + _unitOfWork.SettingsRepository.Update(setting); + } + + if (setting.Key == ServerSettingKey.TotalBackups && updateSettingsDto.TotalBackups + string.Empty != setting.Value) + { + if (updateSettingsDto.TotalBackups > 30 || 
updateSettingsDto.TotalBackups < 1) + { + return BadRequest("Total Backups must be between 1 and 30"); + } + setting.Value = updateSettingsDto.TotalBackups + string.Empty; + _unitOfWork.SettingsRepository.Update(setting); + } + + if (setting.Key == ServerSettingKey.TotalLogs && updateSettingsDto.TotalLogs + string.Empty != setting.Value) + { + if (updateSettingsDto.TotalLogs > 30 || updateSettingsDto.TotalLogs < 1) + { + return BadRequest("Total Logs must be between 1 and 30"); + } + setting.Value = updateSettingsDto.TotalLogs + string.Empty; + _unitOfWork.SettingsRepository.Update(setting); + } + + if (setting.Key == ServerSettingKey.EmailServiceUrl && updateSettingsDto.EmailServiceUrl + string.Empty != setting.Value) + { + setting.Value = string.IsNullOrEmpty(updateSettingsDto.EmailServiceUrl) ? EmailService.DefaultApiUrl : updateSettingsDto.EmailServiceUrl; + FlurlHttp.ConfigureClient(setting.Value, cli => + cli.Settings.HttpClientFactory = new UntrustedCertClientFactory()); + + _unitOfWork.SettingsRepository.Update(setting); + } + + if (setting.Key == ServerSettingKey.EnableFolderWatching && updateSettingsDto.EnableFolderWatching + string.Empty != setting.Value) + { + setting.Value = updateSettingsDto.EnableFolderWatching + string.Empty; + _unitOfWork.SettingsRepository.Update(setting); + + if (updateSettingsDto.EnableFolderWatching) + { + await _libraryWatcher.StartWatching(); + } + else + { + _libraryWatcher.StopWatching(); + } + } } - [Authorize(Policy = "RequireAdminRole")] - [HttpGet("task-frequencies")] - public ActionResult> GetTaskFrequencies() + if (!_unitOfWork.HasChanges()) return Ok(updateSettingsDto); + + try { - return Ok(CronConverter.Options); + await _unitOfWork.CommitAsync(); + + if (updateBookmarks) + { + _directoryService.ExistOrCreate(bookmarkDirectory); + _directoryService.CopyDirectoryToDirectory(originalBookmarkDirectory, bookmarkDirectory); + _directoryService.ClearAndDeleteDirectory(originalBookmarkDirectory); + } + } + catch 
(Exception ex) + { + _logger.LogError(ex, "There was an exception when updating server settings"); + await _unitOfWork.RollbackAsync(); + return BadRequest("There was a critical issue. Please try again."); } - [Authorize(Policy = "RequireAdminRole")] - [HttpGet("library-types")] - public ActionResult> GetLibraryTypes() - { - return Ok(Enum.GetValues().Select(t => t.ToDescription())); - } - [Authorize(Policy = "RequireAdminRole")] - [HttpGet("log-levels")] - public ActionResult> GetLogLevels() - { - return Ok(new [] {"Trace", "Debug", "Information", "Warning", "Critical"}); - } + _logger.LogInformation("Server Settings updated"); + await _taskScheduler.ScheduleTasks(); + return Ok(updateSettingsDto); + } - [HttpGet("opds-enabled")] - public async Task> GetOpdsEnabled() - { - var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync(); - return Ok(settingsDto.EnableOpds); - } + [Authorize(Policy = "RequireAdminRole")] + [HttpGet("task-frequencies")] + public ActionResult> GetTaskFrequencies() + { + return Ok(CronConverter.Options); + } + + [Authorize(Policy = "RequireAdminRole")] + [HttpGet("library-types")] + public ActionResult> GetLibraryTypes() + { + return Ok(Enum.GetValues().Select(t => t.ToDescription())); + } + + [Authorize(Policy = "RequireAdminRole")] + [HttpGet("log-levels")] + public ActionResult> GetLogLevels() + { + return Ok(new [] {"Trace", "Debug", "Information", "Warning", "Critical"}); + } + + [HttpGet("opds-enabled")] + public async Task> GetOpdsEnabled() + { + var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync(); + return Ok(settingsDto.EnableOpds); } } diff --git a/API/Controllers/TachiyomiController.cs b/API/Controllers/TachiyomiController.cs index f1f6a1f03..77f32764d 100644 --- a/API/Controllers/TachiyomiController.cs +++ b/API/Controllers/TachiyomiController.cs @@ -1,15 +1,9 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using System.Threading.Tasks; 
-using API.Comparators; +using System.Threading.Tasks; using API.Data; using API.Data.Repositories; using API.DTOs; -using API.Entities; using API.Extensions; using API.Services; -using AutoMapper; using Microsoft.AspNetCore.Mvc; namespace API.Controllers; @@ -21,14 +15,12 @@ namespace API.Controllers; public class TachiyomiController : BaseApiController { private readonly IUnitOfWork _unitOfWork; - private readonly IReaderService _readerService; - private readonly IMapper _mapper; + private readonly ITachiyomiService _tachiyomiService; - public TachiyomiController(IUnitOfWork unitOfWork, IReaderService readerService, IMapper mapper) + public TachiyomiController(IUnitOfWork unitOfWork, ITachiyomiService tachiyomiService) { _unitOfWork = unitOfWork; - _readerService = readerService; - _mapper = mapper; + _tachiyomiService = tachiyomiService; } /// @@ -39,53 +31,9 @@ public class TachiyomiController : BaseApiController [HttpGet("latest-chapter")] public async Task> GetLatestChapter(int seriesId) { + if (seriesId < 1) return BadRequest("seriesId must be greater than 0"); var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - - var currentChapter = await _readerService.GetContinuePoint(seriesId, userId); - - var prevChapterId = - await _readerService.GetPrevChapterIdAsync(seriesId, currentChapter.VolumeId, currentChapter.Id, userId); - - // If prevChapterId is -1, this means either nothing is read or everything is read. 
- if (prevChapterId == -1) - { - var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); - var userHasProgress = series.PagesRead != 0 && series.PagesRead < series.Pages; - - // If the user doesn't have progress, then return null, which the extension will catch as 204 (no content) and report nothing as read - if (!userHasProgress) return null; - - // Else return the max chapter to Tachiyomi so it can consider everything read - var volumes = (await _unitOfWork.VolumeRepository.GetVolumes(seriesId)).ToImmutableList(); - var looseLeafChapterVolume = volumes.FirstOrDefault(v => v.Number == 0); - if (looseLeafChapterVolume == null) - { - var volumeChapter = _mapper.Map(volumes.Last().Chapters.OrderBy(c => float.Parse(c.Number), ChapterSortComparerZeroFirst.Default).Last()); - return Ok(new ChapterDto() - { - Number = $"{int.Parse(volumeChapter.Number) / 100f}" - }); - } - - var lastChapter = looseLeafChapterVolume.Chapters.OrderBy(c => float.Parse(c.Number), ChapterSortComparer.Default).Last(); - return Ok(_mapper.Map(lastChapter)); - } - - // There is progress, we now need to figure out the highest volume or chapter and return that. 
- var prevChapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(prevChapterId); - var volumeWithProgress = await _unitOfWork.VolumeRepository.GetVolumeDtoAsync(prevChapter.VolumeId, userId); - // We only encode for single-file volumes - if (volumeWithProgress.Number != 0 && volumeWithProgress.Chapters.Count == 1) - { - // The progress is on a volume, encode it as a fake chapterDTO - return Ok(new ChapterDto() - { - Number = $"{volumeWithProgress.Number / 100f}" - }); - } - - // Progress is just on a chapter, return as is - return Ok(prevChapter); + return Ok(await _tachiyomiService.GetLatestChapter(seriesId, userId)); } /// @@ -97,34 +45,6 @@ public class TachiyomiController : BaseApiController public async Task> MarkChaptersUntilAsRead(int seriesId, float chapterNumber) { var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); - user.Progresses ??= new List(); - - switch (chapterNumber) - { - // When Tachiyomi sync's progress, if there is no current progress in Tachiyomi, 0.0f is sent. - // Due to the encoding for volumes, this marks all chapters in volume 0 (loose chapters) as read. - // Hence we catch and return early, so we ignore the request. - case 0.0f: - return true; - case < 1.0f: - { - // This is a hack to track volume number. 
We need to map it back by x100 - var volumeNumber = int.Parse($"{chapterNumber * 100f}"); - await _readerService.MarkVolumesUntilAsRead(user, seriesId, volumeNumber); - break; - } - default: - await _readerService.MarkChaptersUntilAsRead(user, seriesId, chapterNumber); - break; - } - - - _unitOfWork.UserRepository.Update(user); - - if (!_unitOfWork.HasChanges()) return Ok(true); - if (await _unitOfWork.CommitAsync()) return Ok(true); - - await _unitOfWork.RollbackAsync(); - return Ok(false); + return Ok(await _tachiyomiService.MarkChaptersUntilAsRead(user, seriesId, chapterNumber)); } } diff --git a/API/Controllers/ThemeController.cs b/API/Controllers/ThemeController.cs index 6defbe574..d6a9b526e 100644 --- a/API/Controllers/ThemeController.cs +++ b/API/Controllers/ThemeController.cs @@ -24,6 +24,7 @@ public class ThemeController : BaseApiController _taskScheduler = taskScheduler; } + [ResponseCache(CacheProfileName = "10Minute")] [AllowAnonymous] [HttpGet] public async Task>> GetThemes() diff --git a/API/Controllers/UploadController.cs b/API/Controllers/UploadController.cs index c7def1408..68d28e442 100644 --- a/API/Controllers/UploadController.cs +++ b/API/Controllers/UploadController.cs @@ -12,298 +12,297 @@ using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Logging; using NetVips; -namespace API.Controllers +namespace API.Controllers; + +/// +/// +/// +[Authorize(Policy = "RequireAdminRole")] +public class UploadController : BaseApiController { - /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - public class UploadController : BaseApiController + private readonly IUnitOfWork _unitOfWork; + private readonly IImageService _imageService; + private readonly ILogger _logger; + private readonly ITaskScheduler _taskScheduler; + private readonly IDirectoryService _directoryService; + private readonly IEventHub _eventHub; + + /// + public UploadController(IUnitOfWork unitOfWork, IImageService imageService, ILogger logger, + ITaskScheduler 
taskScheduler, IDirectoryService directoryService, IEventHub eventHub) { - private readonly IUnitOfWork _unitOfWork; - private readonly IImageService _imageService; - private readonly ILogger _logger; - private readonly ITaskScheduler _taskScheduler; - private readonly IDirectoryService _directoryService; - private readonly IEventHub _eventHub; + _unitOfWork = unitOfWork; + _imageService = imageService; + _logger = logger; + _taskScheduler = taskScheduler; + _directoryService = directoryService; + _eventHub = eventHub; + } - /// - public UploadController(IUnitOfWork unitOfWork, IImageService imageService, ILogger logger, - ITaskScheduler taskScheduler, IDirectoryService directoryService, IEventHub eventHub) + /// + /// This stores a file (image) in temp directory for use in a cover image replacement flow. + /// This is automatically cleaned up. + /// + /// Escaped url to download from + /// filename + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("upload-by-url")] + public async Task> GetImageFromFile(UploadUrlDto dto) + { + var dateString = $"{DateTime.Now.ToShortDateString()}_{DateTime.Now.ToLongTimeString()}".Replace('/', '_').Replace(':', '_'); + var format = _directoryService.FileSystem.Path.GetExtension(dto.Url.Split('?')[0]).Replace(".", string.Empty); + try { - _unitOfWork = unitOfWork; - _imageService = imageService; - _logger = logger; - _taskScheduler = taskScheduler; - _directoryService = directoryService; - _eventHub = eventHub; + var path = await dto.Url + .DownloadFileAsync(_directoryService.TempDirectory, $"coverupload_{dateString}.{format}"); + + if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) + return BadRequest($"Could not download file"); + + if (!await _imageService.IsImage(path)) return BadRequest("Url does not return a valid image"); + + return $"coverupload_{dateString}.{format}"; + } + catch (FlurlHttpException ex) + { + // Unauthorized + if (ex.StatusCode == 401) + return BadRequest("The server 
requires authentication to load the url externally"); } - /// - /// This stores a file (image) in temp directory for use in a cover image replacement flow. - /// This is automatically cleaned up. - /// - /// Escaped url to download from - /// filename - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("upload-by-url")] - public async Task> GetImageFromFile(UploadUrlDto dto) + return BadRequest("Unable to download image, please use another url or upload by file"); + } + + /// + /// Replaces series cover image and locks it with a base64 encoded image + /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [RequestSizeLimit(8_000_000)] + [HttpPost("series")] + public async Task UploadSeriesCoverImageFromUrl(UploadFileDto uploadFileDto) + { + // Check if Url is non empty, request the image and place in temp, then ask image service to handle it. + // See if we can do this all in memory without touching underlying system + if (string.IsNullOrEmpty(uploadFileDto.Url)) { - var dateString = $"{DateTime.Now.ToShortDateString()}_{DateTime.Now.ToLongTimeString()}".Replace("/", "_").Replace(":", "_"); - var format = _directoryService.FileSystem.Path.GetExtension(dto.Url.Split('?')[0]).Replace(".", ""); - try - { - var path = await dto.Url - .DownloadFileAsync(_directoryService.TempDirectory, $"coverupload_{dateString}.{format}"); - - if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) - return BadRequest($"Could not download file"); - - if (!await _imageService.IsImage(path)) return BadRequest("Url does not return a valid image"); - - return $"coverupload_{dateString}.{format}"; - } - catch (FlurlHttpException ex) - { - // Unauthorized - if (ex.StatusCode == 401) - return BadRequest("The server requires authentication to load the url externally"); - } - - return BadRequest("Unable to download image, please use another url or upload by file"); + return BadRequest("You must pass a url to use"); } - /// - /// Replaces series cover image 
and locks it with a base64 encoded image - /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [RequestSizeLimit(8_000_000)] - [HttpPost("series")] - public async Task UploadSeriesCoverImageFromUrl(UploadFileDto uploadFileDto) + try { - // Check if Url is non empty, request the image and place in temp, then ask image service to handle it. - // See if we can do this all in memory without touching underlying system - if (string.IsNullOrEmpty(uploadFileDto.Url)) + var filePath = _imageService.CreateThumbnailFromBase64(uploadFileDto.Url, ImageService.GetSeriesFormat(uploadFileDto.Id)); + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(uploadFileDto.Id); + + if (!string.IsNullOrEmpty(filePath)) { - return BadRequest("You must pass a url to use"); + series.CoverImage = filePath; + series.CoverImageLocked = true; + _unitOfWork.SeriesRepository.Update(series); } - try + if (_unitOfWork.HasChanges()) { - var filePath = _imageService.CreateThumbnailFromBase64(uploadFileDto.Url, ImageService.GetSeriesFormat(uploadFileDto.Id)); - var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(uploadFileDto.Id); - - if (!string.IsNullOrEmpty(filePath)) - { - series.CoverImage = filePath; - series.CoverImageLocked = true; - _unitOfWork.SeriesRepository.Update(series); - } - - if (_unitOfWork.HasChanges()) - { - await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, - MessageFactory.CoverUpdateEvent(series.Id, MessageFactoryEntityTypes.Series), false); - await _unitOfWork.CommitAsync(); - return Ok(); - } - - } - catch (Exception e) - { - _logger.LogError(e, "There was an issue uploading cover image for Series {Id}", uploadFileDto.Id); - await _unitOfWork.RollbackAsync(); + await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, + MessageFactory.CoverUpdateEvent(series.Id, MessageFactoryEntityTypes.Series), false); + await _unitOfWork.CommitAsync(); + return Ok(); } - return BadRequest("Unable to save cover image to Series"); + } + catch 
(Exception e) + { + _logger.LogError(e, "There was an issue uploading cover image for Series {Id}", uploadFileDto.Id); + await _unitOfWork.RollbackAsync(); } - /// - /// Replaces collection tag cover image and locks it with a base64 encoded image - /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [RequestSizeLimit(8_000_000)] - [HttpPost("collection")] - public async Task UploadCollectionCoverImageFromUrl(UploadFileDto uploadFileDto) + return BadRequest("Unable to save cover image to Series"); + } + + /// + /// Replaces collection tag cover image and locks it with a base64 encoded image + /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [RequestSizeLimit(8_000_000)] + [HttpPost("collection")] + public async Task UploadCollectionCoverImageFromUrl(UploadFileDto uploadFileDto) + { + // Check if Url is non empty, request the image and place in temp, then ask image service to handle it. + // See if we can do this all in memory without touching underlying system + if (string.IsNullOrEmpty(uploadFileDto.Url)) { - // Check if Url is non empty, request the image and place in temp, then ask image service to handle it. 
- // See if we can do this all in memory without touching underlying system - if (string.IsNullOrEmpty(uploadFileDto.Url)) - { - return BadRequest("You must pass a url to use"); - } - - try - { - var filePath = _imageService.CreateThumbnailFromBase64(uploadFileDto.Url, $"{ImageService.GetCollectionTagFormat(uploadFileDto.Id)}"); - var tag = await _unitOfWork.CollectionTagRepository.GetTagAsync(uploadFileDto.Id); - - if (!string.IsNullOrEmpty(filePath)) - { - tag.CoverImage = filePath; - tag.CoverImageLocked = true; - _unitOfWork.CollectionTagRepository.Update(tag); - } - - if (_unitOfWork.HasChanges()) - { - await _unitOfWork.CommitAsync(); - await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, - MessageFactory.CoverUpdateEvent(tag.Id, MessageFactoryEntityTypes.CollectionTag), false); - return Ok(); - } - - } - catch (Exception e) - { - _logger.LogError(e, "There was an issue uploading cover image for Collection Tag {Id}", uploadFileDto.Id); - await _unitOfWork.RollbackAsync(); - } - - return BadRequest("Unable to save cover image to Collection Tag"); + return BadRequest("You must pass a url to use"); } - /// - /// Replaces reading list cover image and locks it with a base64 encoded image - /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [RequestSizeLimit(8_000_000)] - [HttpPost("reading-list")] - public async Task UploadReadingListCoverImageFromUrl(UploadFileDto uploadFileDto) + try { - // Check if Url is non empty, request the image and place in temp, then ask image service to handle it. 
- // See if we can do this all in memory without touching underlying system - if (string.IsNullOrEmpty(uploadFileDto.Url)) + var filePath = _imageService.CreateThumbnailFromBase64(uploadFileDto.Url, $"{ImageService.GetCollectionTagFormat(uploadFileDto.Id)}"); + var tag = await _unitOfWork.CollectionTagRepository.GetTagAsync(uploadFileDto.Id); + + if (!string.IsNullOrEmpty(filePath)) { - return BadRequest("You must pass a url to use"); + tag.CoverImage = filePath; + tag.CoverImageLocked = true; + _unitOfWork.CollectionTagRepository.Update(tag); } - try + if (_unitOfWork.HasChanges()) { - var filePath = _imageService.CreateThumbnailFromBase64(uploadFileDto.Url, $"{ImageService.GetReadingListFormat(uploadFileDto.Id)}"); - var readingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(uploadFileDto.Id); - - if (!string.IsNullOrEmpty(filePath)) - { - readingList.CoverImage = filePath; - readingList.CoverImageLocked = true; - _unitOfWork.ReadingListRepository.Update(readingList); - } - - if (_unitOfWork.HasChanges()) - { - await _unitOfWork.CommitAsync(); - await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, - MessageFactory.CoverUpdateEvent(readingList.Id, MessageFactoryEntityTypes.ReadingList), false); - return Ok(); - } - - } - catch (Exception e) - { - _logger.LogError(e, "There was an issue uploading cover image for Reading List {Id}", uploadFileDto.Id); - await _unitOfWork.RollbackAsync(); + await _unitOfWork.CommitAsync(); + await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, + MessageFactory.CoverUpdateEvent(tag.Id, MessageFactoryEntityTypes.CollectionTag), false); + return Ok(); } - return BadRequest("Unable to save cover image to Reading List"); + } + catch (Exception e) + { + _logger.LogError(e, "There was an issue uploading cover image for Collection Tag {Id}", uploadFileDto.Id); + await _unitOfWork.RollbackAsync(); } - /// - /// Replaces chapter cover image and locks it with a base64 encoded image. 
This will update the parent volume's cover image. - /// - /// - /// - [Authorize(Policy = "RequireAdminRole")] - [RequestSizeLimit(8_000_000)] - [HttpPost("chapter")] - public async Task UploadChapterCoverImageFromUrl(UploadFileDto uploadFileDto) + return BadRequest("Unable to save cover image to Collection Tag"); + } + + /// + /// Replaces reading list cover image and locks it with a base64 encoded image + /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [RequestSizeLimit(8_000_000)] + [HttpPost("reading-list")] + public async Task UploadReadingListCoverImageFromUrl(UploadFileDto uploadFileDto) + { + // Check if Url is non empty, request the image and place in temp, then ask image service to handle it. + // See if we can do this all in memory without touching underlying system + if (string.IsNullOrEmpty(uploadFileDto.Url)) { - // Check if Url is non empty, request the image and place in temp, then ask image service to handle it. - // See if we can do this all in memory without touching underlying system - if (string.IsNullOrEmpty(uploadFileDto.Url)) - { - return BadRequest("You must pass a url to use"); - } - - try - { - var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(uploadFileDto.Id); - var filePath = _imageService.CreateThumbnailFromBase64(uploadFileDto.Url, $"{ImageService.GetChapterFormat(uploadFileDto.Id, chapter.VolumeId)}"); - - if (!string.IsNullOrEmpty(filePath)) - { - chapter.CoverImage = filePath; - chapter.CoverImageLocked = true; - _unitOfWork.ChapterRepository.Update(chapter); - var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(chapter.VolumeId); - volume.CoverImage = chapter.CoverImage; - _unitOfWork.VolumeRepository.Update(volume); - } - - if (_unitOfWork.HasChanges()) - { - await _unitOfWork.CommitAsync(); - await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, - MessageFactory.CoverUpdateEvent(chapter.VolumeId, MessageFactoryEntityTypes.Volume), false); - await 
_eventHub.SendMessageAsync(MessageFactory.CoverUpdate, - MessageFactory.CoverUpdateEvent(chapter.Id, MessageFactoryEntityTypes.Chapter), false); - return Ok(); - } - - } - catch (Exception e) - { - _logger.LogError(e, "There was an issue uploading cover image for Chapter {Id}", uploadFileDto.Id); - await _unitOfWork.RollbackAsync(); - } - - return BadRequest("Unable to save cover image to Chapter"); + return BadRequest("You must pass a url to use"); } - /// - /// Replaces chapter cover image and locks it with a base64 encoded image. This will update the parent volume's cover image. - /// - /// Does not use Url property - /// - [Authorize(Policy = "RequireAdminRole")] - [HttpPost("reset-chapter-lock")] - public async Task ResetChapterLock(UploadFileDto uploadFileDto) + try { - try + var filePath = _imageService.CreateThumbnailFromBase64(uploadFileDto.Url, $"{ImageService.GetReadingListFormat(uploadFileDto.Id)}"); + var readingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(uploadFileDto.Id); + + if (!string.IsNullOrEmpty(filePath)) { - var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(uploadFileDto.Id); - var originalFile = chapter.CoverImage; - chapter.CoverImage = string.Empty; - chapter.CoverImageLocked = false; + readingList.CoverImage = filePath; + readingList.CoverImageLocked = true; + _unitOfWork.ReadingListRepository.Update(readingList); + } + + if (_unitOfWork.HasChanges()) + { + await _unitOfWork.CommitAsync(); + await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, + MessageFactory.CoverUpdateEvent(readingList.Id, MessageFactoryEntityTypes.ReadingList), false); + return Ok(); + } + + } + catch (Exception e) + { + _logger.LogError(e, "There was an issue uploading cover image for Reading List {Id}", uploadFileDto.Id); + await _unitOfWork.RollbackAsync(); + } + + return BadRequest("Unable to save cover image to Reading List"); + } + + /// + /// Replaces chapter cover image and locks it with a base64 encoded 
image. This will update the parent volume's cover image. + /// + /// + /// + [Authorize(Policy = "RequireAdminRole")] + [RequestSizeLimit(8_000_000)] + [HttpPost("chapter")] + public async Task UploadChapterCoverImageFromUrl(UploadFileDto uploadFileDto) + { + // Check if Url is non empty, request the image and place in temp, then ask image service to handle it. + // See if we can do this all in memory without touching underlying system + if (string.IsNullOrEmpty(uploadFileDto.Url)) + { + return BadRequest("You must pass a url to use"); + } + + try + { + var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(uploadFileDto.Id); + var filePath = _imageService.CreateThumbnailFromBase64(uploadFileDto.Url, $"{ImageService.GetChapterFormat(uploadFileDto.Id, chapter.VolumeId)}"); + + if (!string.IsNullOrEmpty(filePath)) + { + chapter.CoverImage = filePath; + chapter.CoverImageLocked = true; _unitOfWork.ChapterRepository.Update(chapter); var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(chapter.VolumeId); volume.CoverImage = chapter.CoverImage; _unitOfWork.VolumeRepository.Update(volume); - var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId); - - if (_unitOfWork.HasChanges()) - { - await _unitOfWork.CommitAsync(); - System.IO.File.Delete(originalFile); - _taskScheduler.RefreshSeriesMetadata(series.LibraryId, series.Id, true); - return Ok(); - } - } - catch (Exception e) + + if (_unitOfWork.HasChanges()) { - _logger.LogError(e, "There was an issue resetting cover lock for Chapter {Id}", uploadFileDto.Id); - await _unitOfWork.RollbackAsync(); + await _unitOfWork.CommitAsync(); + await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, + MessageFactory.CoverUpdateEvent(chapter.VolumeId, MessageFactoryEntityTypes.Volume), false); + await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate, + MessageFactory.CoverUpdateEvent(chapter.Id, MessageFactoryEntityTypes.Chapter), false); + return Ok(); } - return 
BadRequest("Unable to resetting cover lock for Chapter"); + } + } + + /// + /// Resets (unlocks) the chapter cover image: clears the stored cover, removes the lock, and queues a series metadata refresh to regenerate covers. This will update the parent volume's cover image. + /// + /// Does not use Url property + /// + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("reset-chapter-lock")] + public async Task ResetChapterLock(UploadFileDto uploadFileDto) + { + try + { + var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(uploadFileDto.Id); + var originalFile = chapter.CoverImage; + chapter.CoverImage = string.Empty; + chapter.CoverImageLocked = false; + _unitOfWork.ChapterRepository.Update(chapter); + var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(chapter.VolumeId); + volume.CoverImage = chapter.CoverImage; + _unitOfWork.VolumeRepository.Update(volume); + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId); + + if (_unitOfWork.HasChanges()) + { + await _unitOfWork.CommitAsync(); + System.IO.File.Delete(originalFile); + _taskScheduler.RefreshSeriesMetadata(series.LibraryId, series.Id, true); + return Ok(); + } + + } + catch (Exception e) + { + _logger.LogError(e, "There was an issue resetting cover lock for Chapter {Id}", uploadFileDto.Id); + await _unitOfWork.RollbackAsync(); + } + + return BadRequest("Unable to resetting cover lock for Chapter"); + } + } diff --git a/API/Controllers/UsersController.cs b/API/Controllers/UsersController.cs index f74fac133..72d99e13c 100644 --- a/API/Controllers/UsersController.cs +++ b/API/Controllers/UsersController.cs @@ -13,112 +13,113 @@ using AutoMapper; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; -namespace API.Controllers +namespace API.Controllers; + +[Authorize] +public class 
UsersController : BaseApiController { - [Authorize] - public class UsersController : BaseApiController + private readonly IUnitOfWork _unitOfWork; + private readonly IMapper _mapper; + private readonly IEventHub _eventHub; + + public UsersController(IUnitOfWork unitOfWork, IMapper mapper, IEventHub eventHub) { - private readonly IUnitOfWork _unitOfWork; - private readonly IMapper _mapper; - private readonly IEventHub _eventHub; + _unitOfWork = unitOfWork; + _mapper = mapper; + _eventHub = eventHub; + } - public UsersController(IUnitOfWork unitOfWork, IMapper mapper, IEventHub eventHub) + [Authorize(Policy = "RequireAdminRole")] + [HttpDelete("delete-user")] + public async Task DeleteUser(string username) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(username); + _unitOfWork.UserRepository.Delete(user); + + if (await _unitOfWork.CommitAsync()) return Ok(); + + return BadRequest("Could not delete the user."); + } + + [Authorize(Policy = "RequireAdminRole")] + [HttpGet] + public async Task>> GetUsers() + { + return Ok(await _unitOfWork.UserRepository.GetEmailConfirmedMemberDtosAsync()); + } + + [Authorize(Policy = "RequireAdminRole")] + [HttpGet("pending")] + public async Task>> GetPendingUsers() + { + return Ok(await _unitOfWork.UserRepository.GetPendingMemberDtosAsync()); + } + + + [HttpGet("has-reading-progress")] + public async Task> HasReadingProgress(int libraryId) + { + var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); + var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None); + return Ok(await _unitOfWork.AppUserProgressRepository.UserHasProgress(library.Type, userId)); + } + + [HttpGet("has-library-access")] + public async Task> HasLibraryAccess(int libraryId) + { + var libs = await _unitOfWork.LibraryRepository.GetLibraryDtosForUsernameAsync(User.GetUsername()); + return Ok(libs.Any(x => x.Id == libraryId)); + } + + 
[HttpPost("update-preferences")] + public async Task> UpdatePreferences(UserPreferencesDto preferencesDto) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), + AppUserIncludes.UserPreferences); + var existingPreferences = user.UserPreferences; + + preferencesDto.Theme ??= await _unitOfWork.SiteThemeRepository.GetDefaultTheme(); + + existingPreferences.ReadingDirection = preferencesDto.ReadingDirection; + existingPreferences.ScalingOption = preferencesDto.ScalingOption; + existingPreferences.PageSplitOption = preferencesDto.PageSplitOption; + existingPreferences.AutoCloseMenu = preferencesDto.AutoCloseMenu; + existingPreferences.ShowScreenHints = preferencesDto.ShowScreenHints; + existingPreferences.ReaderMode = preferencesDto.ReaderMode; + existingPreferences.LayoutMode = preferencesDto.LayoutMode; + existingPreferences.BackgroundColor = string.IsNullOrEmpty(preferencesDto.BackgroundColor) ? "#000000" : preferencesDto.BackgroundColor; + existingPreferences.BookReaderMargin = preferencesDto.BookReaderMargin; + existingPreferences.BookReaderLineSpacing = preferencesDto.BookReaderLineSpacing; + existingPreferences.BookReaderFontFamily = preferencesDto.BookReaderFontFamily; + existingPreferences.BookReaderFontSize = preferencesDto.BookReaderFontSize; + existingPreferences.BookReaderTapToPaginate = preferencesDto.BookReaderTapToPaginate; + existingPreferences.BookReaderReadingDirection = preferencesDto.BookReaderReadingDirection; + existingPreferences.BookThemeName = preferencesDto.BookReaderThemeName; + existingPreferences.BookReaderLayoutMode = preferencesDto.BookReaderLayoutMode; + existingPreferences.BookReaderImmersiveMode = preferencesDto.BookReaderImmersiveMode; + existingPreferences.GlobalPageLayoutMode = preferencesDto.GlobalPageLayoutMode; + existingPreferences.BlurUnreadSummaries = preferencesDto.BlurUnreadSummaries; + existingPreferences.Theme = await 
_unitOfWork.SiteThemeRepository.GetThemeById(preferencesDto.Theme.Id); + existingPreferences.LayoutMode = preferencesDto.LayoutMode; + existingPreferences.PromptForDownloadSize = preferencesDto.PromptForDownloadSize; + existingPreferences.NoTransitions = preferencesDto.NoTransitions; + + _unitOfWork.UserRepository.Update(existingPreferences); + + if (await _unitOfWork.CommitAsync()) { - _unitOfWork = unitOfWork; - _mapper = mapper; - _eventHub = eventHub; + await _eventHub.SendMessageToAsync(MessageFactory.UserUpdate, MessageFactory.UserUpdateEvent(user.Id, user.UserName), user.Id); + return Ok(preferencesDto); } - [Authorize(Policy = "RequireAdminRole")] - [HttpDelete("delete-user")] - public async Task DeleteUser(string username) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(username); - _unitOfWork.UserRepository.Delete(user); + return BadRequest("There was an issue saving preferences."); + } - if (await _unitOfWork.CommitAsync()) return Ok(); + [HttpGet("get-preferences")] + public async Task> GetPreferences() + { + return _mapper.Map( + await _unitOfWork.UserRepository.GetPreferencesAsync(User.GetUsername())); - return BadRequest("Could not delete the user."); - } - - [Authorize(Policy = "RequireAdminRole")] - [HttpGet] - public async Task>> GetUsers() - { - return Ok(await _unitOfWork.UserRepository.GetEmailConfirmedMemberDtosAsync()); - } - - [Authorize(Policy = "RequireAdminRole")] - [HttpGet("pending")] - public async Task>> GetPendingUsers() - { - return Ok(await _unitOfWork.UserRepository.GetPendingMemberDtosAsync()); - } - - - [HttpGet("has-reading-progress")] - public async Task> HasReadingProgress(int libraryId) - { - var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); - var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None); - return Ok(await _unitOfWork.AppUserProgressRepository.UserHasProgress(library.Type, userId)); - } - - 
[HttpGet("has-library-access")] - public async Task> HasLibraryAccess(int libraryId) - { - var libs = await _unitOfWork.LibraryRepository.GetLibraryDtosForUsernameAsync(User.GetUsername()); - return Ok(libs.Any(x => x.Id == libraryId)); - } - - [HttpPost("update-preferences")] - public async Task> UpdatePreferences(UserPreferencesDto preferencesDto) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), - AppUserIncludes.UserPreferences); - var existingPreferences = user.UserPreferences; - - existingPreferences.ReadingDirection = preferencesDto.ReadingDirection; - existingPreferences.ScalingOption = preferencesDto.ScalingOption; - existingPreferences.PageSplitOption = preferencesDto.PageSplitOption; - existingPreferences.AutoCloseMenu = preferencesDto.AutoCloseMenu; - existingPreferences.ShowScreenHints = preferencesDto.ShowScreenHints; - existingPreferences.ReaderMode = preferencesDto.ReaderMode; - existingPreferences.LayoutMode = preferencesDto.LayoutMode; - existingPreferences.BackgroundColor = string.IsNullOrEmpty(preferencesDto.BackgroundColor) ? 
"#000000" : preferencesDto.BackgroundColor; - existingPreferences.BookReaderMargin = preferencesDto.BookReaderMargin; - existingPreferences.BookReaderLineSpacing = preferencesDto.BookReaderLineSpacing; - existingPreferences.BookReaderFontFamily = preferencesDto.BookReaderFontFamily; - existingPreferences.BookReaderFontSize = preferencesDto.BookReaderFontSize; - existingPreferences.BookReaderTapToPaginate = preferencesDto.BookReaderTapToPaginate; - existingPreferences.BookReaderReadingDirection = preferencesDto.BookReaderReadingDirection; - preferencesDto.Theme ??= await _unitOfWork.SiteThemeRepository.GetDefaultTheme(); - existingPreferences.BookThemeName = preferencesDto.BookReaderThemeName; - existingPreferences.BookReaderLayoutMode = preferencesDto.BookReaderLayoutMode; - existingPreferences.BookReaderImmersiveMode = preferencesDto.BookReaderImmersiveMode; - existingPreferences.GlobalPageLayoutMode = preferencesDto.GlobalPageLayoutMode; - existingPreferences.BlurUnreadSummaries = preferencesDto.BlurUnreadSummaries; - existingPreferences.Theme = await _unitOfWork.SiteThemeRepository.GetThemeById(preferencesDto.Theme.Id); - existingPreferences.LayoutMode = preferencesDto.LayoutMode; - existingPreferences.PromptForDownloadSize = preferencesDto.PromptForDownloadSize; - - _unitOfWork.UserRepository.Update(existingPreferences); - - if (await _unitOfWork.CommitAsync()) - { - await _eventHub.SendMessageToAsync(MessageFactory.UserUpdate, MessageFactory.UserUpdateEvent(user.Id, user.UserName), user.Id); - return Ok(preferencesDto); - } - - return BadRequest("There was an issue saving preferences."); - } - - [HttpGet("get-preferences")] - public async Task> GetPreferences() - { - return _mapper.Map( - await _unitOfWork.UserRepository.GetPreferencesAsync(User.GetUsername())); - - } } } diff --git a/API/DTOs/Account/AgeRestrictionDto.cs b/API/DTOs/Account/AgeRestrictionDto.cs new file mode 100644 index 000000000..ad4534b35 --- /dev/null +++ 
b/API/DTOs/Account/AgeRestrictionDto.cs @@ -0,0 +1,16 @@ +using API.Entities.Enums; + +namespace API.DTOs.Account; + +public class AgeRestrictionDto +{ + /// + /// The maximum age rating a user has access to. -1 if not applicable + /// + public AgeRating AgeRating { get; set; } = AgeRating.NotApplicable; + /// + /// Are Unknowns explicitly allowed against age rating + /// + /// Unknown is always lowest and default age rating. Setting this to false will ensure Teen age rating applies and unknowns are still filtered + public bool IncludeUnknowns { get; set; } = false; +} diff --git a/API/DTOs/Account/ConfirmEmailUpdateDto.cs b/API/DTOs/Account/ConfirmEmailUpdateDto.cs new file mode 100644 index 000000000..63d31340a --- /dev/null +++ b/API/DTOs/Account/ConfirmEmailUpdateDto.cs @@ -0,0 +1,11 @@ +using System.ComponentModel.DataAnnotations; + +namespace API.DTOs.Account; + +public class ConfirmEmailUpdateDto +{ + [Required] + public string Email { get; set; } + [Required] + public string Token { get; set; } +} diff --git a/API/DTOs/Account/InviteUserDto.cs b/API/DTOs/Account/InviteUserDto.cs index 9d0d9416d..9532b86dd 100644 --- a/API/DTOs/Account/InviteUserDto.cs +++ b/API/DTOs/Account/InviteUserDto.cs @@ -1,5 +1,6 @@ using System.Collections.Generic; using System.ComponentModel.DataAnnotations; +using API.Entities.Enums; namespace API.DTOs.Account; @@ -16,4 +17,8 @@ public class InviteUserDto /// A list of libraries to grant access to /// public IList Libraries { get; init; } + /// + /// An Age Rating which will limit the account to seeing everything equal to or below said rating. 
+ /// + public AgeRestrictionDto AgeRestriction { get; set; } } diff --git a/API/DTOs/Account/LoginDto.cs b/API/DTOs/Account/LoginDto.cs index a21e9868f..44ccc5fc5 100644 --- a/API/DTOs/Account/LoginDto.cs +++ b/API/DTOs/Account/LoginDto.cs @@ -1,8 +1,7 @@ -namespace API.DTOs.Account +namespace API.DTOs.Account; + +public class LoginDto { - public class LoginDto - { - public string Username { get; init; } - public string Password { get; set; } - } + public string Username { get; init; } + public string Password { get; set; } } diff --git a/API/DTOs/Account/ResetPasswordDto.cs b/API/DTOs/Account/ResetPasswordDto.cs index 563aad9f4..9fa42d8ac 100644 --- a/API/DTOs/Account/ResetPasswordDto.cs +++ b/API/DTOs/Account/ResetPasswordDto.cs @@ -1,23 +1,22 @@ using System.ComponentModel.DataAnnotations; -namespace API.DTOs.Account +namespace API.DTOs.Account; + +public class ResetPasswordDto { - public class ResetPasswordDto - { - /// - /// The Username of the User - /// - [Required] - public string UserName { get; init; } - /// - /// The new password - /// - [Required] - [StringLength(32, MinimumLength = 6)] - public string Password { get; init; } - /// - /// The old, existing password. If an admin is performing the change, this is not required. Otherwise, it is. - /// - public string OldPassword { get; init; } - } + /// + /// The Username of the User + /// + [Required] + public string UserName { get; init; } + /// + /// The new password + /// + [Required] + [StringLength(32, MinimumLength = 6)] + public string Password { get; init; } + /// + /// The old, existing password. If an admin is performing the change, this is not required. Otherwise, it is. 
+ /// + public string OldPassword { get; init; } } diff --git a/API/DTOs/Account/UpdateAgeRestrictionDto.cs b/API/DTOs/Account/UpdateAgeRestrictionDto.cs new file mode 100644 index 000000000..ef6be1bba --- /dev/null +++ b/API/DTOs/Account/UpdateAgeRestrictionDto.cs @@ -0,0 +1,12 @@ +using System.ComponentModel.DataAnnotations; +using API.Entities.Enums; + +namespace API.DTOs.Account; + +public class UpdateAgeRestrictionDto +{ + [Required] + public AgeRating AgeRating { get; set; } + [Required] + public bool IncludeUnknowns { get; set; } +} diff --git a/API/DTOs/Account/UpdateEmailDto.cs b/API/DTOs/Account/UpdateEmailDto.cs new file mode 100644 index 000000000..9b92095d8 --- /dev/null +++ b/API/DTOs/Account/UpdateEmailDto.cs @@ -0,0 +1,6 @@ +namespace API.DTOs.Account; + +public class UpdateEmailDto +{ + public string Email { get; set; } +} diff --git a/API/DTOs/Account/UpdateEmailResponse.cs b/API/DTOs/Account/UpdateEmailResponse.cs new file mode 100644 index 000000000..4f9b816c1 --- /dev/null +++ b/API/DTOs/Account/UpdateEmailResponse.cs @@ -0,0 +1,14 @@ +namespace API.DTOs.Account; + +public class UpdateEmailResponse +{ + /// + /// Did the user not have an existing email + /// + /// This informs the user to check the new email address + public bool HadNoExistingEmail { get; set; } + /// + /// Was an email sent (ie is this server accessible) + /// + public bool EmailSent { get; set; } +} diff --git a/API/DTOs/Account/UpdateUserDto.cs b/API/DTOs/Account/UpdateUserDto.cs index f3afb98a5..7a928690c 100644 --- a/API/DTOs/Account/UpdateUserDto.cs +++ b/API/DTOs/Account/UpdateUserDto.cs @@ -1,4 +1,7 @@ using System.Collections.Generic; +using System.Text.Json.Serialization; +using API.Entities.Enums; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion.Internal; namespace API.DTOs.Account; @@ -6,18 +9,16 @@ public record UpdateUserDto { public int UserId { get; set; } public string Username { get; set; } - /// - /// This field will not result in any change to 
the User model. Changing email is not supported. - /// - public string Email { get; set; } - /// /// List of Roles to assign to user. If admin not present, Pleb will be applied. /// If admin present, all libraries will be granted access and will ignore those from DTO. - /// public IList Roles { get; init; } /// /// A list of libraries to grant access to /// public IList Libraries { get; init; } + /// + /// An Age Rating which will limit the account to seeing everything equal to or below said rating. + /// + public AgeRestrictionDto AgeRestriction { get; init; } } diff --git a/API/DTOs/ChapterDto.cs b/API/DTOs/ChapterDto.cs index beccf26d0..60e08b554 100644 --- a/API/DTOs/ChapterDto.cs +++ b/API/DTOs/ChapterDto.cs @@ -5,89 +5,88 @@ using API.DTOs.Reader; using API.Entities.Enums; using API.Entities.Interfaces; -namespace API.DTOs -{ - /// - /// A Chapter is the lowest grouping of a reading medium. A Chapter contains a set of MangaFiles which represents the underlying - /// file (abstracted from type). - /// - public class ChapterDto : IHasReadTimeEstimate - { - public int Id { get; init; } - /// - /// Range of chapters. Chapter 2-4 -> "2-4". Chapter 2 -> "2". - /// - public string Range { get; init; } - /// - /// Smallest number of the Range. - /// - public string Number { get; init; } - /// - /// Total number of pages in all MangaFiles - /// - public int Pages { get; init; } - /// - /// If this Chapter contains files that could only be identified as Series or has Special Identifier from filename - /// - public bool IsSpecial { get; init; } - /// - /// Used for books/specials to display custom title. For non-specials/books, will be set to - /// - public string Title { get; set; } - /// - /// The files that represent this Chapter - /// - public ICollection Files { get; init; } - /// - /// Calculated at API time. Number of pages read for this Chapter for logged in user. 
- /// - public int PagesRead { get; set; } - /// - /// If the Cover Image is locked for this entity - /// - public bool CoverImageLocked { get; set; } - /// - /// Volume Id this Chapter belongs to - /// - public int VolumeId { get; init; } - /// - /// When chapter was created - /// - public DateTime Created { get; init; } - /// - /// When the chapter was released. - /// - /// Metadata field - public DateTime ReleaseDate { get; init; } - /// - /// Title of the Chapter/Issue - /// - /// Metadata field - public string TitleName { get; set; } - /// - /// Summary of the Chapter - /// - /// This is not set normally, only for Series Detail - public string Summary { get; init; } - /// - /// Age Rating for the issue/chapter - /// - public AgeRating AgeRating { get; init; } - /// - /// Total words in a Chapter (books only) - /// - public long WordCount { get; set; } = 0L; +namespace API.DTOs; - /// - /// Formatted Volume title ie) Volume 2. - /// - /// Only available when fetched from Series Detail API - public string VolumeTitle { get; set; } = string.Empty; - /// - public int MinHoursToRead { get; set; } - /// - public int MaxHoursToRead { get; set; } - /// - public int AvgHoursToRead { get; set; } - } +/// +/// A Chapter is the lowest grouping of a reading medium. A Chapter contains a set of MangaFiles which represents the underlying +/// file (abstracted from type). +/// +public class ChapterDto : IHasReadTimeEstimate +{ + public int Id { get; init; } + /// + /// Range of chapters. Chapter 2-4 -> "2-4". Chapter 2 -> "2". + /// + public string Range { get; init; } + /// + /// Smallest number of the Range. + /// + public string Number { get; init; } + /// + /// Total number of pages in all MangaFiles + /// + public int Pages { get; init; } + /// + /// If this Chapter contains files that could only be identified as Series or has Special Identifier from filename + /// + public bool IsSpecial { get; init; } + /// + /// Used for books/specials to display custom title. 
For non-specials/books, will be set to + /// + public string Title { get; set; } + /// + /// The files that represent this Chapter + /// + public ICollection Files { get; init; } + /// + /// Calculated at API time. Number of pages read for this Chapter for logged in user. + /// + public int PagesRead { get; set; } + /// + /// If the Cover Image is locked for this entity + /// + public bool CoverImageLocked { get; set; } + /// + /// Volume Id this Chapter belongs to + /// + public int VolumeId { get; init; } + /// + /// When chapter was created + /// + public DateTime Created { get; init; } + /// + /// When the chapter was released. + /// + /// Metadata field + public DateTime ReleaseDate { get; init; } + /// + /// Title of the Chapter/Issue + /// + /// Metadata field + public string TitleName { get; set; } + /// + /// Summary of the Chapter + /// + /// This is not set normally, only for Series Detail + public string Summary { get; init; } + /// + /// Age Rating for the issue/chapter + /// + public AgeRating AgeRating { get; init; } + /// + /// Total words in a Chapter (books only) + /// + public long WordCount { get; set; } = 0L; + + /// + /// Formatted Volume title ie) Volume 2. 
+ /// + /// Only available when fetched from Series Detail API + public string VolumeTitle { get; set; } = string.Empty; + /// + public int MinHoursToRead { get; set; } + /// + public int MaxHoursToRead { get; set; } + /// + public int AvgHoursToRead { get; set; } } diff --git a/API/DTOs/CollectionTags/CollectionTagBulkAddDto.cs b/API/DTOs/CollectionTags/CollectionTagBulkAddDto.cs index ac28e81cb..7b9ebc94d 100644 --- a/API/DTOs/CollectionTags/CollectionTagBulkAddDto.cs +++ b/API/DTOs/CollectionTags/CollectionTagBulkAddDto.cs @@ -1,18 +1,17 @@ using System.Collections.Generic; -namespace API.DTOs.CollectionTags +namespace API.DTOs.CollectionTags; + +public class CollectionTagBulkAddDto { - public class CollectionTagBulkAddDto - { - /// - /// Collection Tag Id - /// - /// Can be 0 which then will use Title to create a tag - public int CollectionTagId { get; init; } - public string CollectionTagTitle { get; init; } - /// - /// Series Ids to add onto Collection Tag - /// - public IEnumerable SeriesIds { get; init; } - } + /// + /// Collection Tag Id + /// + /// Can be 0 which then will use Title to create a tag + public int CollectionTagId { get; init; } + public string CollectionTagTitle { get; init; } + /// + /// Series Ids to add onto Collection Tag + /// + public IEnumerable SeriesIds { get; init; } } diff --git a/API/DTOs/CollectionTags/CollectionTagDto.cs b/API/DTOs/CollectionTags/CollectionTagDto.cs index 490e7d1ad..8cb68cc06 100644 --- a/API/DTOs/CollectionTags/CollectionTagDto.cs +++ b/API/DTOs/CollectionTags/CollectionTagDto.cs @@ -1,15 +1,14 @@ -namespace API.DTOs.CollectionTags +namespace API.DTOs.CollectionTags; + +public class CollectionTagDto { - public class CollectionTagDto - { - public int Id { get; set; } - public string Title { get; set; } - public string Summary { get; set; } - public bool Promoted { get; set; } - /// - /// The cover image string. 
This is used on Frontend to show or hide the Cover Image - /// - public string CoverImage { get; set; } - public bool CoverImageLocked { get; set; } - } + public int Id { get; set; } + public string Title { get; set; } + public string Summary { get; set; } + public bool Promoted { get; set; } + /// + /// The cover image string. This is used on Frontend to show or hide the Cover Image + /// + public string CoverImage { get; set; } + public bool CoverImageLocked { get; set; } } diff --git a/API/DTOs/CollectionTags/UpdateSeriesForTagDto.cs b/API/DTOs/CollectionTags/UpdateSeriesForTagDto.cs index 1a844ee18..2381df285 100644 --- a/API/DTOs/CollectionTags/UpdateSeriesForTagDto.cs +++ b/API/DTOs/CollectionTags/UpdateSeriesForTagDto.cs @@ -1,10 +1,9 @@ using System.Collections.Generic; -namespace API.DTOs.CollectionTags +namespace API.DTOs.CollectionTags; + +public class UpdateSeriesForTagDto { - public class UpdateSeriesForTagDto - { - public CollectionTagDto Tag { get; init; } - public IEnumerable SeriesIdsToRemove { get; init; } - } + public CollectionTagDto Tag { get; init; } + public IEnumerable SeriesIdsToRemove { get; init; } } diff --git a/API/DTOs/CreateLibraryDto.cs b/API/DTOs/CreateLibraryDto.cs index f9aa14639..151bcfeba 100644 --- a/API/DTOs/CreateLibraryDto.cs +++ b/API/DTOs/CreateLibraryDto.cs @@ -2,16 +2,15 @@ using System.ComponentModel.DataAnnotations; using API.Entities.Enums; -namespace API.DTOs +namespace API.DTOs; + +public class CreateLibraryDto { - public class CreateLibraryDto - { - [Required] - public string Name { get; init; } - [Required] - public LibraryType Type { get; init; } - [Required] - [MinLength(1)] - public IEnumerable Folders { get; init; } - } -} \ No newline at end of file + [Required] + public string Name { get; init; } + [Required] + public LibraryType Type { get; init; } + [Required] + [MinLength(1)] + public IEnumerable Folders { get; init; } +} diff --git a/API/DTOs/DeleteSeriesDto.cs b/API/DTOs/DeleteSeriesDto.cs index 
6908c21ac..a363d0568 100644 --- a/API/DTOs/DeleteSeriesDto.cs +++ b/API/DTOs/DeleteSeriesDto.cs @@ -1,9 +1,8 @@ using System.Collections.Generic; -namespace API.DTOs +namespace API.DTOs; + +public class DeleteSeriesDto { - public class DeleteSeriesDto - { - public IList SeriesIds { get; set; } - } + public IList SeriesIds { get; set; } } diff --git a/API/DTOs/Device/CreateDeviceDto.cs b/API/DTOs/Device/CreateDeviceDto.cs new file mode 100644 index 000000000..bdcdde194 --- /dev/null +++ b/API/DTOs/Device/CreateDeviceDto.cs @@ -0,0 +1,20 @@ +using System.ComponentModel.DataAnnotations; +using System.Runtime.InteropServices; +using API.Entities.Enums.Device; + +namespace API.DTOs.Device; + +public class CreateDeviceDto +{ + [Required] + public string Name { get; set; } + /// + /// Platform of the device. If not know, defaults to "Custom" + /// + [Required] + public DevicePlatform Platform { get; set; } + [Required] + public string EmailAddress { get; set; } + + +} diff --git a/API/DTOs/Device/DeviceDto.cs b/API/DTOs/Device/DeviceDto.cs new file mode 100644 index 000000000..e5344f31e --- /dev/null +++ b/API/DTOs/Device/DeviceDto.cs @@ -0,0 +1,33 @@ +using System; +using API.Entities.Enums.Device; + +namespace API.DTOs.Device; + +/// +/// A Device is an entity that can receive data from Kavita (kindle) +/// +public class DeviceDto +{ + /// + /// The device Id + /// + public int Id { get; set; } + /// + /// A name given to this device + /// + /// If this device is web, this will be the browser name + /// Pixel 3a, John's Kindle + public string Name { get; set; } + /// + /// An email address associated with the device (ie Kindle). 
Will be used with Send to functionality + /// + public string EmailAddress { get; set; } + /// + /// Platform (ie) Windows 10 + /// + public DevicePlatform Platform { get; set; } + /// + /// Last time this device was used to send a file + /// + public DateTime LastUsed { get; set; } +} diff --git a/API/DTOs/Device/SendToDeviceDto.cs b/API/DTOs/Device/SendToDeviceDto.cs new file mode 100644 index 000000000..411f20ea0 --- /dev/null +++ b/API/DTOs/Device/SendToDeviceDto.cs @@ -0,0 +1,9 @@ +using System.Collections.Generic; + +namespace API.DTOs.Device; + +public class SendToDeviceDto +{ + public int DeviceId { get; set; } + public IReadOnlyList ChapterIds { get; set; } +} diff --git a/API/DTOs/Device/UpdateDeviceDto.cs b/API/DTOs/Device/UpdateDeviceDto.cs new file mode 100644 index 000000000..201adcb5d --- /dev/null +++ b/API/DTOs/Device/UpdateDeviceDto.cs @@ -0,0 +1,19 @@ +using System.ComponentModel.DataAnnotations; +using API.Entities.Enums.Device; + +namespace API.DTOs.Device; + +public class UpdateDeviceDto +{ + [Required] + public int Id { get; set; } + [Required] + public string Name { get; set; } + /// + /// Platform of the device. 
If not know, defaults to "Custom" + /// + [Required] + public DevicePlatform Platform { get; set; } + [Required] + public string EmailAddress { get; set; } +} diff --git a/API/DTOs/Downloads/DownloadBookmarkDto.cs b/API/DTOs/Downloads/DownloadBookmarkDto.cs index b1158ff23..d70cd25ac 100644 --- a/API/DTOs/Downloads/DownloadBookmarkDto.cs +++ b/API/DTOs/Downloads/DownloadBookmarkDto.cs @@ -2,11 +2,10 @@ using System.ComponentModel.DataAnnotations; using API.DTOs.Reader; -namespace API.DTOs.Downloads +namespace API.DTOs.Downloads; + +public class DownloadBookmarkDto { - public class DownloadBookmarkDto - { - [Required] - public IEnumerable Bookmarks { get; set; } - } + [Required] + public IEnumerable Bookmarks { get; set; } } diff --git a/API/DTOs/Email/SendToDto.cs b/API/DTOs/Email/SendToDto.cs new file mode 100644 index 000000000..254f7fd09 --- /dev/null +++ b/API/DTOs/Email/SendToDto.cs @@ -0,0 +1,9 @@ +using System.Collections.Generic; + +namespace API.DTOs.Email; + +public class SendToDto +{ + public string DestinationEmail { get; set; } + public IEnumerable FilePaths { get; set; } +} diff --git a/API/DTOs/Filtering/FilterDto.cs b/API/DTOs/Filtering/FilterDto.cs index 892f9e6b9..f6c47f71f 100644 --- a/API/DTOs/Filtering/FilterDto.cs +++ b/API/DTOs/Filtering/FilterDto.cs @@ -3,101 +3,106 @@ using System.Runtime.InteropServices; using API.Entities; using API.Entities.Enums; -namespace API.DTOs.Filtering +namespace API.DTOs.Filtering; + +public class FilterDto { - public class FilterDto - { - /// - /// The type of Formats you want to be returned. An empty list will return all formats back - /// - public IList Formats { get; init; } = new List(); + /// + /// The type of Formats you want to be returned. An empty list will return all formats back + /// + public IList Formats { get; init; } = new List(); - /// - /// The progress you want to be returned. This can be bitwise manipulated. Defaults to all applicable states. 
- /// - public ReadStatus ReadStatus { get; init; } = new ReadStatus(); + /// + /// The progress you want to be returned. This can be bitwise manipulated. Defaults to all applicable states. + /// + public ReadStatus ReadStatus { get; init; } = new ReadStatus(); - /// - /// A list of library ids to restrict search to. Defaults to all libraries by passing empty list - /// - public IList Libraries { get; init; } = new List(); - /// - /// A list of Genre ids to restrict search to. Defaults to all genres by passing an empty list - /// - public IList Genres { get; init; } = new List(); - /// - /// A list of Writers to restrict search to. Defaults to all Writers by passing an empty list - /// - public IList Writers { get; init; } = new List(); - /// - /// A list of Penciller ids to restrict search to. Defaults to all Pencillers by passing an empty list - /// - public IList Penciller { get; init; } = new List(); - /// - /// A list of Inker ids to restrict search to. Defaults to all Inkers by passing an empty list - /// - public IList Inker { get; init; } = new List(); - /// - /// A list of Colorist ids to restrict search to. Defaults to all Colorists by passing an empty list - /// - public IList Colorist { get; init; } = new List(); - /// - /// A list of Letterer ids to restrict search to. Defaults to all Letterers by passing an empty list - /// - public IList Letterer { get; init; } = new List(); - /// - /// A list of CoverArtist ids to restrict search to. Defaults to all CoverArtists by passing an empty list - /// - public IList CoverArtist { get; init; } = new List(); - /// - /// A list of Editor ids to restrict search to. Defaults to all Editors by passing an empty list - /// - public IList Editor { get; init; } = new List(); - /// - /// A list of Publisher ids to restrict search to. Defaults to all Publishers by passing an empty list - /// - public IList Publisher { get; init; } = new List(); - /// - /// A list of Character ids to restrict search to. 
Defaults to all Characters by passing an empty list - /// - public IList Character { get; init; } = new List(); - /// - /// A list of Translator ids to restrict search to. Defaults to all Translatorss by passing an empty list - /// - public IList Translators { get; init; } = new List(); - /// - /// A list of Collection Tag ids to restrict search to. Defaults to all Collection Tags by passing an empty list - /// - public IList CollectionTags { get; init; } = new List(); - /// - /// A list of Tag ids to restrict search to. Defaults to all Tags by passing an empty list - /// - public IList Tags { get; init; } = new List(); - /// - /// Will return back everything with the rating and above - /// - /// - public int Rating { get; init; } - /// - /// Sorting Options for a query. Defaults to null, which uses the queries natural sorting order - /// - public SortOptions SortOptions { get; set; } = null; - /// - /// Age Ratings. Empty list will return everything back - /// - public IList AgeRating { get; init; } = new List(); - /// - /// Languages (ISO 639-1 code) to filter by. Empty list will return everything back - /// - public IList Languages { get; init; } = new List(); - /// - /// Publication statuses to filter by. Empty list will return everything back - /// - public IList PublicationStatus { get; init; } = new List(); + /// + /// A list of library ids to restrict search to. Defaults to all libraries by passing empty list + /// + public IList Libraries { get; init; } = new List(); + /// + /// A list of Genre ids to restrict search to. Defaults to all genres by passing an empty list + /// + public IList Genres { get; init; } = new List(); + /// + /// A list of Writers to restrict search to. Defaults to all Writers by passing an empty list + /// + public IList Writers { get; init; } = new List(); + /// + /// A list of Penciller ids to restrict search to. 
Defaults to all Pencillers by passing an empty list + /// + public IList Penciller { get; init; } = new List(); + /// + /// A list of Inker ids to restrict search to. Defaults to all Inkers by passing an empty list + /// + public IList Inker { get; init; } = new List(); + /// + /// A list of Colorist ids to restrict search to. Defaults to all Colorists by passing an empty list + /// + public IList Colorist { get; init; } = new List(); + /// + /// A list of Letterer ids to restrict search to. Defaults to all Letterers by passing an empty list + /// + public IList Letterer { get; init; } = new List(); + /// + /// A list of CoverArtist ids to restrict search to. Defaults to all CoverArtists by passing an empty list + /// + public IList CoverArtist { get; init; } = new List(); + /// + /// A list of Editor ids to restrict search to. Defaults to all Editors by passing an empty list + /// + public IList Editor { get; init; } = new List(); + /// + /// A list of Publisher ids to restrict search to. Defaults to all Publishers by passing an empty list + /// + public IList Publisher { get; init; } = new List(); + /// + /// A list of Character ids to restrict search to. Defaults to all Characters by passing an empty list + /// + public IList Character { get; init; } = new List(); + /// + /// A list of Translator ids to restrict search to. Defaults to all Translatorss by passing an empty list + /// + public IList Translators { get; init; } = new List(); + /// + /// A list of Collection Tag ids to restrict search to. Defaults to all Collection Tags by passing an empty list + /// + public IList CollectionTags { get; init; } = new List(); + /// + /// A list of Tag ids to restrict search to. Defaults to all Tags by passing an empty list + /// + public IList Tags { get; init; } = new List(); + /// + /// Will return back everything with the rating and above + /// + /// + public int Rating { get; init; } + /// + /// Sorting Options for a query. 
Defaults to null, which uses the queries natural sorting order + /// + public SortOptions SortOptions { get; set; } = null; + /// + /// Age Ratings. Empty list will return everything back + /// + public IList AgeRating { get; init; } = new List(); + /// + /// Languages (ISO 639-1 code) to filter by. Empty list will return everything back + /// + public IList Languages { get; init; } = new List(); + /// + /// Publication statuses to filter by. Empty list will return everything back + /// + public IList PublicationStatus { get; init; } = new List(); - /// - /// An optional name string to filter by. Empty string will ignore. - /// - public string SeriesNameQuery { get; init; } = string.Empty; - } + /// + /// An optional name string to filter by. Empty string will ignore. + /// + public string SeriesNameQuery { get; init; } = string.Empty; + #nullable enable + /// + /// An optional release year to filter by. Null will ignore. You can pass 0 for an individual field to ignore it. + /// + public Range? ReleaseYearRange { get; init; } = null; + #nullable disable } diff --git a/API/DTOs/Filtering/Range.cs b/API/DTOs/Filtering/Range.cs new file mode 100644 index 000000000..383ce7887 --- /dev/null +++ b/API/DTOs/Filtering/Range.cs @@ -0,0 +1,14 @@ +namespace API.DTOs.Filtering; +/// +/// Represents a range between two int/float/double +/// +public class Range +{ + public T Min { get; set; } + public T Max { get; set; } + + public override string ToString() + { + return $"{Min}-{Max}"; + } +} diff --git a/API/DTOs/Filtering/SortField.cs b/API/DTOs/Filtering/SortField.cs index fbb1d511a..918b74279 100644 --- a/API/DTOs/Filtering/SortField.cs +++ b/API/DTOs/Filtering/SortField.cs @@ -21,5 +21,9 @@ public enum SortField /// /// Time it takes to read. Uses Average. 
/// - TimeToRead = 5 + TimeToRead = 5, + /// + /// Release Year of the Series + /// + ReleaseYear = 6 } diff --git a/API/DTOs/LibraryDto.cs b/API/DTOs/LibraryDto.cs index 9289cfa21..4226acbd7 100644 --- a/API/DTOs/LibraryDto.cs +++ b/API/DTOs/LibraryDto.cs @@ -2,17 +2,16 @@ using System.Collections.Generic; using API.Entities.Enums; -namespace API.DTOs +namespace API.DTOs; + +public class LibraryDto { - public class LibraryDto - { - public int Id { get; init; } - public string Name { get; init; } - /// - /// Last time Library was scanned - /// - public DateTime LastScanned { get; init; } - public LibraryType Type { get; init; } - public ICollection Folders { get; init; } - } + public int Id { get; init; } + public string Name { get; init; } + /// + /// Last time Library was scanned + /// + public DateTime LastScanned { get; init; } + public LibraryType Type { get; init; } + public ICollection Folders { get; init; } } diff --git a/API/DTOs/MangaFileDto.cs b/API/DTOs/MangaFileDto.cs index a3e9c2713..d20da8eb5 100644 --- a/API/DTOs/MangaFileDto.cs +++ b/API/DTOs/MangaFileDto.cs @@ -1,15 +1,14 @@ using System; using API.Entities.Enums; -namespace API.DTOs -{ - public class MangaFileDto - { - public int Id { get; init; } - public string FilePath { get; init; } - public int Pages { get; init; } - public MangaFormat Format { get; init; } - public DateTime Created { get; init; } +namespace API.DTOs; + +public class MangaFileDto +{ + public int Id { get; init; } + public string FilePath { get; init; } + public int Pages { get; init; } + public MangaFormat Format { get; init; } + public DateTime Created { get; init; } - } } diff --git a/API/DTOs/MemberDto.cs b/API/DTOs/MemberDto.cs index 8215cebc2..1805c1d24 100644 --- a/API/DTOs/MemberDto.cs +++ b/API/DTOs/MemberDto.cs @@ -1,19 +1,22 @@ using System; using System.Collections.Generic; +using API.Data.Misc; +using API.DTOs.Account; +using API.Entities.Enums; -namespace API.DTOs +namespace API.DTOs; + +/// +/// Represents a 
member of a Kavita server. +/// +public class MemberDto { - /// - /// Represents a member of a Kavita server. - /// - public class MemberDto - { - public int Id { get; init; } - public string Username { get; init; } - public string Email { get; init; } - public DateTime Created { get; init; } - public DateTime LastActive { get; init; } - public IEnumerable Libraries { get; init; } - public IEnumerable Roles { get; init; } - } + public int Id { get; init; } + public string Username { get; init; } + public string Email { get; init; } + public AgeRestrictionDto AgeRestriction { get; init; } + public DateTime Created { get; init; } + public DateTime LastActive { get; init; } + public IEnumerable Libraries { get; init; } + public IEnumerable Roles { get; init; } } diff --git a/API/DTOs/Metadata/ChapterMetadataDto.cs b/API/DTOs/Metadata/ChapterMetadataDto.cs index 2c3add195..cea8638d3 100644 --- a/API/DTOs/Metadata/ChapterMetadataDto.cs +++ b/API/DTOs/Metadata/ChapterMetadataDto.cs @@ -1,56 +1,55 @@ using System.Collections.Generic; using API.Entities.Enums; -namespace API.DTOs.Metadata +namespace API.DTOs.Metadata; + +/// +/// Exclusively metadata about a given chapter +/// +public class ChapterMetadataDto { + public int Id { get; set; } + public int ChapterId { get; set; } + public string Title { get; set; } + public ICollection Writers { get; set; } = new List(); + public ICollection CoverArtists { get; set; } = new List(); + public ICollection Publishers { get; set; } = new List(); + public ICollection Characters { get; set; } = new List(); + public ICollection Pencillers { get; set; } = new List(); + public ICollection Inkers { get; set; } = new List(); + public ICollection Colorists { get; set; } = new List(); + public ICollection Letterers { get; set; } = new List(); + public ICollection Editors { get; set; } = new List(); + public ICollection Translators { get; set; } = new List(); + + public ICollection Genres { get; set; } = new List(); + /// - /// Exclusively 
metadata about a given chapter + /// Collection of all Tags from underlying chapters for a Series /// - public class ChapterMetadataDto - { - public int Id { get; set; } - public int ChapterId { get; set; } - public string Title { get; set; } - public ICollection Writers { get; set; } = new List(); - public ICollection CoverArtists { get; set; } = new List(); - public ICollection Publishers { get; set; } = new List(); - public ICollection Characters { get; set; } = new List(); - public ICollection Pencillers { get; set; } = new List(); - public ICollection Inkers { get; set; } = new List(); - public ICollection Colorists { get; set; } = new List(); - public ICollection Letterers { get; set; } = new List(); - public ICollection Editors { get; set; } = new List(); - public ICollection Translators { get; set; } = new List(); + public ICollection Tags { get; set; } = new List(); + public AgeRating AgeRating { get; set; } + public string ReleaseDate { get; set; } + public PublicationStatus PublicationStatus { get; set; } + /// + /// Summary for the Chapter/Issue + /// + public string Summary { get; set; } + /// + /// Language for the Chapter/Issue + /// + public string Language { get; set; } + /// + /// Number in the TotalCount of issues + /// + public int Count { get; set; } + /// + /// Total number of issues for the series + /// + public int TotalCount { get; set; } + /// + /// Number of Words for this chapter. 
Only applies to Epub + /// + public long WordCount { get; set; } - public ICollection Genres { get; set; } = new List(); - - /// - /// Collection of all Tags from underlying chapters for a Series - /// - public ICollection Tags { get; set; } = new List(); - public AgeRating AgeRating { get; set; } - public string ReleaseDate { get; set; } - public PublicationStatus PublicationStatus { get; set; } - /// - /// Summary for the Chapter/Issue - /// - public string Summary { get; set; } - /// - /// Language for the Chapter/Issue - /// - public string Language { get; set; } - /// - /// Number in the TotalCount of issues - /// - public int Count { get; set; } - /// - /// Total number of issues for the series - /// - public int TotalCount { get; set; } - /// - /// Number of Words for this chapter. Only applies to Epub - /// - public long WordCount { get; set; } - - } } diff --git a/API/DTOs/Metadata/GenreTagDto.cs b/API/DTOs/Metadata/GenreTagDto.cs index e6ea03130..21d02273d 100644 --- a/API/DTOs/Metadata/GenreTagDto.cs +++ b/API/DTOs/Metadata/GenreTagDto.cs @@ -1,8 +1,7 @@ -namespace API.DTOs.Metadata +namespace API.DTOs.Metadata; + +public class GenreTagDto { - public class GenreTagDto - { - public int Id { get; set; } - public string Title { get; set; } - } + public int Id { get; set; } + public string Title { get; set; } } diff --git a/API/DTOs/OPDS/Feed.cs b/API/DTOs/OPDS/Feed.cs index efbffe8ac..20f8897a8 100644 --- a/API/DTOs/OPDS/Feed.cs +++ b/API/DTOs/OPDS/Feed.cs @@ -2,61 +2,60 @@ using System.Collections.Generic; using System.Xml.Serialization; -namespace API.DTOs.OPDS +namespace API.DTOs.OPDS; + +/// +/// +/// +[XmlRoot("feed", Namespace = "http://www.w3.org/2005/Atom")] +public class Feed { - /// - /// - /// - [XmlRoot("feed", Namespace = "http://www.w3.org/2005/Atom")] - public class Feed + [XmlElement("updated")] + public string Updated { get; init; } = DateTime.UtcNow.ToString("s"); + + [XmlElement("id")] + public string Id { get; set; } + + 
[XmlElement("title")] + public string Title { get; set; } + + [XmlElement("icon")] + public string Icon { get; set; } = "/favicon.ico"; + + [XmlElement("author")] + public FeedAuthor Author { get; set; } = new FeedAuthor() { - [XmlElement("updated")] - public string Updated { get; init; } = DateTime.UtcNow.ToString("s"); + Name = "Kavita", + Uri = "https://kavitareader.com" + }; - [XmlElement("id")] - public string Id { get; set; } + [XmlElement("totalResults", Namespace = "http://a9.com/-/spec/opensearch/1.1/")] + public int? Total { get; set; } = null; - [XmlElement("title")] - public string Title { get; set; } + [XmlElement("itemsPerPage", Namespace = "http://a9.com/-/spec/opensearch/1.1/")] + public int? ItemsPerPage { get; set; } = null; - [XmlElement("icon")] - public string Icon { get; set; } = "/favicon.ico"; + [XmlElement("startIndex", Namespace = "http://a9.com/-/spec/opensearch/1.1/")] + public int? StartIndex { get; set; } = null; - [XmlElement("author")] - public FeedAuthor Author { get; set; } = new FeedAuthor() - { - Name = "Kavita", - Uri = "https://kavitareader.com" - }; + [XmlElement("link")] + public List Links { get; set; } = new List() ; - [XmlElement("totalResults", Namespace = "http://a9.com/-/spec/opensearch/1.1/")] - public int? Total { get; set; } = null; + [XmlElement("entry")] + public List Entries { get; set; } = new List(); - [XmlElement("itemsPerPage", Namespace = "http://a9.com/-/spec/opensearch/1.1/")] - public int? ItemsPerPage { get; set; } = null; + public bool ShouldSerializeTotal() + { + return Total.HasValue; + } - [XmlElement("startIndex", Namespace = "http://a9.com/-/spec/opensearch/1.1/")] - public int? 
StartIndex { get; set; } = null; + public bool ShouldSerializeItemsPerPage() + { + return ItemsPerPage.HasValue; + } - [XmlElement("link")] - public List Links { get; set; } = new List() ; - - [XmlElement("entry")] - public List Entries { get; set; } = new List(); - - public bool ShouldSerializeTotal() - { - return Total.HasValue; - } - - public bool ShouldSerializeItemsPerPage() - { - return ItemsPerPage.HasValue; - } - - public bool ShouldSerializeStartIndex() - { - return StartIndex.HasValue; - } + public bool ShouldSerializeStartIndex() + { + return StartIndex.HasValue; } } diff --git a/API/DTOs/OPDS/FeedAuthor.cs b/API/DTOs/OPDS/FeedAuthor.cs index ec0446d73..1fd3e6cd2 100644 --- a/API/DTOs/OPDS/FeedAuthor.cs +++ b/API/DTOs/OPDS/FeedAuthor.cs @@ -1,12 +1,11 @@ using System.Xml.Serialization; -namespace API.DTOs.OPDS +namespace API.DTOs.OPDS; + +public class FeedAuthor { - public class FeedAuthor - { - [XmlElement("name")] - public string Name { get; set; } - [XmlElement("uri")] - public string Uri { get; set; } - } + [XmlElement("name")] + public string Name { get; set; } + [XmlElement("uri")] + public string Uri { get; set; } } diff --git a/API/DTOs/OPDS/FeedEntry.cs b/API/DTOs/OPDS/FeedEntry.cs index 9d2621dfd..43b00e1cd 100644 --- a/API/DTOs/OPDS/FeedEntry.cs +++ b/API/DTOs/OPDS/FeedEntry.cs @@ -2,50 +2,49 @@ using System.Collections.Generic; using System.Xml.Serialization; -namespace API.DTOs.OPDS +namespace API.DTOs.OPDS; + +public class FeedEntry { - public class FeedEntry - { - [XmlElement("updated")] - public string Updated { get; init; } = DateTime.UtcNow.ToString("s"); + [XmlElement("updated")] + public string Updated { get; init; } = DateTime.UtcNow.ToString("s"); - [XmlElement("id")] - public string Id { get; set; } + [XmlElement("id")] + public string Id { get; set; } - [XmlElement("title")] - public string Title { get; set; } + [XmlElement("title")] + public string Title { get; set; } - [XmlElement("summary")] - public string Summary { get; set; 
} + [XmlElement("summary")] + public string Summary { get; set; } - /// - /// Represents Size of the Entry - /// Tag: , ElementName = "dcterms:extent" - /// 2 MB - /// - [XmlElement("extent", Namespace = "http://purl.org/dc/terms/")] - public string Extent { get; set; } + /// + /// Represents Size of the Entry + /// Tag: , ElementName = "dcterms:extent" + /// 2 MB + /// + [XmlElement("extent", Namespace = "http://purl.org/dc/terms/")] + public string Extent { get; set; } - /// - /// Format of the file - /// https://dublincore.org/specifications/dublin-core/dcmi-terms/ - /// - [XmlElement("format", Namespace = "http://purl.org/dc/terms/format")] - public string Format { get; set; } + /// + /// Format of the file + /// https://dublincore.org/specifications/dublin-core/dcmi-terms/ + /// + [XmlElement("format", Namespace = "http://purl.org/dc/terms/format")] + public string Format { get; set; } - [XmlElement("language", Namespace = "http://purl.org/dc/terms/")] - public string Language { get; set; } + [XmlElement("language", Namespace = "http://purl.org/dc/terms/")] + public string Language { get; set; } - [XmlElement("content")] - public FeedEntryContent Content { get; set; } + [XmlElement("content")] + public FeedEntryContent Content { get; set; } - [XmlElement("link")] - public List Links = new List(); + [XmlElement("link")] + public List Links = new List(); - // [XmlElement("author")] - // public List Authors = new List(); + // [XmlElement("author")] + // public List Authors = new List(); - // [XmlElement("category")] - // public List Categories = new List(); - } + // [XmlElement("category")] + // public List Categories = new List(); } diff --git a/API/DTOs/OPDS/FeedEntryContent.cs b/API/DTOs/OPDS/FeedEntryContent.cs index d965cc3f4..3e95ce643 100644 --- a/API/DTOs/OPDS/FeedEntryContent.cs +++ b/API/DTOs/OPDS/FeedEntryContent.cs @@ -1,12 +1,11 @@ using System.Xml.Serialization; -namespace API.DTOs.OPDS +namespace API.DTOs.OPDS; + +public class FeedEntryContent { - 
public class FeedEntryContent - { - [XmlAttribute("type")] - public string Type = "text"; - [XmlText] - public string Text; - } + [XmlAttribute("type")] + public string Type = "text"; + [XmlText] + public string Text; } diff --git a/API/DTOs/OPDS/FeedLink.cs b/API/DTOs/OPDS/FeedLink.cs index 1589109ad..b4ed730a8 100644 --- a/API/DTOs/OPDS/FeedLink.cs +++ b/API/DTOs/OPDS/FeedLink.cs @@ -1,33 +1,32 @@ using System.Xml.Serialization; -namespace API.DTOs.OPDS +namespace API.DTOs.OPDS; + +public class FeedLink { - public class FeedLink + /// + /// Relation on the Link + /// + [XmlAttribute("rel")] + public string Rel { get; set; } + + /// + /// Should be any of the types here + /// + [XmlAttribute("type")] + public string Type { get; set; } + + [XmlAttribute("href")] + public string Href { get; set; } + + [XmlAttribute("title")] + public string Title { get; set; } + + [XmlAttribute("count", Namespace = "http://vaemendis.net/opds-pse/ns")] + public int TotalPages { get; set; } + + public bool ShouldSerializeTotalPages() { - /// - /// Relation on the Link - /// - [XmlAttribute("rel")] - public string Rel { get; set; } - - /// - /// Should be any of the types here - /// - [XmlAttribute("type")] - public string Type { get; set; } - - [XmlAttribute("href")] - public string Href { get; set; } - - [XmlAttribute("title")] - public string Title { get; set; } - - [XmlAttribute("count", Namespace = "http://vaemendis.net/opds-pse/ns")] - public int TotalPages { get; set; } - - public bool ShouldSerializeTotalPages() - { - return TotalPages > 0; - } + return TotalPages > 0; } } diff --git a/API/DTOs/OPDS/FeedLinkRelation.cs b/API/DTOs/OPDS/FeedLinkRelation.cs index 9702dd943..4c9ee2c94 100644 --- a/API/DTOs/OPDS/FeedLinkRelation.cs +++ b/API/DTOs/OPDS/FeedLinkRelation.cs @@ -1,24 +1,23 @@ -namespace API.DTOs.OPDS +namespace API.DTOs.OPDS; + +public static class FeedLinkRelation { - public static class FeedLinkRelation - { - public const string Debug = "debug"; - public const string 
Search = "search"; - public const string Self = "self"; - public const string Start = "start"; - public const string Next = "next"; - public const string Prev = "prev"; - public const string Alternate = "alternate"; - public const string SubSection = "subsection"; - public const string Related = "related"; - public const string Image = "http://opds-spec.org/image"; - public const string Thumbnail = "http://opds-spec.org/image/thumbnail"; - /// - /// This will allow for a download to occur - /// - public const string Acquisition = "http://opds-spec.org/acquisition/open-access"; + public const string Debug = "debug"; + public const string Search = "search"; + public const string Self = "self"; + public const string Start = "start"; + public const string Next = "next"; + public const string Prev = "prev"; + public const string Alternate = "alternate"; + public const string SubSection = "subsection"; + public const string Related = "related"; + public const string Image = "http://opds-spec.org/image"; + public const string Thumbnail = "http://opds-spec.org/image/thumbnail"; + /// + /// This will allow for a download to occur + /// + public const string Acquisition = "http://opds-spec.org/acquisition/open-access"; #pragma warning disable S1075 - public const string Stream = "http://vaemendis.net/opds-pse/stream"; + public const string Stream = "http://vaemendis.net/opds-pse/stream"; #pragma warning restore S1075 - } } diff --git a/API/DTOs/OPDS/FeedLinkType.cs b/API/DTOs/OPDS/FeedLinkType.cs index 2119a6f80..6ae48bd52 100644 --- a/API/DTOs/OPDS/FeedLinkType.cs +++ b/API/DTOs/OPDS/FeedLinkType.cs @@ -1,11 +1,10 @@ -namespace API.DTOs.OPDS +namespace API.DTOs.OPDS; + +public static class FeedLinkType { - public static class FeedLinkType - { - public const string Atom = "application/atom+xml"; - public const string AtomSearch = "application/opensearchdescription+xml"; - public const string AtomNavigation = "application/atom+xml;profile=opds-catalog;kind=navigation"; - 
public const string AtomAcquisition = "application/atom+xml;profile=opds-catalog;kind=acquisition"; - public const string Image = "image/jpeg"; - } + public const string Atom = "application/atom+xml"; + public const string AtomSearch = "application/opensearchdescription+xml"; + public const string AtomNavigation = "application/atom+xml;profile=opds-catalog;kind=navigation"; + public const string AtomAcquisition = "application/atom+xml;profile=opds-catalog;kind=acquisition"; + public const string Image = "image/jpeg"; } diff --git a/API/DTOs/OPDS/OpenSearchDescription.cs b/API/DTOs/OPDS/OpenSearchDescription.cs index 94eba555c..6ee043ac4 100644 --- a/API/DTOs/OPDS/OpenSearchDescription.cs +++ b/API/DTOs/OPDS/OpenSearchDescription.cs @@ -1,42 +1,41 @@ using System.Xml.Serialization; -namespace API.DTOs.OPDS -{ - [XmlRoot("OpenSearchDescription", Namespace = "http://a9.com/-/spec/opensearch/1.1/")] - public class OpenSearchDescription - { - /// - /// Contains a brief human-readable title that identifies this search engine. - /// - public string ShortName { get; set; } - /// - /// Contains an extended human-readable title that identifies this search engine. - /// - public string LongName { get; set; } - /// - /// Contains a human-readable text description of the search engine. - /// - public string Description { get; set; } - /// - /// https://github.com/dewitt/opensearch/blob/master/opensearch-1-1-draft-6.md#the-url-element - /// - public SearchLink Url { get; set; } - /// - /// Contains a set of words that are used as keywords to identify and categorize this search content. - /// Tags must be a single word and are delimited by the space character (' '). - /// - public string Tags { get; set; } - /// - /// Contains a URL that identifies the location of an image that can be used in association with this search content. 
- /// http://example.com/websearch.png - /// - public string Image { get; set; } - public string InputEncoding { get; set; } = "UTF-8"; - public string OutputEncoding { get; set; } = "UTF-8"; - /// - /// Contains the human-readable name or identifier of the creator or maintainer of the description document. - /// - public string Developer { get; set; } = "kavitareader.com"; +namespace API.DTOs.OPDS; + +[XmlRoot("OpenSearchDescription", Namespace = "http://a9.com/-/spec/opensearch/1.1/")] +public class OpenSearchDescription +{ + /// + /// Contains a brief human-readable title that identifies this search engine. + /// + public string ShortName { get; set; } + /// + /// Contains an extended human-readable title that identifies this search engine. + /// + public string LongName { get; set; } + /// + /// Contains a human-readable text description of the search engine. + /// + public string Description { get; set; } + /// + /// https://github.com/dewitt/opensearch/blob/master/opensearch-1-1-draft-6.md#the-url-element + /// + public SearchLink Url { get; set; } + /// + /// Contains a set of words that are used as keywords to identify and categorize this search content. + /// Tags must be a single word and are delimited by the space character (' '). + /// + public string Tags { get; set; } + /// + /// Contains a URL that identifies the location of an image that can be used in association with this search content. + /// http://example.com/websearch.png + /// + public string Image { get; set; } + public string InputEncoding { get; set; } = "UTF-8"; + public string OutputEncoding { get; set; } = "UTF-8"; + /// + /// Contains the human-readable name or identifier of the creator or maintainer of the description document. 
+ /// + public string Developer { get; set; } = "kavitareader.com"; - } } diff --git a/API/DTOs/OPDS/SearchLink.cs b/API/DTOs/OPDS/SearchLink.cs index db5a20f23..6aeca506a 100644 --- a/API/DTOs/OPDS/SearchLink.cs +++ b/API/DTOs/OPDS/SearchLink.cs @@ -1,16 +1,15 @@ using System.Xml.Serialization; -namespace API.DTOs.OPDS +namespace API.DTOs.OPDS; + +public class SearchLink { - public class SearchLink - { - [XmlAttribute("type")] - public string Type { get; set; } + [XmlAttribute("type")] + public string Type { get; set; } - [XmlAttribute("rel")] - public string Rel { get; set; } = "results"; + [XmlAttribute("rel")] + public string Rel { get; set; } = "results"; - [XmlAttribute("template")] - public string Template { get; set; } - } + [XmlAttribute("template")] + public string Template { get; set; } } diff --git a/API/DTOs/PersonDto.cs b/API/DTOs/PersonDto.cs index 0ab7a4076..92bd81924 100644 --- a/API/DTOs/PersonDto.cs +++ b/API/DTOs/PersonDto.cs @@ -1,11 +1,10 @@ using API.Entities.Enums; -namespace API.DTOs +namespace API.DTOs; + +public class PersonDto { - public class PersonDto - { - public int Id { get; set; } - public string Name { get; set; } - public PersonRole Role { get; set; } - } + public int Id { get; set; } + public string Name { get; set; } + public PersonRole Role { get; set; } } diff --git a/API/DTOs/ProgressDto.cs b/API/DTOs/ProgressDto.cs index 021a5f243..1bab779cb 100644 --- a/API/DTOs/ProgressDto.cs +++ b/API/DTOs/ProgressDto.cs @@ -1,21 +1,20 @@ using System.ComponentModel.DataAnnotations; -namespace API.DTOs +namespace API.DTOs; + +public class ProgressDto { - public class ProgressDto - { - [Required] - public int VolumeId { get; set; } - [Required] - public int ChapterId { get; set; } - [Required] - public int PageNum { get; set; } - [Required] - public int SeriesId { get; set; } - /// - /// For Book reader, this can be an optional string of the id of a part marker, to help resume reading position - /// on pages that combine multiple 
"chapters". - /// - public string BookScrollId { get; set; } - } + [Required] + public int VolumeId { get; set; } + [Required] + public int ChapterId { get; set; } + [Required] + public int PageNum { get; set; } + [Required] + public int SeriesId { get; set; } + /// + /// For Book reader, this can be an optional string of the id of a part marker, to help resume reading position + /// on pages that combine multiple "chapters". + /// + public string BookScrollId { get; set; } } diff --git a/API/DTOs/Reader/BookChapterItem.cs b/API/DTOs/Reader/BookChapterItem.cs index 9db676cc5..3dabbd1ec 100644 --- a/API/DTOs/Reader/BookChapterItem.cs +++ b/API/DTOs/Reader/BookChapterItem.cs @@ -1,21 +1,20 @@ using System.Collections.Generic; -namespace API.DTOs.Reader +namespace API.DTOs.Reader; + +public class BookChapterItem { - public class BookChapterItem - { - /// - /// Name of the Chapter - /// - public string Title { get; set; } - /// - /// A part represents the id of the anchor so we can scroll to it. 01_values.xhtml#h_sVZPaxUSy/ - /// - public string Part { get; set; } - /// - /// Page Number to load for the chapter - /// - public int Page { get; set; } - public ICollection Children { get; set; } - } + /// + /// Name of the Chapter + /// + public string Title { get; set; } + /// + /// A part represents the id of the anchor so we can scroll to it. 
01_values.xhtml#h_sVZPaxUSy/ + /// + public string Part { get; set; } + /// + /// Page Number to load for the chapter + /// + public int Page { get; set; } + public ICollection Children { get; set; } } diff --git a/API/DTOs/Reader/BookInfoDto.cs b/API/DTOs/Reader/BookInfoDto.cs index b881c1b10..78cfc39b0 100644 --- a/API/DTOs/Reader/BookInfoDto.cs +++ b/API/DTOs/Reader/BookInfoDto.cs @@ -1,19 +1,18 @@ using API.Entities.Enums; -namespace API.DTOs.Reader +namespace API.DTOs.Reader; + +public class BookInfoDto : IChapterInfoDto { - public class BookInfoDto : IChapterInfoDto - { - public string BookTitle { get; set; } - public int SeriesId { get; set; } - public int VolumeId { get; set; } - public MangaFormat SeriesFormat { get; set; } - public string SeriesName { get; set; } - public string ChapterNumber { get; set; } - public string VolumeNumber { get; set; } - public int LibraryId { get; set; } - public int Pages { get; set; } - public bool IsSpecial { get; set; } - public string ChapterTitle { get; set; } - } + public string BookTitle { get; set; } + public int SeriesId { get; set; } + public int VolumeId { get; set; } + public MangaFormat SeriesFormat { get; set; } + public string SeriesName { get; set; } + public string ChapterNumber { get; set; } + public string VolumeNumber { get; set; } + public int LibraryId { get; set; } + public int Pages { get; set; } + public bool IsSpecial { get; set; } + public string ChapterTitle { get; set; } } diff --git a/API/DTOs/Reader/BookmarkDto.cs b/API/DTOs/Reader/BookmarkDto.cs index 33f55cf8d..b132eb958 100644 --- a/API/DTOs/Reader/BookmarkDto.cs +++ b/API/DTOs/Reader/BookmarkDto.cs @@ -1,17 +1,16 @@ using System.ComponentModel.DataAnnotations; -namespace API.DTOs.Reader +namespace API.DTOs.Reader; + +public class BookmarkDto { - public class BookmarkDto - { - public int Id { get; set; } - [Required] - public int Page { get; set; } - [Required] - public int VolumeId { get; set; } - [Required] - public int SeriesId { get; 
set; } - [Required] - public int ChapterId { get; set; } - } + public int Id { get; set; } + [Required] + public int Page { get; set; } + [Required] + public int VolumeId { get; set; } + [Required] + public int SeriesId { get; set; } + [Required] + public int ChapterId { get; set; } } diff --git a/API/DTOs/Reader/BulkRemoveBookmarkForSeriesDto.cs b/API/DTOs/Reader/BulkRemoveBookmarkForSeriesDto.cs index 2408154b8..9cd22f958 100644 --- a/API/DTOs/Reader/BulkRemoveBookmarkForSeriesDto.cs +++ b/API/DTOs/Reader/BulkRemoveBookmarkForSeriesDto.cs @@ -1,9 +1,8 @@ using System.Collections.Generic; -namespace API.DTOs.Reader +namespace API.DTOs.Reader; + +public class BulkRemoveBookmarkForSeriesDto { - public class BulkRemoveBookmarkForSeriesDto - { - public ICollection SeriesIds { get; init; } - } + public ICollection SeriesIds { get; init; } } diff --git a/API/DTOs/Reader/IChapterInfoDto.cs b/API/DTOs/Reader/IChapterInfoDto.cs index e448e5e13..6a9a74a2c 100644 --- a/API/DTOs/Reader/IChapterInfoDto.cs +++ b/API/DTOs/Reader/IChapterInfoDto.cs @@ -1,19 +1,18 @@ using API.Entities.Enums; -namespace API.DTOs.Reader -{ - public interface IChapterInfoDto - { - public int SeriesId { get; set; } - public int VolumeId { get; set; } - public MangaFormat SeriesFormat { get; set; } - public string SeriesName { get; set; } - public string ChapterNumber { get; set; } - public string VolumeNumber { get; set; } - public int LibraryId { get; set; } - public int Pages { get; set; } - public bool IsSpecial { get; set; } - public string ChapterTitle { get; set; } +namespace API.DTOs.Reader; + +public interface IChapterInfoDto +{ + public int SeriesId { get; set; } + public int VolumeId { get; set; } + public MangaFormat SeriesFormat { get; set; } + public string SeriesName { get; set; } + public string ChapterNumber { get; set; } + public string VolumeNumber { get; set; } + public int LibraryId { get; set; } + public int Pages { get; set; } + public bool IsSpecial { get; set; } + public 
string ChapterTitle { get; set; } - } } diff --git a/API/DTOs/Reader/MarkMultipleSeriesAsReadDto.cs b/API/DTOs/Reader/MarkMultipleSeriesAsReadDto.cs index 7201658fa..da36e44f5 100644 --- a/API/DTOs/Reader/MarkMultipleSeriesAsReadDto.cs +++ b/API/DTOs/Reader/MarkMultipleSeriesAsReadDto.cs @@ -1,9 +1,8 @@ using System.Collections.Generic; -namespace API.DTOs.Reader +namespace API.DTOs.Reader; + +public class MarkMultipleSeriesAsReadDto { - public class MarkMultipleSeriesAsReadDto - { - public IReadOnlyList SeriesIds { get; init; } - } + public IReadOnlyList SeriesIds { get; init; } } diff --git a/API/DTOs/Reader/MarkReadDto.cs b/API/DTOs/Reader/MarkReadDto.cs index 3d94e3a9d..9bf46a6d5 100644 --- a/API/DTOs/Reader/MarkReadDto.cs +++ b/API/DTOs/Reader/MarkReadDto.cs @@ -1,7 +1,6 @@ -namespace API.DTOs.Reader +namespace API.DTOs.Reader; + +public class MarkReadDto { - public class MarkReadDto - { - public int SeriesId { get; init; } - } + public int SeriesId { get; init; } } diff --git a/API/DTOs/Reader/MarkVolumeReadDto.cs b/API/DTOs/Reader/MarkVolumeReadDto.cs index 757f23aee..47ffd2649 100644 --- a/API/DTOs/Reader/MarkVolumeReadDto.cs +++ b/API/DTOs/Reader/MarkVolumeReadDto.cs @@ -1,8 +1,7 @@ -namespace API.DTOs.Reader +namespace API.DTOs.Reader; + +public class MarkVolumeReadDto { - public class MarkVolumeReadDto - { - public int SeriesId { get; init; } - public int VolumeId { get; init; } - } + public int SeriesId { get; init; } + public int VolumeId { get; init; } } diff --git a/API/DTOs/Reader/MarkVolumesReadDto.cs b/API/DTOs/Reader/MarkVolumesReadDto.cs index 7e23e721a..9f02af524 100644 --- a/API/DTOs/Reader/MarkVolumesReadDto.cs +++ b/API/DTOs/Reader/MarkVolumesReadDto.cs @@ -1,20 +1,19 @@ using System.Collections.Generic; -namespace API.DTOs.Reader +namespace API.DTOs.Reader; + +/// +/// This is used for bulk updating a set of volume and or chapters in one go +/// +public class MarkVolumesReadDto { + public int SeriesId { get; set; } /// - /// This is used 
for bulk updating a set of volume and or chapters in one go + /// A list of Volumes to mark read /// - public class MarkVolumesReadDto - { - public int SeriesId { get; set; } - /// - /// A list of Volumes to mark read - /// - public IReadOnlyList VolumeIds { get; set; } - /// - /// A list of additional Chapters to mark as read - /// - public IReadOnlyList ChapterIds { get; set; } - } + public IReadOnlyList VolumeIds { get; set; } + /// + /// A list of additional Chapters to mark as read + /// + public IReadOnlyList ChapterIds { get; set; } } diff --git a/API/DTOs/Reader/RemoveBookmarkForSeriesDto.cs b/API/DTOs/Reader/RemoveBookmarkForSeriesDto.cs index a269b7095..ed6368a4f 100644 --- a/API/DTOs/Reader/RemoveBookmarkForSeriesDto.cs +++ b/API/DTOs/Reader/RemoveBookmarkForSeriesDto.cs @@ -1,7 +1,6 @@ -namespace API.DTOs.Reader +namespace API.DTOs.Reader; + +public class RemoveBookmarkForSeriesDto { - public class RemoveBookmarkForSeriesDto - { - public int SeriesId { get; init; } - } + public int SeriesId { get; init; } } diff --git a/API/DTOs/ReadingLists/CreateReadingListDto.cs b/API/DTOs/ReadingLists/CreateReadingListDto.cs index c32b62bea..396c05e7c 100644 --- a/API/DTOs/ReadingLists/CreateReadingListDto.cs +++ b/API/DTOs/ReadingLists/CreateReadingListDto.cs @@ -1,7 +1,6 @@ -namespace API.DTOs.ReadingLists +namespace API.DTOs.ReadingLists; + +public class CreateReadingListDto { - public class CreateReadingListDto - { - public string Title { get; init; } - } + public string Title { get; init; } } diff --git a/API/DTOs/ReadingLists/ReadingListDto.cs b/API/DTOs/ReadingLists/ReadingListDto.cs index ba446d17a..de212217e 100644 --- a/API/DTOs/ReadingLists/ReadingListDto.cs +++ b/API/DTOs/ReadingLists/ReadingListDto.cs @@ -1,18 +1,17 @@ -namespace API.DTOs.ReadingLists +namespace API.DTOs.ReadingLists; + +public class ReadingListDto { - public class ReadingListDto - { - public int Id { get; init; } - public string Title { get; set; } - public string Summary { get; set; } 
- /// - /// Reading lists that are promoted are only done by admins - /// - public bool Promoted { get; set; } - public bool CoverImageLocked { get; set; } - /// - /// This is used to tell the UI if it should request a Cover Image or not. If null or empty, it has not been set. - /// - public string CoverImage { get; set; } = string.Empty; - } + public int Id { get; init; } + public string Title { get; set; } + public string Summary { get; set; } + /// + /// Reading lists that are promoted are only done by admins + /// + public bool Promoted { get; set; } + public bool CoverImageLocked { get; set; } + /// + /// This is used to tell the UI if it should request a Cover Image or not. If null or empty, it has not been set. + /// + public string CoverImage { get; set; } = string.Empty; } diff --git a/API/DTOs/ReadingLists/ReadingListItemDto.cs b/API/DTOs/ReadingLists/ReadingListItemDto.cs index b58fdcf80..39f844d8b 100644 --- a/API/DTOs/ReadingLists/ReadingListItemDto.cs +++ b/API/DTOs/ReadingLists/ReadingListItemDto.cs @@ -1,25 +1,24 @@ using API.Entities.Enums; -namespace API.DTOs.ReadingLists +namespace API.DTOs.ReadingLists; + +public class ReadingListItemDto { - public class ReadingListItemDto - { - public int Id { get; init; } - public int Order { get; init; } - public int ChapterId { get; init; } - public int SeriesId { get; init; } - public string SeriesName { get; set; } - public MangaFormat SeriesFormat { get; set; } - public int PagesRead { get; set; } - public int PagesTotal { get; set; } - public string ChapterNumber { get; set; } - public string VolumeNumber { get; set; } - public int VolumeId { get; set; } - public int LibraryId { get; set; } - public string Title { get; set; } - /// - /// Used internally only - /// - public int ReadingListId { get; set; } - } + public int Id { get; init; } + public int Order { get; init; } + public int ChapterId { get; init; } + public int SeriesId { get; init; } + public string SeriesName { get; set; } + public 
MangaFormat SeriesFormat { get; set; } + public int PagesRead { get; set; } + public int PagesTotal { get; set; } + public string ChapterNumber { get; set; } + public string VolumeNumber { get; set; } + public int VolumeId { get; set; } + public int LibraryId { get; set; } + public string Title { get; set; } + /// + /// Used internally only + /// + public int ReadingListId { get; set; } } diff --git a/API/DTOs/ReadingLists/UpdateReadingListByChapterDto.cs b/API/DTOs/ReadingLists/UpdateReadingListByChapterDto.cs index 887850755..985f86ac0 100644 --- a/API/DTOs/ReadingLists/UpdateReadingListByChapterDto.cs +++ b/API/DTOs/ReadingLists/UpdateReadingListByChapterDto.cs @@ -1,9 +1,8 @@ -namespace API.DTOs.ReadingLists +namespace API.DTOs.ReadingLists; + +public class UpdateReadingListByChapterDto { - public class UpdateReadingListByChapterDto - { - public int ChapterId { get; init; } - public int SeriesId { get; init; } - public int ReadingListId { get; init; } - } + public int ChapterId { get; init; } + public int SeriesId { get; init; } + public int ReadingListId { get; init; } } diff --git a/API/DTOs/ReadingLists/UpdateReadingListByMultipleDto.cs b/API/DTOs/ReadingLists/UpdateReadingListByMultipleDto.cs index 02a41a767..0d4bfb0dd 100644 --- a/API/DTOs/ReadingLists/UpdateReadingListByMultipleDto.cs +++ b/API/DTOs/ReadingLists/UpdateReadingListByMultipleDto.cs @@ -1,12 +1,11 @@ using System.Collections.Generic; -namespace API.DTOs.ReadingLists +namespace API.DTOs.ReadingLists; + +public class UpdateReadingListByMultipleDto { - public class UpdateReadingListByMultipleDto - { - public int SeriesId { get; init; } - public int ReadingListId { get; init; } - public IReadOnlyList VolumeIds { get; init; } - public IReadOnlyList ChapterIds { get; init; } - } + public int SeriesId { get; init; } + public int ReadingListId { get; init; } + public IReadOnlyList VolumeIds { get; init; } + public IReadOnlyList ChapterIds { get; init; } } diff --git 
a/API/DTOs/ReadingLists/UpdateReadingListByMultipleSeriesDto.cs b/API/DTOs/ReadingLists/UpdateReadingListByMultipleSeriesDto.cs index 4b08f95bc..944d4ff78 100644 --- a/API/DTOs/ReadingLists/UpdateReadingListByMultipleSeriesDto.cs +++ b/API/DTOs/ReadingLists/UpdateReadingListByMultipleSeriesDto.cs @@ -1,10 +1,9 @@ using System.Collections.Generic; -namespace API.DTOs.ReadingLists +namespace API.DTOs.ReadingLists; + +public class UpdateReadingListByMultipleSeriesDto { - public class UpdateReadingListByMultipleSeriesDto - { - public int ReadingListId { get; init; } - public IReadOnlyList SeriesIds { get; init; } - } + public int ReadingListId { get; init; } + public IReadOnlyList SeriesIds { get; init; } } diff --git a/API/DTOs/ReadingLists/UpdateReadingListBySeriesDto.cs b/API/DTOs/ReadingLists/UpdateReadingListBySeriesDto.cs index 1040a9218..0590882bd 100644 --- a/API/DTOs/ReadingLists/UpdateReadingListBySeriesDto.cs +++ b/API/DTOs/ReadingLists/UpdateReadingListBySeriesDto.cs @@ -1,8 +1,7 @@ -namespace API.DTOs.ReadingLists +namespace API.DTOs.ReadingLists; + +public class UpdateReadingListBySeriesDto { - public class UpdateReadingListBySeriesDto - { - public int SeriesId { get; init; } - public int ReadingListId { get; init; } - } + public int SeriesId { get; init; } + public int ReadingListId { get; init; } } diff --git a/API/DTOs/ReadingLists/UpdateReadingListByVolumeDto.cs b/API/DTOs/ReadingLists/UpdateReadingListByVolumeDto.cs index 0d903d48e..f77c7d63a 100644 --- a/API/DTOs/ReadingLists/UpdateReadingListByVolumeDto.cs +++ b/API/DTOs/ReadingLists/UpdateReadingListByVolumeDto.cs @@ -1,9 +1,8 @@ -namespace API.DTOs.ReadingLists +namespace API.DTOs.ReadingLists; + +public class UpdateReadingListByVolumeDto { - public class UpdateReadingListByVolumeDto - { - public int VolumeId { get; init; } - public int SeriesId { get; init; } - public int ReadingListId { get; init; } - } + public int VolumeId { get; init; } + public int SeriesId { get; init; } + public int 
ReadingListId { get; init; } } diff --git a/API/DTOs/ReadingLists/UpdateReadingListDto.cs b/API/DTOs/ReadingLists/UpdateReadingListDto.cs index 5b8f69731..b61ab2a72 100644 --- a/API/DTOs/ReadingLists/UpdateReadingListDto.cs +++ b/API/DTOs/ReadingLists/UpdateReadingListDto.cs @@ -1,11 +1,10 @@ -namespace API.DTOs.ReadingLists +namespace API.DTOs.ReadingLists; + +public class UpdateReadingListDto { - public class UpdateReadingListDto - { - public int ReadingListId { get; set; } - public string Title { get; set; } - public string Summary { get; set; } - public bool Promoted { get; set; } - public bool CoverImageLocked { get; set; } - } + public int ReadingListId { get; set; } + public string Title { get; set; } + public string Summary { get; set; } + public bool Promoted { get; set; } + public bool CoverImageLocked { get; set; } } diff --git a/API/DTOs/ReadingLists/UpdateReadingListPosition.cs b/API/DTOs/ReadingLists/UpdateReadingListPosition.cs index 5407a1ad5..3d0487144 100644 --- a/API/DTOs/ReadingLists/UpdateReadingListPosition.cs +++ b/API/DTOs/ReadingLists/UpdateReadingListPosition.cs @@ -1,18 +1,14 @@ using System.ComponentModel.DataAnnotations; -namespace API.DTOs.ReadingLists +namespace API.DTOs.ReadingLists; + +/// +/// DTO for moving a reading list item to another position within the same list +/// +public class UpdateReadingListPosition { - /// - /// DTO for moving a reading list item to another position within the same list - /// - public class UpdateReadingListPosition - { - [Required] - public int ReadingListId { get; set; } - [Required] - public int ReadingListItemId { get; set; } - public int FromPosition { get; set; } - [Required] - public int ToPosition { get; set; } - } + [Required] public int ReadingListId { get; set; } + [Required] public int ReadingListItemId { get; set; } + public int FromPosition { get; set; } + [Required] public int ToPosition { get; set; } } diff --git a/API/DTOs/RefreshSeriesDto.cs b/API/DTOs/RefreshSeriesDto.cs index 
db1264bd3..64a684394 100644 --- a/API/DTOs/RefreshSeriesDto.cs +++ b/API/DTOs/RefreshSeriesDto.cs @@ -1,22 +1,21 @@ -namespace API.DTOs +namespace API.DTOs; + +/// +/// Used for running some task against a Series. +/// +public class RefreshSeriesDto { /// - /// Used for running some task against a Series. + /// Library Id series belongs to /// - public class RefreshSeriesDto - { - /// - /// Library Id series belongs to - /// - public int LibraryId { get; init; } - /// - /// Series Id - /// - public int SeriesId { get; init; } - /// - /// Should the task force opening/re-calculation. - /// - /// This is expensive if true. Defaults to true. - public bool ForceUpdate { get; init; } = true; - } + public int LibraryId { get; init; } + /// + /// Series Id + /// + public int SeriesId { get; init; } + /// + /// Should the task force opening/re-calculation. + /// + /// This is expensive if true. Defaults to true. + public bool ForceUpdate { get; init; } = true; } diff --git a/API/DTOs/RegisterDto.cs b/API/DTOs/RegisterDto.cs index 95814b88f..4e542f1c0 100644 --- a/API/DTOs/RegisterDto.cs +++ b/API/DTOs/RegisterDto.cs @@ -1,15 +1,16 @@ using System.ComponentModel.DataAnnotations; -namespace API.DTOs +namespace API.DTOs; + +public class RegisterDto { - public class RegisterDto - { - [Required] - public string Username { get; init; } - [Required] - public string Email { get; init; } - [Required] - [StringLength(32, MinimumLength = 6)] - public string Password { get; set; } - } + [Required] + public string Username { get; init; } + /// + /// An email to register with. Optional. 
Provides Forgot Password functionality + /// + public string Email { get; init; } + [Required] + [StringLength(32, MinimumLength = 6)] + public string Password { get; set; } } diff --git a/API/DTOs/Search/SearchResultDto.cs b/API/DTOs/Search/SearchResultDto.cs index 328ff7a1f..4d9e300a5 100644 --- a/API/DTOs/Search/SearchResultDto.cs +++ b/API/DTOs/Search/SearchResultDto.cs @@ -1,18 +1,17 @@ using API.Entities.Enums; -namespace API.DTOs.Search -{ - public class SearchResultDto - { - public int SeriesId { get; init; } - public string Name { get; init; } - public string OriginalName { get; init; } - public string SortName { get; init; } - public string LocalizedName { get; init; } - public MangaFormat Format { get; init; } +namespace API.DTOs.Search; - // Grouping information - public string LibraryName { get; set; } - public int LibraryId { get; set; } - } +public class SearchResultDto +{ + public int SeriesId { get; init; } + public string Name { get; init; } + public string OriginalName { get; init; } + public string SortName { get; init; } + public string LocalizedName { get; init; } + public MangaFormat Format { get; init; } + + // Grouping information + public string LibraryName { get; set; } + public int LibraryId { get; set; } } diff --git a/API/DTOs/SeriesByIdsDto.cs b/API/DTOs/SeriesByIdsDto.cs index 0ffdd8cba..29c028156 100644 --- a/API/DTOs/SeriesByIdsDto.cs +++ b/API/DTOs/SeriesByIdsDto.cs @@ -1,7 +1,6 @@ -namespace API.DTOs +namespace API.DTOs; + +public class SeriesByIdsDto { - public class SeriesByIdsDto - { - public int[] SeriesIds { get; init; } - } + public int[] SeriesIds { get; init; } } diff --git a/API/DTOs/SeriesDetail/RelatedSeriesDto.cs b/API/DTOs/SeriesDetail/RelatedSeriesDto.cs index f3c3fd644..452da9cf5 100644 --- a/API/DTOs/SeriesDetail/RelatedSeriesDto.cs +++ b/API/DTOs/SeriesDetail/RelatedSeriesDto.cs @@ -22,4 +22,5 @@ public class RelatedSeriesDto public IEnumerable AlternativeVersions { get; set; } public IEnumerable Doujinshis { 
get; set; } public IEnumerable Parent { get; set; } + public IEnumerable Editions { get; set; } } diff --git a/API/DTOs/SeriesDetail/UpdateRelatedSeriesDto.cs b/API/DTOs/SeriesDetail/UpdateRelatedSeriesDto.cs index b39f91244..d6976a05d 100644 --- a/API/DTOs/SeriesDetail/UpdateRelatedSeriesDto.cs +++ b/API/DTOs/SeriesDetail/UpdateRelatedSeriesDto.cs @@ -16,4 +16,5 @@ public class UpdateRelatedSeriesDto public IList AlternativeSettings { get; set; } public IList AlternativeVersions { get; set; } public IList Doujinshis { get; set; } + public IList Editions { get; set; } } diff --git a/API/DTOs/SeriesDto.cs b/API/DTOs/SeriesDto.cs index bbf65e9fb..b1b5a9f35 100644 --- a/API/DTOs/SeriesDto.cs +++ b/API/DTOs/SeriesDto.cs @@ -2,65 +2,64 @@ using API.Entities.Enums; using API.Entities.Interfaces; -namespace API.DTOs +namespace API.DTOs; + +public class SeriesDto : IHasReadTimeEstimate { - public class SeriesDto : IHasReadTimeEstimate - { - public int Id { get; init; } - public string Name { get; init; } - public string OriginalName { get; init; } - public string LocalizedName { get; init; } - public string SortName { get; init; } - public string Summary { get; init; } - public int Pages { get; init; } - public bool CoverImageLocked { get; set; } - /// - /// Sum of pages read from linked Volumes. Calculated at API-time. - /// - public int PagesRead { get; set; } - /// - /// DateTime representing last time the series was Read. Calculated at API-time. - /// - public DateTime LatestReadDate { get; set; } - /// - /// DateTime representing last time a chapter was added to the Series - /// - public DateTime LastChapterAdded { get; set; } - /// - /// Rating from logged in user. Calculated at API-time. - /// - public int UserRating { get; set; } - /// - /// Review from logged in user. Calculated at API-time. 
- /// - public string UserReview { get; set; } - public MangaFormat Format { get; set; } + public int Id { get; init; } + public string Name { get; init; } + public string OriginalName { get; init; } + public string LocalizedName { get; init; } + public string SortName { get; init; } + public string Summary { get; init; } + public int Pages { get; init; } + public bool CoverImageLocked { get; set; } + /// + /// Sum of pages read from linked Volumes. Calculated at API-time. + /// + public int PagesRead { get; set; } + /// + /// DateTime representing last time the series was Read. Calculated at API-time. + /// + public DateTime LatestReadDate { get; set; } + /// + /// DateTime representing last time a chapter was added to the Series + /// + public DateTime LastChapterAdded { get; set; } + /// + /// Rating from logged in user. Calculated at API-time. + /// + public int UserRating { get; set; } + /// + /// Review from logged in user. Calculated at API-time. + /// + public string UserReview { get; set; } + public MangaFormat Format { get; set; } - public DateTime Created { get; set; } + public DateTime Created { get; set; } - public bool NameLocked { get; set; } - public bool SortNameLocked { get; set; } - public bool LocalizedNameLocked { get; set; } - /// - /// Total number of words for the series. Only applies to epubs. - /// - public long WordCount { get; set; } + public bool NameLocked { get; set; } + public bool SortNameLocked { get; set; } + public bool LocalizedNameLocked { get; set; } + /// + /// Total number of words for the series. Only applies to epubs. 
+ /// + public long WordCount { get; set; } - public int LibraryId { get; set; } - public string LibraryName { get; set; } - /// - public int MinHoursToRead { get; set; } - /// - public int MaxHoursToRead { get; set; } - /// - public int AvgHoursToRead { get; set; } - /// - /// The highest level folder for this Series - /// - public string FolderPath { get; set; } - /// - /// The last time the folder for this series was scanned - /// - public DateTime LastFolderScanned { get; set; } - } + public int LibraryId { get; set; } + public string LibraryName { get; set; } + /// + public int MinHoursToRead { get; set; } + /// + public int MaxHoursToRead { get; set; } + /// + public int AvgHoursToRead { get; set; } + /// + /// The highest level folder for this Series + /// + public string FolderPath { get; set; } + /// + /// The last time the folder for this series was scanned + /// + public DateTime LastFolderScanned { get; set; } } diff --git a/API/DTOs/SeriesMetadataDto.cs b/API/DTOs/SeriesMetadataDto.cs index 9a396f5d1..8853fdb0b 100644 --- a/API/DTOs/SeriesMetadataDto.cs +++ b/API/DTOs/SeriesMetadataDto.cs @@ -4,83 +4,83 @@ using API.DTOs.CollectionTags; using API.DTOs.Metadata; using API.Entities.Enums; -namespace API.DTOs +namespace API.DTOs; + +public class SeriesMetadataDto { - public class SeriesMetadataDto - { - public int Id { get; set; } - public string Summary { get; set; } = string.Empty; - /// - /// Collections the Series belongs to - /// - public ICollection CollectionTags { get; set; } - /// - /// Genres for the Series - /// - public ICollection Genres { get; set; } - /// - /// Collection of all Tags from underlying chapters for a Series - /// - public ICollection Tags { get; set; } - public ICollection Writers { get; set; } = new List(); - public ICollection CoverArtists { get; set; } = new List(); - public ICollection Publishers { get; set; } = new List(); - public ICollection Characters { get; set; } = new List(); - public ICollection Pencillers { get; 
set; } = new List(); - public ICollection Inkers { get; set; } = new List(); - public ICollection Colorists { get; set; } = new List(); - public ICollection Letterers { get; set; } = new List(); - public ICollection Editors { get; set; } = new List(); - public ICollection Translators { get; set; } = new List(); - /// - /// Highest Age Rating from all Chapters - /// - public AgeRating AgeRating { get; set; } = AgeRating.Unknown; - /// - /// Earliest Year from all chapters - /// - public int ReleaseYear { get; set; } - /// - /// Language of the content (BCP-47 code) - /// - public string Language { get; set; } = string.Empty; - /// - /// Max number of issues/volumes in the series (Max of Volume/Issue field in ComicInfo) - /// - public int MaxCount { get; set; } = 0; - /// - /// Total number of issues/volumes for the series - /// - public int TotalCount { get; set; } - /// - /// Publication status of the Series - /// - public PublicationStatus PublicationStatus { get; set; } + public int Id { get; set; } + public string Summary { get; set; } = string.Empty; + /// + /// Collections the Series belongs to + /// + public ICollection CollectionTags { get; set; } + /// + /// Genres for the Series + /// + public ICollection Genres { get; set; } + /// + /// Collection of all Tags from underlying chapters for a Series + /// + public ICollection Tags { get; set; } + public ICollection Writers { get; set; } = new List(); + public ICollection CoverArtists { get; set; } = new List(); + public ICollection Publishers { get; set; } = new List(); + public ICollection Characters { get; set; } = new List(); + public ICollection Pencillers { get; set; } = new List(); + public ICollection Inkers { get; set; } = new List(); + public ICollection Colorists { get; set; } = new List(); + public ICollection Letterers { get; set; } = new List(); + public ICollection Editors { get; set; } = new List(); + public ICollection Translators { get; set; } = new List(); + /// + /// Highest Age Rating 
from all Chapters + /// + public AgeRating AgeRating { get; set; } = AgeRating.Unknown; + /// + /// Earliest Year from all chapters + /// + public int ReleaseYear { get; set; } + /// + /// Language of the content (BCP-47 code) + /// + public string Language { get; set; } = string.Empty; + /// + /// Max number of issues/volumes in the series (Max of Volume/Issue field in ComicInfo) + /// + public int MaxCount { get; set; } = 0; + /// + /// Total number of issues/volumes for the series + /// + public int TotalCount { get; set; } + /// + /// Publication status of the Series + /// + public PublicationStatus PublicationStatus { get; set; } - public bool LanguageLocked { get; set; } - public bool SummaryLocked { get; set; } - /// - /// Locked by user so metadata updates from scan loop will not override AgeRating - /// - public bool AgeRatingLocked { get; set; } - /// - /// Locked by user so metadata updates from scan loop will not override PublicationStatus - /// - public bool PublicationStatusLocked { get; set; } - public bool GenresLocked { get; set; } - public bool TagsLocked { get; set; } - public bool WritersLocked { get; set; } - public bool CharactersLocked { get; set; } - public bool ColoristsLocked { get; set; } - public bool EditorsLocked { get; set; } - public bool InkersLocked { get; set; } - public bool LetterersLocked { get; set; } - public bool PencillersLocked { get; set; } - public bool PublishersLocked { get; set; } - public bool TranslatorsLocked { get; set; } - public bool CoverArtistsLocked { get; set; } + public bool LanguageLocked { get; set; } + public bool SummaryLocked { get; set; } + /// + /// Locked by user so metadata updates from scan loop will not override AgeRating + /// + public bool AgeRatingLocked { get; set; } + /// + /// Locked by user so metadata updates from scan loop will not override PublicationStatus + /// + public bool PublicationStatusLocked { get; set; } + public bool GenresLocked { get; set; } + public bool TagsLocked { get; 
set; } + public bool WritersLocked { get; set; } + public bool CharactersLocked { get; set; } + public bool ColoristsLocked { get; set; } + public bool EditorsLocked { get; set; } + public bool InkersLocked { get; set; } + public bool LetterersLocked { get; set; } + public bool PencillersLocked { get; set; } + public bool PublishersLocked { get; set; } + public bool TranslatorsLocked { get; set; } + public bool CoverArtistsLocked { get; set; } + public bool ReleaseYearLocked { get; set; } - public int SeriesId { get; set; } - } + public int SeriesId { get; set; } } diff --git a/API/DTOs/Settings/ServerSettingDTO.cs b/API/DTOs/Settings/ServerSettingDTO.cs index f979684af..041c9300d 100644 --- a/API/DTOs/Settings/ServerSettingDTO.cs +++ b/API/DTOs/Settings/ServerSettingDTO.cs @@ -1,64 +1,68 @@ -using API.Services; +using System.ComponentModel.DataAnnotations; +using API.Services; -namespace API.DTOs.Settings +namespace API.DTOs.Settings; + +public class ServerSettingDto { - public class ServerSettingDto - { - public string CacheDirectory { get; set; } - public string TaskScan { get; set; } - /// - /// Logging level for server. Managed in appsettings.json. - /// - public string LoggingLevel { get; set; } - public string TaskBackup { get; set; } - /// - /// Port the server listens on. Managed in appsettings.json. - /// - public int Port { get; set; } - /// - /// Allows anonymous information to be collected and sent to KavitaStats - /// - public bool AllowStatCollection { get; set; } - /// - /// Enables OPDS connections to be made to the server. - /// - public bool EnableOpds { get; set; } - /// - /// Base Url for the kavita. Requires restart to take effect. - /// - public string BaseUrl { get; set; } - /// - /// Where Bookmarks are stored. - /// - /// If null or empty string, will default back to default install setting aka - public string BookmarksDirectory { get; set; } - /// - /// Email service to use for the invite user flow, forgot password, etc. 
- /// - /// If null or empty string, will default back to default install setting aka - public string EmailServiceUrl { get; set; } - public string InstallVersion { get; set; } - /// - /// Represents a unique Id to this Kavita installation. Only used in Stats to identify unique installs. - /// - public string InstallId { get; set; } - /// - /// If the server should save bookmarks as WebP encoding - /// - public bool ConvertBookmarkToWebP { get; set; } - /// - /// If the Swagger UI Should be exposed. Does not require authentication, but does require a JWT. - /// - public bool EnableSwaggerUi { get; set; } - - /// - /// The amount of Backups before cleanup - /// - /// Value should be between 1 and 30 - public int TotalBackups { get; set; } = 30; - /// - /// If Kavita should watch the library folders and process changes - /// - public bool EnableFolderWatching { get; set; } = true; - } + public string CacheDirectory { get; set; } + public string TaskScan { get; set; } + /// + /// Logging level for server. Managed in appsettings.json. + /// + public string LoggingLevel { get; set; } + public string TaskBackup { get; set; } + /// + /// Port the server listens on. Managed in appsettings.json. + /// + public int Port { get; set; } + /// + /// Allows anonymous information to be collected and sent to KavitaStats + /// + public bool AllowStatCollection { get; set; } + /// + /// Enables OPDS connections to be made to the server. + /// + public bool EnableOpds { get; set; } + /// + /// Base Url for the kavita. Requires restart to take effect. + /// + public string BaseUrl { get; set; } + /// + /// Where Bookmarks are stored. + /// + /// If null or empty string, will default back to default install setting aka + public string BookmarksDirectory { get; set; } + /// + /// Email service to use for the invite user flow, forgot password, etc. 
+ /// + /// If null or empty string, will default back to default install setting aka + public string EmailServiceUrl { get; set; } + public string InstallVersion { get; set; } + /// + /// Represents a unique Id to this Kavita installation. Only used in Stats to identify unique installs. + /// + public string InstallId { get; set; } + /// + /// If the server should save bookmarks as WebP encoding + /// + public bool ConvertBookmarkToWebP { get; set; } + /// + /// If the Swagger UI Should be exposed. Does not require authentication, but does require a JWT. + /// + public bool EnableSwaggerUi { get; set; } + /// + /// The amount of Backups before cleanup + /// + /// Value should be between 1 and 30 + public int TotalBackups { get; set; } = 30; + /// + /// If Kavita should watch the library folders and process changes + /// + public bool EnableFolderWatching { get; set; } = true; + /// + /// Total number of days worth of logs to keep at a given time. + /// + /// Value should be between 1 and 30 + public int TotalLogs { get; set; } } diff --git a/API/DTOs/Stats/FileFormatDto.cs b/API/DTOs/Stats/FileFormatDto.cs new file mode 100644 index 000000000..67385e746 --- /dev/null +++ b/API/DTOs/Stats/FileFormatDto.cs @@ -0,0 +1,15 @@ +using API.Entities.Enums; + +namespace API.DTOs.Stats; + +public class FileFormatDto +{ + /// + /// The extension with the ., in lowercase + /// + public string Extension { get; set; } + /// + /// Format of extension + /// + public MangaFormat Format { get; set; } +} diff --git a/API/DTOs/Stats/ServerInfoDto.cs b/API/DTOs/Stats/ServerInfoDto.cs index 4b037a108..58700a770 100644 --- a/API/DTOs/Stats/ServerInfoDto.cs +++ b/API/DTOs/Stats/ServerInfoDto.cs @@ -1,123 +1,148 @@ -using API.Entities.Enums; +using System.Collections.Generic; +using API.Entities.Enums; +using Microsoft.AspNetCore.Mvc.RazorPages; -namespace API.DTOs.Stats +namespace API.DTOs.Stats; + +/// +/// Represents information about a Kavita Installation +/// +public class 
ServerInfoDto { /// - /// Represents information about a Kavita Installation + /// Unique Id that represents a unique install /// - public class ServerInfoDto - { - /// - /// Unique Id that represents a unique install - /// - public string InstallId { get; set; } - public string Os { get; set; } - /// - /// If the Kavita install is using Docker - /// - public bool IsDocker { get; set; } - /// - /// Version of .NET instance is running - /// - public string DotnetVersion { get; set; } - /// - /// Version of Kavita - /// - public string KavitaVersion { get; set; } - /// - /// Number of Cores on the instance - /// - public int NumOfCores { get; set; } - /// - /// The number of libraries on the instance - /// - public int NumberOfLibraries { get; set; } - /// - /// Does any user have bookmarks - /// - public bool HasBookmarks { get; set; } - /// - /// The site theme the install is using - /// - /// Introduced in v0.5.2 - public string ActiveSiteTheme { get; set; } - /// - /// The reading mode the main user has as a preference - /// - /// Introduced in v0.5.2 - public ReaderMode MangaReaderMode { get; set; } + public string InstallId { get; set; } + public string Os { get; set; } + /// + /// If the Kavita install is using Docker + /// + public bool IsDocker { get; set; } + /// + /// Version of .NET instance is running + /// + public string DotnetVersion { get; set; } + /// + /// Version of Kavita + /// + public string KavitaVersion { get; set; } + /// + /// Number of Cores on the instance + /// + public int NumOfCores { get; set; } + /// + /// The number of libraries on the instance + /// + public int NumberOfLibraries { get; set; } + /// + /// Does any user have bookmarks + /// + public bool HasBookmarks { get; set; } + /// + /// The site theme the install is using + /// + /// Introduced in v0.5.2 + public string ActiveSiteTheme { get; set; } + /// + /// The reading mode the main user has as a preference + /// + /// Introduced in v0.5.2 + public ReaderMode 
MangaReaderMode { get; set; } - /// - /// Number of users on the install - /// - /// Introduced in v0.5.2 - public int NumberOfUsers { get; set; } + /// + /// Number of users on the install + /// + /// Introduced in v0.5.2 + public int NumberOfUsers { get; set; } - /// - /// Number of collections on the install - /// - /// Introduced in v0.5.2 - public int NumberOfCollections { get; set; } - /// - /// Number of reading lists on the install (Sum of all users) - /// - /// Introduced in v0.5.2 - public int NumberOfReadingLists { get; set; } - /// - /// Is OPDS enabled - /// - /// Introduced in v0.5.2 - public bool OPDSEnabled { get; set; } - /// - /// Total number of files in the instance - /// - /// Introduced in v0.5.2 - public int TotalFiles { get; set; } - /// - /// Total number of Genres in the instance - /// - /// Introduced in v0.5.4 - public int TotalGenres { get; set; } - /// - /// Total number of People in the instance - /// - /// Introduced in v0.5.4 - public int TotalPeople { get; set; } - /// - /// Is this instance storing bookmarks as WebP - /// - /// Introduced in v0.5.4 - public bool StoreBookmarksAsWebP { get; set; } - /// - /// Number of users on this instance using Card Layout - /// - /// Introduced in v0.5.4 - public int UsersOnCardLayout { get; set; } - /// - /// Number of users on this instance using List Layout - /// - /// Introduced in v0.5.4 - public int UsersOnListLayout { get; set; } - /// - /// Max number of Series for any library on the instance - /// - /// Introduced in v0.5.4 - public int MaxSeriesInALibrary { get; set; } - /// - /// Max number of Volumes for any library on the instance - /// - /// Introduced in v0.5.4 - public int MaxVolumesInASeries { get; set; } - /// - /// Max number of Chapters for any library on the instance - /// - /// Introduced in v0.5.4 - public int MaxChaptersInASeries { get; set; } - /// - /// Does this instance have relationships setup between series - /// - /// Introduced in v0.5.4 - public bool 
UsingSeriesRelationships { get; set; } - - } + /// + /// Number of collections on the install + /// + /// Introduced in v0.5.2 + public int NumberOfCollections { get; set; } + /// + /// Number of reading lists on the install (Sum of all users) + /// + /// Introduced in v0.5.2 + public int NumberOfReadingLists { get; set; } + /// + /// Is OPDS enabled + /// + /// Introduced in v0.5.2 + public bool OPDSEnabled { get; set; } + /// + /// Total number of files in the instance + /// + /// Introduced in v0.5.2 + public int TotalFiles { get; set; } + /// + /// Total number of Genres in the instance + /// + /// Introduced in v0.5.4 + public int TotalGenres { get; set; } + /// + /// Total number of People in the instance + /// + /// Introduced in v0.5.4 + public int TotalPeople { get; set; } + /// + /// Is this instance storing bookmarks as WebP + /// + /// Introduced in v0.5.4 + public bool StoreBookmarksAsWebP { get; set; } + /// + /// Number of users on this instance using Card Layout + /// + /// Introduced in v0.5.4 + public int UsersOnCardLayout { get; set; } + /// + /// Number of users on this instance using List Layout + /// + /// Introduced in v0.5.4 + public int UsersOnListLayout { get; set; } + /// + /// Max number of Series for any library on the instance + /// + /// Introduced in v0.5.4 + public int MaxSeriesInALibrary { get; set; } + /// + /// Max number of Volumes for any library on the instance + /// + /// Introduced in v0.5.4 + public int MaxVolumesInASeries { get; set; } + /// + /// Max number of Chapters for any library on the instance + /// + /// Introduced in v0.5.4 + public int MaxChaptersInASeries { get; set; } + /// + /// Does this instance have relationships setup between series + /// + /// Introduced in v0.5.4 + public bool UsingSeriesRelationships { get; set; } + /// + /// A list of background colors set on the instance + /// + /// Introduced in v0.6.0 + public IEnumerable MangaReaderBackgroundColors { get; set; } + /// + /// A list of Page Split 
defaults being used on the instance + /// + /// Introduced in v0.6.0 + public IEnumerable MangaReaderPageSplittingModes { get; set; } + /// + /// A list of Layout Mode defaults being used on the instance + /// + /// Introduced in v0.6.0 + public IEnumerable MangaReaderLayoutModes { get; set; } + /// + /// A list of file formats existing in the instance + /// + /// Introduced in v0.6.0 + public IEnumerable FileFormats { get; set; } + /// + /// If there is at least one user that is using an age restricted profile on the instance + /// + /// Introduced in v0.6.0 + public bool UsingRestrictedProfiles { get; set; } } diff --git a/API/DTOs/Update/UpdateNotificationDto.cs b/API/DTOs/Update/UpdateNotificationDto.cs index 66c979cc4..030227a45 100644 --- a/API/DTOs/Update/UpdateNotificationDto.cs +++ b/API/DTOs/Update/UpdateNotificationDto.cs @@ -1,42 +1,41 @@ -namespace API.DTOs.Update +namespace API.DTOs.Update; + +/// +/// Update Notification denoting a new release available for user to update to +/// +public class UpdateNotificationDto { /// - /// Update Notification denoting a new release available for user to update to + /// Current installed Version /// - public class UpdateNotificationDto - { - /// - /// Current installed Version - /// - public string CurrentVersion { get; init; } - /// - /// Semver of the release version - /// 0.4.3 - /// - public string UpdateVersion { get; init; } - /// - /// Release body in HTML - /// - public string UpdateBody { get; init; } - /// - /// Title of the release - /// - public string UpdateTitle { get; init; } - /// - /// Github Url - /// - public string UpdateUrl { get; init; } - /// - /// If this install is within Docker - /// - public bool IsDocker { get; init; } - /// - /// Is this a pre-release - /// - public bool IsPrerelease { get; init; } - /// - /// Date of the publish - /// - public string PublishDate { get; init; } - } + public string CurrentVersion { get; init; } + /// + /// Semver of the release version + /// 0.4.3 + /// 
+ public string UpdateVersion { get; init; } + /// + /// Release body in HTML + /// + public string UpdateBody { get; init; } + /// + /// Title of the release + /// + public string UpdateTitle { get; init; } + /// + /// Github Url + /// + public string UpdateUrl { get; init; } + /// + /// If this install is within Docker + /// + public bool IsDocker { get; init; } + /// + /// Is this a pre-release + /// + public bool IsPrerelease { get; init; } + /// + /// Date of the publish + /// + public string PublishDate { get; init; } } diff --git a/API/DTOs/UpdateLibraryDto.cs b/API/DTOs/UpdateLibraryDto.cs index f0908c7a2..4f527cb60 100644 --- a/API/DTOs/UpdateLibraryDto.cs +++ b/API/DTOs/UpdateLibraryDto.cs @@ -1,13 +1,12 @@ using System.Collections.Generic; using API.Entities.Enums; -namespace API.DTOs +namespace API.DTOs; + +public class UpdateLibraryDto { - public class UpdateLibraryDto - { - public int Id { get; init; } - public string Name { get; init; } - public LibraryType Type { get; set; } - public IEnumerable Folders { get; init; } - } + public int Id { get; init; } + public string Name { get; init; } + public LibraryType Type { get; set; } + public IEnumerable Folders { get; init; } } diff --git a/API/DTOs/UpdateLibraryForUserDto.cs b/API/DTOs/UpdateLibraryForUserDto.cs index 5280f3dd7..b2c752b22 100644 --- a/API/DTOs/UpdateLibraryForUserDto.cs +++ b/API/DTOs/UpdateLibraryForUserDto.cs @@ -1,10 +1,9 @@ using System.Collections.Generic; -namespace API.DTOs +namespace API.DTOs; + +public class UpdateLibraryForUserDto { - public class UpdateLibraryForUserDto - { - public string Username { get; init; } - public IEnumerable SelectedLibraries { get; init; } - } -} \ No newline at end of file + public string Username { get; init; } + public IEnumerable SelectedLibraries { get; init; } +} diff --git a/API/DTOs/UpdateRBSDto.cs b/API/DTOs/UpdateRBSDto.cs index 8bf37d314..f23edf784 100644 --- a/API/DTOs/UpdateRBSDto.cs +++ b/API/DTOs/UpdateRBSDto.cs @@ -1,10 +1,9 @@ using 
System.Collections.Generic; -namespace API.DTOs +namespace API.DTOs; + +public class UpdateRbsDto { - public class UpdateRbsDto - { - public string Username { get; init; } - public IList Roles { get; init; } - } -} \ No newline at end of file + public string Username { get; init; } + public IList Roles { get; init; } +} diff --git a/API/DTOs/UpdateSeriesDto.cs b/API/DTOs/UpdateSeriesDto.cs index 8f10373e4..c5db42e78 100644 --- a/API/DTOs/UpdateSeriesDto.cs +++ b/API/DTOs/UpdateSeriesDto.cs @@ -1,15 +1,14 @@ -namespace API.DTOs -{ - public class UpdateSeriesDto - { - public int Id { get; init; } - public string Name { get; init; } - public string LocalizedName { get; init; } - public string SortName { get; init; } - public bool CoverImageLocked { get; set; } +namespace API.DTOs; - public bool NameLocked { get; set; } - public bool SortNameLocked { get; set; } - public bool LocalizedNameLocked { get; set; } - } +public class UpdateSeriesDto +{ + public int Id { get; init; } + public string Name { get; init; } + public string LocalizedName { get; init; } + public string SortName { get; init; } + public bool CoverImageLocked { get; set; } + + public bool NameLocked { get; set; } + public bool SortNameLocked { get; set; } + public bool LocalizedNameLocked { get; set; } } diff --git a/API/DTOs/UpdateSeriesMetadataDto.cs b/API/DTOs/UpdateSeriesMetadataDto.cs index 08d3e77e6..f2724b628 100644 --- a/API/DTOs/UpdateSeriesMetadataDto.cs +++ b/API/DTOs/UpdateSeriesMetadataDto.cs @@ -1,11 +1,10 @@ using System.Collections.Generic; using API.DTOs.CollectionTags; -namespace API.DTOs +namespace API.DTOs; + +public class UpdateSeriesMetadataDto { - public class UpdateSeriesMetadataDto - { - public SeriesMetadataDto SeriesMetadata { get; set; } - public ICollection CollectionTags { get; set; } - } + public SeriesMetadataDto SeriesMetadata { get; set; } + public ICollection CollectionTags { get; set; } } diff --git a/API/DTOs/UpdateSeriesRatingDto.cs 
b/API/DTOs/UpdateSeriesRatingDto.cs index d8b8dac2d..167d321bb 100644 --- a/API/DTOs/UpdateSeriesRatingDto.cs +++ b/API/DTOs/UpdateSeriesRatingDto.cs @@ -1,12 +1,11 @@ using System.ComponentModel.DataAnnotations; -namespace API.DTOs +namespace API.DTOs; + +public class UpdateSeriesRatingDto { - public class UpdateSeriesRatingDto - { - public int SeriesId { get; init; } - public int UserRating { get; init; } - [MaxLength(1000)] - public string UserReview { get; init; } - } -} \ No newline at end of file + public int SeriesId { get; init; } + public int UserRating { get; init; } + [MaxLength(1000)] + public string UserReview { get; init; } +} diff --git a/API/DTOs/Uploads/UploadFileDto.cs b/API/DTOs/Uploads/UploadFileDto.cs index 42b889903..374f43b23 100644 --- a/API/DTOs/Uploads/UploadFileDto.cs +++ b/API/DTOs/Uploads/UploadFileDto.cs @@ -1,14 +1,13 @@ -namespace API.DTOs.Uploads +namespace API.DTOs.Uploads; + +public class UploadFileDto { - public class UploadFileDto - { - /// - /// Id of the Entity - /// - public int Id { get; set; } - /// - /// Base Url encoding of the file to upload from (can be null) - /// - public string Url { get; set; } - } + /// + /// Id of the Entity + /// + public int Id { get; set; } + /// + /// Base Url encoding of the file to upload from (can be null) + /// + public string Url { get; set; } } diff --git a/API/DTOs/UserDto.cs b/API/DTOs/UserDto.cs index dc6fc8b43..1e9cba267 100644 --- a/API/DTOs/UserDto.cs +++ b/API/DTOs/UserDto.cs @@ -1,13 +1,16 @@  -namespace API.DTOs +using API.DTOs.Account; +using API.Entities.Enums; + +namespace API.DTOs; + +public class UserDto { - public class UserDto - { - public string Username { get; init; } - public string Email { get; init; } - public string Token { get; set; } - public string RefreshToken { get; set; } - public string ApiKey { get; init; } - public UserPreferencesDto Preferences { get; set; } - } + public string Username { get; init; } + public string Email { get; init; } + public string 
Token { get; set; } + public string RefreshToken { get; set; } + public string ApiKey { get; init; } + public UserPreferencesDto Preferences { get; set; } + public AgeRestrictionDto AgeRestriction { get; init; } } diff --git a/API/DTOs/UserPreferencesDto.cs b/API/DTOs/UserPreferencesDto.cs index 255c21c1f..6e5d51442 100644 --- a/API/DTOs/UserPreferencesDto.cs +++ b/API/DTOs/UserPreferencesDto.cs @@ -1,120 +1,122 @@ -using System; -using System.ComponentModel.DataAnnotations; -using API.DTOs.Theme; +using System.ComponentModel.DataAnnotations; using API.Entities; using API.Entities.Enums; using API.Entities.Enums.UserPreferences; -namespace API.DTOs +namespace API.DTOs; + +public class UserPreferencesDto { - public class UserPreferencesDto - { - /// - /// Manga Reader Option: What direction should the next/prev page buttons go - /// - [Required] - public ReadingDirection ReadingDirection { get; set; } - /// - /// Manga Reader Option: How should the image be scaled to screen - /// - [Required] - public ScalingOption ScalingOption { get; set; } - /// - /// Manga Reader Option: Which side of a split image should we show first - /// - [Required] - public PageSplitOption PageSplitOption { get; set; } - /// - /// Manga Reader Option: How the manga reader should perform paging or reading of the file - /// - /// Webtoon uses scrolling to page, LeftRight uses paging by clicking left/right side of reader, UpDown uses paging - /// by clicking top/bottom sides of reader. 
- /// - /// - [Required] - public ReaderMode ReaderMode { get; set; } - /// - /// Manga Reader Option: How many pages to display in the reader at once - /// - [Required] - public LayoutMode LayoutMode { get; set; } - /// - /// Manga Reader Option: Background color of the reader - /// - [Required] - public string BackgroundColor { get; set; } = "#000000"; - /// - /// Manga Reader Option: Allow the menu to close after 6 seconds without interaction - /// - [Required] - public bool AutoCloseMenu { get; set; } - /// - /// Manga Reader Option: Show screen hints to the user on some actions, ie) pagination direction change - /// - [Required] - public bool ShowScreenHints { get; set; } = true; - /// - /// Book Reader Option: Override extra Margin - /// - [Required] - public int BookReaderMargin { get; set; } - /// - /// Book Reader Option: Override line-height - /// - [Required] - public int BookReaderLineSpacing { get; set; } - /// - /// Book Reader Option: Override font size - /// - [Required] - public int BookReaderFontSize { get; set; } - /// - /// Book Reader Option: Maps to the default Kavita font-family (inherit) or an override - /// - [Required] - public string BookReaderFontFamily { get; set; } - /// - /// Book Reader Option: Allows tapping on side of screens to paginate - /// - [Required] - public bool BookReaderTapToPaginate { get; set; } - /// - /// Book Reader Option: What direction should the next/prev page buttons go - /// - [Required] - public ReadingDirection BookReaderReadingDirection { get; set; } - /// - /// UI Site Global Setting: The UI theme the user should use. - /// - /// Should default to Dark - [Required] - public SiteTheme Theme { get; set; } - [Required] - public string BookReaderThemeName { get; set; } - [Required] - public BookPageLayoutMode BookReaderLayoutMode { get; set; } - /// - /// Book Reader Option: A flag that hides the menu-ing system behind a click on the screen. 
This should be used with tap to paginate, but the app doesn't enforce this. - /// - /// Defaults to false - [Required] - public bool BookReaderImmersiveMode { get; set; } = false; - /// - /// Global Site Option: If the UI should layout items as Cards or List items - /// - /// Defaults to Cards - [Required] - public PageLayoutMode GlobalPageLayoutMode { get; set; } = PageLayoutMode.Cards; - /// - /// UI Site Global Setting: If unread summaries should be blurred until expanded or unless user has read it already - /// - /// Defaults to false - [Required] - public bool BlurUnreadSummaries { get; set; } = false; - /// - /// UI Site Global Setting: Should Kavita prompt user to confirm downloads that are greater than 100 MB. - /// - [Required] - public bool PromptForDownloadSize { get; set; } = true; - } + /// + /// Manga Reader Option: What direction should the next/prev page buttons go + /// + [Required] + public ReadingDirection ReadingDirection { get; set; } + /// + /// Manga Reader Option: How should the image be scaled to screen + /// + [Required] + public ScalingOption ScalingOption { get; set; } + /// + /// Manga Reader Option: Which side of a split image should we show first + /// + [Required] + public PageSplitOption PageSplitOption { get; set; } + /// + /// Manga Reader Option: How the manga reader should perform paging or reading of the file + /// + /// Webtoon uses scrolling to page, LeftRight uses paging by clicking left/right side of reader, UpDown uses paging + /// by clicking top/bottom sides of reader. 
+ /// + /// + [Required] + public ReaderMode ReaderMode { get; set; } + /// + /// Manga Reader Option: How many pages to display in the reader at once + /// + [Required] + public LayoutMode LayoutMode { get; set; } + /// + /// Manga Reader Option: Background color of the reader + /// + [Required] + public string BackgroundColor { get; set; } = "#000000"; + /// + /// Manga Reader Option: Allow the menu to close after 6 seconds without interaction + /// + [Required] + public bool AutoCloseMenu { get; set; } + /// + /// Manga Reader Option: Show screen hints to the user on some actions, ie) pagination direction change + /// + [Required] + public bool ShowScreenHints { get; set; } = true; + /// + /// Book Reader Option: Override extra Margin + /// + [Required] + public int BookReaderMargin { get; set; } + /// + /// Book Reader Option: Override line-height + /// + [Required] + public int BookReaderLineSpacing { get; set; } + /// + /// Book Reader Option: Override font size + /// + [Required] + public int BookReaderFontSize { get; set; } + /// + /// Book Reader Option: Maps to the default Kavita font-family (inherit) or an override + /// + [Required] + public string BookReaderFontFamily { get; set; } + /// + /// Book Reader Option: Allows tapping on side of screens to paginate + /// + [Required] + public bool BookReaderTapToPaginate { get; set; } + /// + /// Book Reader Option: What direction should the next/prev page buttons go + /// + [Required] + public ReadingDirection BookReaderReadingDirection { get; set; } + + /// + /// UI Site Global Setting: The UI theme the user should use. + /// + /// Should default to Dark + public SiteTheme Theme { get; set; } + [Required] + public string BookReaderThemeName { get; set; } + [Required] + public BookPageLayoutMode BookReaderLayoutMode { get; set; } + /// + /// Book Reader Option: A flag that hides the menu-ing system behind a click on the screen. This should be used with tap to paginate, but the app doesn't enforce this. 
+ /// + /// Defaults to false + [Required] + public bool BookReaderImmersiveMode { get; set; } = false; + /// + /// Global Site Option: If the UI should layout items as Cards or List items + /// + /// Defaults to Cards + [Required] + public PageLayoutMode GlobalPageLayoutMode { get; set; } = PageLayoutMode.Cards; + /// + /// UI Site Global Setting: If unread summaries should be blurred until expanded or unless user has read it already + /// + /// Defaults to false + [Required] + public bool BlurUnreadSummaries { get; set; } = false; + /// + /// UI Site Global Setting: Should Kavita prompt user to confirm downloads that are greater than 100 MB. + /// + [Required] + public bool PromptForDownloadSize { get; set; } = true; + /// + /// UI Site Global Setting: Should Kavita disable CSS transitions + /// + [Required] + public bool NoTransitions { get; set; } = false; } diff --git a/API/DTOs/VolumeDto.cs b/API/DTOs/VolumeDto.cs index 5a20e61a5..4ef20950a 100644 --- a/API/DTOs/VolumeDto.cs +++ b/API/DTOs/VolumeDto.cs @@ -4,26 +4,25 @@ using System.Collections.Generic; using API.Entities; using API.Entities.Interfaces; -namespace API.DTOs +namespace API.DTOs; + +public class VolumeDto : IHasReadTimeEstimate { - public class VolumeDto : IHasReadTimeEstimate - { - public int Id { get; set; } - /// - public int Number { get; set; } - /// - public string Name { get; set; } - public int Pages { get; set; } - public int PagesRead { get; set; } - public DateTime LastModified { get; set; } - public DateTime Created { get; set; } - public int SeriesId { get; set; } - public ICollection Chapters { get; set; } - /// - public int MinHoursToRead { get; set; } - /// - public int MaxHoursToRead { get; set; } - /// - public int AvgHoursToRead { get; set; } - } + public int Id { get; set; } + /// + public int Number { get; set; } + /// + public string Name { get; set; } + public int Pages { get; set; } + public int PagesRead { get; set; } + public DateTime LastModified { get; set; } + public 
DateTime Created { get; set; } + public int SeriesId { get; set; } + public ICollection Chapters { get; set; } + /// + public int MinHoursToRead { get; set; } + /// + public int MaxHoursToRead { get; set; } + /// + public int AvgHoursToRead { get; set; } } diff --git a/API/Data/DataContext.cs b/API/Data/DataContext.cs index 7c76e4a78..c00289227 100644 --- a/API/Data/DataContext.cs +++ b/API/Data/DataContext.cs @@ -11,141 +11,141 @@ using Microsoft.AspNetCore.Identity.EntityFrameworkCore; using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.ChangeTracking; -namespace API.Data +namespace API.Data; + +public sealed class DataContext : IdentityDbContext, AppUserRole, IdentityUserLogin, + IdentityRoleClaim, IdentityUserToken> { - public sealed class DataContext : IdentityDbContext, AppUserRole, IdentityUserLogin, - IdentityRoleClaim, IdentityUserToken> + public DataContext(DbContextOptions options) : base(options) { - public DataContext(DbContextOptions options) : base(options) - { - ChangeTracker.Tracked += OnEntityTracked; - ChangeTracker.StateChanged += OnEntityStateChanged; - } - - public DbSet Library { get; set; } - public DbSet Series { get; set; } - public DbSet Chapter { get; set; } - public DbSet Volume { get; set; } - public DbSet AppUser { get; set; } - public DbSet MangaFile { get; set; } - public DbSet AppUserProgresses { get; set; } - public DbSet AppUserRating { get; set; } - public DbSet ServerSetting { get; set; } - public DbSet AppUserPreferences { get; set; } - public DbSet SeriesMetadata { get; set; } - public DbSet CollectionTag { get; set; } - public DbSet AppUserBookmark { get; set; } - public DbSet ReadingList { get; set; } - public DbSet ReadingListItem { get; set; } - public DbSet Person { get; set; } - public DbSet Genre { get; set; } - public DbSet Tag { get; set; } - public DbSet SiteTheme { get; set; } - public DbSet SeriesRelation { get; set; } - public DbSet FolderPath { get; set; } - - - protected override void 
OnModelCreating(ModelBuilder builder) - { - base.OnModelCreating(builder); - - - builder.Entity() - .HasMany(ur => ur.UserRoles) - .WithOne(u => u.User) - .HasForeignKey(ur => ur.UserId) - .IsRequired(); - - builder.Entity() - .HasMany(ur => ur.UserRoles) - .WithOne(u => u.Role) - .HasForeignKey(ur => ur.RoleId) - .IsRequired(); - - builder.Entity() - .HasOne(pt => pt.Series) - .WithMany(p => p.Relations) - .HasForeignKey(pt => pt.SeriesId) - .OnDelete(DeleteBehavior.ClientCascade); - - builder.Entity() - .HasOne(pt => pt.TargetSeries) - .WithMany(t => t.RelationOf) - .HasForeignKey(pt => pt.TargetSeriesId) - .OnDelete(DeleteBehavior.ClientCascade); - - - builder.Entity() - .Property(b => b.BookThemeName) - .HasDefaultValue("Dark"); - builder.Entity() - .Property(b => b.BackgroundColor) - .HasDefaultValue("#000000"); - - builder.Entity() - .Property(b => b.GlobalPageLayoutMode) - .HasDefaultValue(PageLayoutMode.Cards); - } - - - private static void OnEntityTracked(object sender, EntityTrackedEventArgs e) - { - if (!e.FromQuery && e.Entry.State == EntityState.Added && e.Entry.Entity is IEntityDate entity) - { - entity.Created = DateTime.Now; - entity.LastModified = DateTime.Now; - } - - } - - private static void OnEntityStateChanged(object sender, EntityStateChangedEventArgs e) - { - if (e.NewState == EntityState.Modified && e.Entry.Entity is IEntityDate entity) - entity.LastModified = DateTime.Now; - } - - private void OnSaveChanges() - { - foreach (var saveEntity in ChangeTracker.Entries() - .Where(e => e.State == EntityState.Modified) - .Select(entry => entry.Entity) - .OfType()) - { - saveEntity.OnSavingChanges(); - } - } - - #region SaveChanges overrides - - public override int SaveChanges() - { - this.OnSaveChanges(); - - return base.SaveChanges(); - } - - public override int SaveChanges(bool acceptAllChangesOnSuccess) - { - this.OnSaveChanges(); - - return base.SaveChanges(acceptAllChangesOnSuccess); - } - - public override Task SaveChangesAsync(bool 
acceptAllChangesOnSuccess, CancellationToken cancellationToken = default(CancellationToken)) - { - this.OnSaveChanges(); - - return base.SaveChangesAsync(acceptAllChangesOnSuccess, cancellationToken); - } - - public override Task SaveChangesAsync(CancellationToken cancellationToken = default(CancellationToken)) - { - this.OnSaveChanges(); - - return base.SaveChangesAsync(cancellationToken); - } - - #endregion + ChangeTracker.Tracked += OnEntityTracked; + ChangeTracker.StateChanged += OnEntityStateChanged; } + + public DbSet Library { get; set; } + public DbSet Series { get; set; } + public DbSet Chapter { get; set; } + public DbSet Volume { get; set; } + public DbSet AppUser { get; set; } + public DbSet MangaFile { get; set; } + public DbSet AppUserProgresses { get; set; } + public DbSet AppUserRating { get; set; } + public DbSet ServerSetting { get; set; } + public DbSet AppUserPreferences { get; set; } + public DbSet SeriesMetadata { get; set; } + public DbSet CollectionTag { get; set; } + public DbSet AppUserBookmark { get; set; } + public DbSet ReadingList { get; set; } + public DbSet ReadingListItem { get; set; } + public DbSet Person { get; set; } + public DbSet Genre { get; set; } + public DbSet Tag { get; set; } + public DbSet SiteTheme { get; set; } + public DbSet SeriesRelation { get; set; } + public DbSet FolderPath { get; set; } + public DbSet Device { get; set; } + + + protected override void OnModelCreating(ModelBuilder builder) + { + base.OnModelCreating(builder); + + + builder.Entity() + .HasMany(ur => ur.UserRoles) + .WithOne(u => u.User) + .HasForeignKey(ur => ur.UserId) + .IsRequired(); + + builder.Entity() + .HasMany(ur => ur.UserRoles) + .WithOne(u => u.Role) + .HasForeignKey(ur => ur.RoleId) + .IsRequired(); + + builder.Entity() + .HasOne(pt => pt.Series) + .WithMany(p => p.Relations) + .HasForeignKey(pt => pt.SeriesId) + .OnDelete(DeleteBehavior.ClientCascade); + + builder.Entity() + .HasOne(pt => pt.TargetSeries) + .WithMany(t => 
t.RelationOf) + .HasForeignKey(pt => pt.TargetSeriesId) + .OnDelete(DeleteBehavior.ClientCascade); + + + builder.Entity() + .Property(b => b.BookThemeName) + .HasDefaultValue("Dark"); + builder.Entity() + .Property(b => b.BackgroundColor) + .HasDefaultValue("#000000"); + + builder.Entity() + .Property(b => b.GlobalPageLayoutMode) + .HasDefaultValue(PageLayoutMode.Cards); + } + + + private static void OnEntityTracked(object sender, EntityTrackedEventArgs e) + { + if (!e.FromQuery && e.Entry.State == EntityState.Added && e.Entry.Entity is IEntityDate entity) + { + entity.Created = DateTime.Now; + entity.LastModified = DateTime.Now; + } + + } + + private static void OnEntityStateChanged(object sender, EntityStateChangedEventArgs e) + { + if (e.NewState == EntityState.Modified && e.Entry.Entity is IEntityDate entity) + entity.LastModified = DateTime.Now; + } + + private void OnSaveChanges() + { + foreach (var saveEntity in ChangeTracker.Entries() + .Where(e => e.State == EntityState.Modified) + .Select(entry => entry.Entity) + .OfType()) + { + saveEntity.OnSavingChanges(); + } + } + + #region SaveChanges overrides + + public override int SaveChanges() + { + this.OnSaveChanges(); + + return base.SaveChanges(); + } + + public override int SaveChanges(bool acceptAllChangesOnSuccess) + { + this.OnSaveChanges(); + + return base.SaveChanges(acceptAllChangesOnSuccess); + } + + public override Task SaveChangesAsync(bool acceptAllChangesOnSuccess, CancellationToken cancellationToken = default(CancellationToken)) + { + this.OnSaveChanges(); + + return base.SaveChangesAsync(acceptAllChangesOnSuccess, cancellationToken); + } + + public override Task SaveChangesAsync(CancellationToken cancellationToken = default(CancellationToken)) + { + this.OnSaveChanges(); + + return base.SaveChangesAsync(cancellationToken); + } + + #endregion } diff --git a/API/Data/DbFactory.cs b/API/Data/DbFactory.cs index 921b55c54..891c10843 100644 --- a/API/Data/DbFactory.cs +++ b/API/Data/DbFactory.cs @@ 
-9,162 +9,169 @@ using API.Extensions; using API.Parser; using API.Services.Tasks; -namespace API.Data +namespace API.Data; + +/// +/// Responsible for creating Series, Volume, Chapter, MangaFiles for use in +/// +public static class DbFactory { - /// - /// Responsible for creating Series, Volume, Chapter, MangaFiles for use in - /// - public static class DbFactory + public static Series Series(string name) { - public static Series Series(string name) + return new Series { - return new Series - { - Name = name, - OriginalName = name, - LocalizedName = name, - NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(name), - NormalizedLocalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(name), - SortName = name, - Volumes = new List(), - Metadata = SeriesMetadata(Array.Empty()) - }; - } - - public static Series Series(string name, string localizedName) - { - if (string.IsNullOrEmpty(localizedName)) - { - localizedName = name; - } - return new Series - { - Name = name, - OriginalName = name, - LocalizedName = localizedName, - NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(name), - NormalizedLocalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(localizedName), - SortName = name, - Volumes = new List(), - Metadata = SeriesMetadata(Array.Empty()) - }; - } - - public static Volume Volume(string volumeNumber) - { - return new Volume() - { - Name = volumeNumber, - Number = (int) Services.Tasks.Scanner.Parser.Parser.MinNumberFromRange(volumeNumber), - Chapters = new List() - }; - } - - public static Chapter Chapter(ParserInfo info) - { - var specialTreatment = info.IsSpecialInfo(); - var specialTitle = specialTreatment ? info.Filename : info.Chapters; - return new Chapter() - { - Number = specialTreatment ? "0" : Services.Tasks.Scanner.Parser.Parser.MinNumberFromRange(info.Chapters) + string.Empty, - Range = specialTreatment ? info.Filename : info.Chapters, - Title = (specialTreatment && info.Format == MangaFormat.Epub) - ? 
info.Title - : specialTitle, - Files = new List(), - IsSpecial = specialTreatment, - }; - } - - public static SeriesMetadata SeriesMetadata(ComicInfo info) - { - return SeriesMetadata(Array.Empty()); - } - - public static SeriesMetadata SeriesMetadata(ICollection collectionTags) - { - return new SeriesMetadata() - { - CollectionTags = collectionTags, - Summary = string.Empty - }; - } - - public static CollectionTag CollectionTag(int id, string title, string summary, bool promoted) - { - return new CollectionTag() - { - Id = id, - NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(title?.Trim()).ToUpper(), - Title = title?.Trim(), - Summary = summary?.Trim(), - Promoted = promoted - }; - } - - public static ReadingList ReadingList(string title, string summary, bool promoted) - { - return new ReadingList() - { - NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(title?.Trim()).ToUpper(), - Title = title?.Trim(), - Summary = summary?.Trim(), - Promoted = promoted, - Items = new List() - }; - } - - public static ReadingListItem ReadingListItem(int index, int seriesId, int volumeId, int chapterId) - { - return new ReadingListItem() - { - Order = index, - ChapterId = chapterId, - SeriesId = seriesId, - VolumeId = volumeId - }; - } - - public static Genre Genre(string name, bool external) - { - return new Genre() - { - Title = name.Trim().SentenceCase(), - NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(name), - ExternalTag = external - }; - } - - public static Tag Tag(string name, bool external) - { - return new Tag() - { - Title = name.Trim().SentenceCase(), - NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(name), - ExternalTag = external - }; - } - - public static Person Person(string name, PersonRole role) - { - return new Person() - { - Name = name.Trim(), - NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(name), - Role = role - }; - } - - public static MangaFile MangaFile(string filePath, 
MangaFormat format, int pages) - { - return new MangaFile() - { - FilePath = filePath, - Format = format, - Pages = pages, - LastModified = File.GetLastWriteTime(filePath) // NOTE: Changed this from DateTime.Now - }; - } - + Name = name, + OriginalName = name, + LocalizedName = name, + NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(name), + NormalizedLocalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(name), + SortName = name, + Volumes = new List(), + Metadata = SeriesMetadata(Array.Empty()) + }; } + + public static Series Series(string name, string localizedName) + { + if (string.IsNullOrEmpty(localizedName)) + { + localizedName = name; + } + return new Series + { + Name = name, + OriginalName = name, + LocalizedName = localizedName, + NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(name), + NormalizedLocalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(localizedName), + SortName = name, + Volumes = new List(), + Metadata = SeriesMetadata(Array.Empty()) + }; + } + + public static Volume Volume(string volumeNumber) + { + return new Volume() + { + Name = volumeNumber, + Number = (int) Services.Tasks.Scanner.Parser.Parser.MinNumberFromRange(volumeNumber), + Chapters = new List() + }; + } + + public static Chapter Chapter(ParserInfo info) + { + var specialTreatment = info.IsSpecialInfo(); + var specialTitle = specialTreatment ? info.Filename : info.Chapters; + return new Chapter() + { + Number = specialTreatment ? "0" : Services.Tasks.Scanner.Parser.Parser.MinNumberFromRange(info.Chapters) + string.Empty, + Range = specialTreatment ? info.Filename : info.Chapters, + Title = (specialTreatment && info.Format == MangaFormat.Epub) + ? 
info.Title + : specialTitle, + Files = new List(), + IsSpecial = specialTreatment, + }; + } + + public static SeriesMetadata SeriesMetadata(ComicInfo info) + { + return SeriesMetadata(Array.Empty()); + } + + public static SeriesMetadata SeriesMetadata(ICollection collectionTags) + { + return new SeriesMetadata() + { + CollectionTags = collectionTags, + Summary = string.Empty + }; + } + + public static CollectionTag CollectionTag(int id, string title, string summary, bool promoted) + { + return new CollectionTag() + { + Id = id, + NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(title?.Trim()).ToUpper(), + Title = title?.Trim(), + Summary = summary?.Trim(), + Promoted = promoted + }; + } + + public static ReadingList ReadingList(string title, string summary, bool promoted) + { + return new ReadingList() + { + NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(title?.Trim()).ToUpper(), + Title = title?.Trim(), + Summary = summary?.Trim(), + Promoted = promoted, + Items = new List() + }; + } + + public static ReadingListItem ReadingListItem(int index, int seriesId, int volumeId, int chapterId) + { + return new ReadingListItem() + { + Order = index, + ChapterId = chapterId, + SeriesId = seriesId, + VolumeId = volumeId + }; + } + + public static Genre Genre(string name, bool external) + { + return new Genre() + { + Title = name.Trim().SentenceCase(), + NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(name), + ExternalTag = external + }; + } + + public static Tag Tag(string name, bool external) + { + return new Tag() + { + Title = name.Trim().SentenceCase(), + NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(name), + ExternalTag = external + }; + } + + public static Person Person(string name, PersonRole role) + { + return new Person() + { + Name = name.Trim(), + NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(name), + Role = role + }; + } + + public static MangaFile MangaFile(string filePath, 
MangaFormat format, int pages) + { + return new MangaFile() + { + FilePath = filePath, + Format = format, + Pages = pages, + LastModified = File.GetLastWriteTime(filePath) + }; + } + + public static Device Device(string name) + { + return new Device() + { + Name = name, + }; + } + } diff --git a/API/Data/LogLevelOptions.cs b/API/Data/LogLevelOptions.cs deleted file mode 100644 index dfdfd111f..000000000 --- a/API/Data/LogLevelOptions.cs +++ /dev/null @@ -1,9 +0,0 @@ -namespace API.Data -{ - public class LogLevelOptions - { - public const string Logging = "LogLevel"; - - public string Default { get; set; } - } -} \ No newline at end of file diff --git a/API/Data/Metadata/ComicInfo.cs b/API/Data/Metadata/ComicInfo.cs index d34901daa..e1b4ee994 100644 --- a/API/Data/Metadata/ComicInfo.cs +++ b/API/Data/Metadata/ComicInfo.cs @@ -3,122 +3,144 @@ using System.Linq; using API.Entities.Enums; using Kavita.Common.Extensions; -namespace API.Data.Metadata +namespace API.Data.Metadata; + +/// +/// A representation of a ComicInfo.xml file +/// +/// See reference of the loose spec here: https://anansi-project.github.io/docs/comicinfo/documentation +public class ComicInfo { + public string Summary { get; set; } = string.Empty; + public string Title { get; set; } = string.Empty; + public string Series { get; set; } = string.Empty; /// - /// A representation of a ComicInfo.xml file + /// Localized Series name. Not standard. /// - /// See reference of the loose spec here: https://anansi-project.github.io/docs/comicinfo/documentation - public class ComicInfo + public string LocalizedSeries { get; set; } = string.Empty; + public string SeriesSort { get; set; } = string.Empty; + public string Number { get; set; } = string.Empty; + /// + /// The total number of items in the series. 
+ /// + [System.ComponentModel.DefaultValueAttribute(0)] + public int Count { get; set; } = 0; + public string Volume { get; set; } = string.Empty; + public string Notes { get; set; } = string.Empty; + public string Genre { get; set; } = string.Empty; + public int PageCount { get; set; } + // ReSharper disable once InconsistentNaming + /// + /// IETF BCP 47 Code to represent the language of the content + /// + public string LanguageISO { get; set; } = string.Empty; + /// + /// This is the link to where the data was scraped from + /// + public string Web { get; set; } = string.Empty; + [System.ComponentModel.DefaultValueAttribute(0)] + public int Day { get; set; } = 0; + [System.ComponentModel.DefaultValueAttribute(0)] + public int Month { get; set; } = 0; + [System.ComponentModel.DefaultValueAttribute(0)] + public int Year { get; set; } = 0; + + + /// + /// Rating based on the content. Think PG-13, R for movies. See for valid types + /// + public string AgeRating { get; set; } = string.Empty; + /// + /// User's rating of the content + /// + public float UserRating { get; set; } + + public string StoryArc { get; set; } = string.Empty; + public string SeriesGroup { get; set; } = string.Empty; + public string AlternateNumber { get; set; } = string.Empty; + [System.ComponentModel.DefaultValueAttribute(0)] + public int AlternateCount { get; set; } = 0; + public string AlternateSeries { get; set; } = string.Empty; + + /// + /// This is Epub only: calibre:title_sort + /// Represents the sort order for the title + /// + public string TitleSort { get; set; } = string.Empty; + /// + /// This comes from ComicInfo and is free form text. We use this to validate against a set of tags and mark a file as + /// special. + /// + public string Format { get; set; } = string.Empty; + + /// + /// The translator, can be comma separated. 
This is part of ComicInfo.xml draft v2.1 + /// + /// See https://github.com/anansi-project/comicinfo/issues/2 for information about this tag + public string Translator { get; set; } = string.Empty; + /// + /// Misc tags. This is part of ComicInfo.xml draft v2.1 + /// + /// See https://github.com/anansi-project/comicinfo/issues/1 for information about this tag + public string Tags { get; set; } = string.Empty; + + /// + /// This is the Author. For Books, we map creator tag in OPF to this field. Comma separated if multiple. + /// + public string Writer { get; set; } = string.Empty; + public string Penciller { get; set; } = string.Empty; + public string Inker { get; set; } = string.Empty; + public string Colorist { get; set; } = string.Empty; + public string Letterer { get; set; } = string.Empty; + public string CoverArtist { get; set; } = string.Empty; + public string Editor { get; set; } = string.Empty; + public string Publisher { get; set; } = string.Empty; + public string Characters { get; set; } = string.Empty; + + public static AgeRating ConvertAgeRatingToEnum(string value) { - public string Summary { get; set; } = string.Empty; - public string Title { get; set; } = string.Empty; - public string Series { get; set; } = string.Empty; - /// - /// Localized Series name. Not standard. - /// - public string LocalizedSeries { get; set; } = string.Empty; - public string SeriesSort { get; set; } = string.Empty; - public string Number { get; set; } = string.Empty; - /// - /// The total number of items in the series. 
- /// - public int Count { get; set; } = 0; - public string Volume { get; set; } = string.Empty; - public string Notes { get; set; } = string.Empty; - public string Genre { get; set; } = string.Empty; - public int PageCount { get; set; } - // ReSharper disable once InconsistentNaming - /// - /// IETF BCP 47 Code to represent the language of the content - /// - public string LanguageISO { get; set; } = string.Empty; - /// - /// This is the link to where the data was scraped from - /// - public string Web { get; set; } = string.Empty; - public int Day { get; set; } = 0; - public int Month { get; set; } = 0; - public int Year { get; set; } = 0; - - - /// - /// Rating based on the content. Think PG-13, R for movies. See for valid types - /// - public string AgeRating { get; set; } = string.Empty; - /// - /// User's rating of the content - /// - public float UserRating { get; set; } - - public string StoryArc { get; set; } = string.Empty; - public string SeriesGroup { get; set; } = string.Empty; - public string AlternateNumber { get; set; } = string.Empty; - public int AlternateCount { get; set; } = 0; - public string AlternateSeries { get; set; } = string.Empty; - - /// - /// This is Epub only: calibre:title_sort - /// Represents the sort order for the title - /// - public string TitleSort { get; set; } = string.Empty; - /// - /// This comes from ComicInfo and is free form text. We use this to validate against a set of tags and mark a file as - /// special. - /// - public string Format { get; set; } = string.Empty; - - /// - /// The translator, can be comma separated. This is part of ComicInfo.xml draft v2.1 - /// - /// See https://github.com/anansi-project/comicinfo/issues/2 for information about this tag - public string Translator { get; set; } = string.Empty; - /// - /// Misc tags. 
This is part of ComicInfo.xml draft v2.1 - /// - /// See https://github.com/anansi-project/comicinfo/issues/1 for information about this tag - public string Tags { get; set; } = string.Empty; - - /// - /// This is the Author. For Books, we map creator tag in OPF to this field. Comma separated if multiple. - /// - public string Writer { get; set; } = string.Empty; - public string Penciller { get; set; } = string.Empty; - public string Inker { get; set; } = string.Empty; - public string Colorist { get; set; } = string.Empty; - public string Letterer { get; set; } = string.Empty; - public string CoverArtist { get; set; } = string.Empty; - public string Editor { get; set; } = string.Empty; - public string Publisher { get; set; } = string.Empty; - public string Characters { get; set; } = string.Empty; - - public static AgeRating ConvertAgeRatingToEnum(string value) - { - if (string.IsNullOrEmpty(value)) return Entities.Enums.AgeRating.Unknown; - return Enum.GetValues() - .SingleOrDefault(t => t.ToDescription().ToUpperInvariant().Equals(value.ToUpperInvariant()), Entities.Enums.AgeRating.Unknown); - } - - public static void CleanComicInfo(ComicInfo info) - { - if (info == null) return; - - info.Series = info.Series.Trim(); - info.SeriesSort = info.SeriesSort.Trim(); - info.LocalizedSeries = info.LocalizedSeries.Trim(); - - info.Writer = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Writer); - info.Colorist = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Colorist); - info.Editor = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Editor); - info.Inker = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Inker); - info.Letterer = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Letterer); - info.Penciller = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Penciller); - info.Publisher = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Publisher); - info.Characters = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Characters); - 
info.Translator = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Translator); - info.CoverArtist = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.CoverArtist); - } - - + if (string.IsNullOrEmpty(value)) return Entities.Enums.AgeRating.Unknown; + return Enum.GetValues() + .SingleOrDefault(t => t.ToDescription().ToUpperInvariant().Equals(value.ToUpperInvariant()), Entities.Enums.AgeRating.Unknown); } + + public static void CleanComicInfo(ComicInfo info) + { + if (info == null) return; + + info.Series = info.Series.Trim(); + info.SeriesSort = info.SeriesSort.Trim(); + info.LocalizedSeries = info.LocalizedSeries.Trim(); + + info.Writer = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Writer); + info.Colorist = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Colorist); + info.Editor = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Editor); + info.Inker = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Inker); + info.Letterer = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Letterer); + info.Penciller = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Penciller); + info.Publisher = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Publisher); + info.Characters = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Characters); + info.Translator = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.Translator); + info.CoverArtist = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.CoverArtist); + } + + /// + /// Uses both Volume and Number to make an educated guess as to what count refers to and it's highest number. 
+ /// + /// + public int CalculatedCount() + { + if (!string.IsNullOrEmpty(Number) && float.Parse(Number) > 0) + { + return (int) Math.Floor(float.Parse(Number)); + } + if (!string.IsNullOrEmpty(Volume) && float.Parse(Volume) > 0) + { + return Math.Max(Count, (int) Math.Floor(float.Parse(Volume))); + } + + return Count; + } + + } diff --git a/API/Data/MigrateChangeRestrictionRoles.cs b/API/Data/MigrateChangeRestrictionRoles.cs new file mode 100644 index 000000000..25385823b --- /dev/null +++ b/API/Data/MigrateChangeRestrictionRoles.cs @@ -0,0 +1,36 @@ +using System.Threading.Tasks; +using API.Constants; +using API.Entities; +using Microsoft.AspNetCore.Identity; +using Microsoft.Extensions.Logging; + +namespace API.Data; + +/// +/// New role introduced in v0.6. Adds the role to all users. +/// +public static class MigrateChangeRestrictionRoles +{ + /// + /// Will not run if any users have the role already + /// + /// + /// + /// + public static async Task Migrate(IUnitOfWork unitOfWork, UserManager userManager, ILogger logger) + { + var usersWithRole = await userManager.GetUsersInRoleAsync(PolicyConstants.ChangeRestrictionRole); + if (usersWithRole.Count != 0) return; + + logger.LogCritical("Running MigrateChangeRestrictionRoles migration"); + + var allUsers = await unitOfWork.UserRepository.GetAllUsers(); + foreach (var user in allUsers) + { + await userManager.RemoveFromRoleAsync(user, PolicyConstants.ChangeRestrictionRole); + await userManager.AddToRoleAsync(user, PolicyConstants.ChangeRestrictionRole); + } + + logger.LogInformation("MigrateChangeRestrictionRoles migration complete"); + } +} diff --git a/API/Data/MigrateConfigFiles.cs b/API/Data/MigrateConfigFiles.cs deleted file mode 100644 index 51ee37167..000000000 --- a/API/Data/MigrateConfigFiles.cs +++ /dev/null @@ -1,168 +0,0 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.IO.Abstractions; -using System.Linq; -using API.Services; -using Kavita.Common; - -namespace 
API.Data -{ - /// - /// A Migration to migrate config related files to the config/ directory for installs prior to v0.4.9. - /// - public static class MigrateConfigFiles - { - private static readonly List LooseLeafFiles = new List() - { - "appsettings.json", - "appsettings.Development.json", - "kavita.db", - }; - - private static readonly List AppFolders = new List() - { - "covers", - "stats", - "logs", - "backups", - "cache", - "temp" - }; - - - /// - /// In v0.4.8 we moved all config files to config/ to match with how docker was setup. This will move all config files from current directory - /// to config/ - /// - public static void Migrate(bool isDocker, IDirectoryService directoryService) - { - Console.WriteLine("Checking if migration to config/ is needed"); - - if (isDocker) - { - if (Configuration.LogPath.Contains("config")) - { - Console.WriteLine("Migration to config/ not needed"); - return; - } - - Console.WriteLine( - "Migrating files from pre-v0.4.8. All Kavita config files are now located in config/"); - - CopyAppFolders(directoryService); - DeleteAppFolders(directoryService); - - UpdateConfiguration(); - - Console.WriteLine("Migration complete. All config files are now in config/ directory"); - return; - } - - if (new FileInfo(Configuration.AppSettingsFilename).Exists) - { - Console.WriteLine("Migration to config/ not needed"); - return; - } - - Console.WriteLine( - "Migrating files from pre-v0.4.8. All Kavita config files are now located in config/"); - - Console.WriteLine($"Creating {directoryService.ConfigDirectory}"); - directoryService.ExistOrCreate(directoryService.ConfigDirectory); - - try - { - CopyLooseLeafFiles(directoryService); - - CopyAppFolders(directoryService); - - // Then we need to update the config file to point to the new DB file - UpdateConfiguration(); - } - catch (Exception) - { - Console.WriteLine("There was an exception during migration. 
Please move everything manually."); - return; - } - - // Finally delete everything in the source directory - Console.WriteLine("Removing old files"); - DeleteLooseFiles(directoryService); - DeleteAppFolders(directoryService); - Console.WriteLine("Removing old files...DONE"); - - Console.WriteLine("Migration complete. All config files are now in config/ directory"); - } - - private static void DeleteAppFolders(IDirectoryService directoryService) - { - foreach (var folderToDelete in AppFolders) - { - if (!new DirectoryInfo(Path.Join(Directory.GetCurrentDirectory(), folderToDelete)).Exists) continue; - - directoryService.ClearAndDeleteDirectory(Path.Join(Directory.GetCurrentDirectory(), folderToDelete)); - } - } - - private static void DeleteLooseFiles(IDirectoryService directoryService) - { - var configFiles = LooseLeafFiles.Select(file => new FileInfo(Path.Join(Directory.GetCurrentDirectory(), file))) - .Where(f => f.Exists); - directoryService.DeleteFiles(configFiles.Select(f => f.FullName)); - } - - private static void CopyAppFolders(IDirectoryService directoryService) - { - Console.WriteLine("Moving folders to config"); - - foreach (var folderToMove in AppFolders) - { - if (new DirectoryInfo(Path.Join(directoryService.ConfigDirectory, folderToMove)).Exists) continue; - - try - { - directoryService.CopyDirectoryToDirectory( - Path.Join(directoryService.FileSystem.Directory.GetCurrentDirectory(), folderToMove), - Path.Join(directoryService.ConfigDirectory, folderToMove)); - } - catch (Exception) - { - /* Swallow Exception */ - } - } - - - Console.WriteLine("Moving folders to config...DONE"); - } - - private static void CopyLooseLeafFiles(IDirectoryService directoryService) - { - var configFiles = LooseLeafFiles.Select(file => new FileInfo(Path.Join(directoryService.FileSystem.Directory.GetCurrentDirectory(), file))) - .Where(f => f.Exists); - // First step is to move all the files - Console.WriteLine("Moving files to config/"); - foreach (var fileInfo in 
configFiles) - { - try - { - fileInfo.CopyTo(Path.Join(directoryService.ConfigDirectory, fileInfo.Name)); - } - catch (Exception) - { - /* Swallow exception when already exists */ - } - } - - Console.WriteLine("Moving files to config...DONE"); - } - - private static void UpdateConfiguration() - { - Console.WriteLine("Updating appsettings.json to new paths"); - Configuration.DatabasePath = "config//kavita.db"; - Configuration.LogPath = "config//logs/kavita.log"; - Console.WriteLine("Updating appsettings.json to new paths...DONE"); - } - } -} diff --git a/API/Data/MigrateCoverImages.cs b/API/Data/MigrateCoverImages.cs deleted file mode 100644 index 9c859e3e4..000000000 --- a/API/Data/MigrateCoverImages.cs +++ /dev/null @@ -1,182 +0,0 @@ -using System; -using System.IO; -using System.Linq; -using System.Threading.Tasks; -using API.Comparators; -using API.Helpers; -using API.Services; -using Microsoft.EntityFrameworkCore; - -namespace API.Data -{ - /// - /// A data structure to migrate Cover Images from byte[] to files. - /// - internal class CoverMigration - { - public string Id { get; set; } - public byte[] CoverImage { get; set; } - public string ParentId { get; set; } - } - - /// - /// In v0.4.6, Cover Images were migrated from byte[] in the DB to external files. This migration handles that work. - /// - public static class MigrateCoverImages - { - private static readonly ChapterSortComparerZeroFirst ChapterSortComparerForInChapterSorting = new (); - - /// - /// Run first. Will extract byte[]s from DB and write them to the cover directory. - /// - public static void ExtractToImages(DbContext context, IDirectoryService directoryService, IImageService imageService) - { - Console.WriteLine("Migrating Cover Images to disk. 
Expect delay."); - directoryService.ExistOrCreate(directoryService.CoverImageDirectory); - - Console.WriteLine("Extracting cover images for Series"); - var lockedSeries = SqlHelper.RawSqlQuery(context, "Select Id, CoverImage From Series Where CoverImage IS NOT NULL", x => - new CoverMigration() - { - Id = x[0] + string.Empty, - CoverImage = (byte[]) x[1], - ParentId = "0" - }); - foreach (var series in lockedSeries) - { - if (series.CoverImage == null || !series.CoverImage.Any()) continue; - if (File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory, - $"{ImageService.GetSeriesFormat(int.Parse(series.Id))}.png"))) continue; - - try - { - var stream = new MemoryStream(series.CoverImage); - stream.Position = 0; - imageService.WriteCoverThumbnail(stream, ImageService.GetSeriesFormat(int.Parse(series.Id)), directoryService.CoverImageDirectory); - } - catch (Exception e) - { - Console.WriteLine(e); - } - } - - Console.WriteLine("Extracting cover images for Chapters"); - var chapters = SqlHelper.RawSqlQuery(context, "Select Id, CoverImage, VolumeId From Chapter Where CoverImage IS NOT NULL;", x => - new CoverMigration() - { - Id = x[0] + string.Empty, - CoverImage = (byte[]) x[1], - ParentId = x[2] + string.Empty - }); - foreach (var chapter in chapters) - { - if (chapter.CoverImage == null || !chapter.CoverImage.Any()) continue; - if (directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory, - $"{ImageService.GetChapterFormat(int.Parse(chapter.Id), int.Parse(chapter.ParentId))}.png"))) continue; - - try - { - var stream = new MemoryStream(chapter.CoverImage); - stream.Position = 0; - imageService.WriteCoverThumbnail(stream, $"{ImageService.GetChapterFormat(int.Parse(chapter.Id), int.Parse(chapter.ParentId))}", directoryService.CoverImageDirectory); - } - catch (Exception e) - { - Console.WriteLine(e); - } - } - - Console.WriteLine("Extracting cover images for Collection Tags"); - 
var tags = SqlHelper.RawSqlQuery(context, "Select Id, CoverImage From CollectionTag Where CoverImage IS NOT NULL;", x => - new CoverMigration() - { - Id = x[0] + string.Empty, - CoverImage = (byte[]) x[1] , - ParentId = "0" - }); - foreach (var tag in tags) - { - if (tag.CoverImage == null || !tag.CoverImage.Any()) continue; - if (directoryService.FileSystem.File.Exists(Path.Join(directoryService.CoverImageDirectory, - $"{ImageService.GetCollectionTagFormat(int.Parse(tag.Id))}.png"))) continue; - try - { - var stream = new MemoryStream(tag.CoverImage); - stream.Position = 0; - imageService.WriteCoverThumbnail(stream, $"{ImageService.GetCollectionTagFormat(int.Parse(tag.Id))}", directoryService.CoverImageDirectory); - } - catch (Exception e) - { - Console.WriteLine(e); - } - } - } - - /// - /// Run after . Will update the DB with names of files that were extracted. - /// - /// - public static async Task UpdateDatabaseWithImages(DataContext context, IDirectoryService directoryService) - { - Console.WriteLine("Updating Series entities"); - var seriesCovers = await context.Series.Where(s => !string.IsNullOrEmpty(s.CoverImage)).ToListAsync(); - foreach (var series in seriesCovers) - { - if (!directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory, - $"{ImageService.GetSeriesFormat(series.Id)}.png"))) continue; - series.CoverImage = $"{ImageService.GetSeriesFormat(series.Id)}.png"; - } - - await context.SaveChangesAsync(); - - Console.WriteLine("Updating Chapter entities"); - var chapters = await context.Chapter.ToListAsync(); - // ReSharper disable once ForeachCanBePartlyConvertedToQueryUsingAnotherGetEnumerator - foreach (var chapter in chapters) - { - if (directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory, - $"{ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId)}.png"))) - { - chapter.CoverImage = 
$"{ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId)}.png"; - } - - } - - await context.SaveChangesAsync(); - - Console.WriteLine("Updating Volume entities"); - var volumes = await context.Volume.Include(v => v.Chapters).ToListAsync(); - foreach (var volume in volumes) - { - var firstChapter = volume.Chapters.MinBy(x => double.Parse(x.Number), ChapterSortComparerForInChapterSorting); - if (firstChapter == null) continue; - if (directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory, - $"{ImageService.GetChapterFormat(firstChapter.Id, firstChapter.VolumeId)}.png"))) - { - volume.CoverImage = $"{ImageService.GetChapterFormat(firstChapter.Id, firstChapter.VolumeId)}.png"; - } - - } - - await context.SaveChangesAsync(); - - Console.WriteLine("Updating Collection Tag entities"); - var tags = await context.CollectionTag.ToListAsync(); - // ReSharper disable once ForeachCanBePartlyConvertedToQueryUsingAnotherGetEnumerator - foreach (var tag in tags) - { - if (directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory, - $"{ImageService.GetCollectionTagFormat(tag.Id)}.png"))) - { - tag.CoverImage = $"{ImageService.GetCollectionTagFormat(tag.Id)}.png"; - } - - } - - await context.SaveChangesAsync(); - - Console.WriteLine("Cover Image Migration completed"); - } - - } -} diff --git a/API/Data/MigrateNormalizedEverything.cs b/API/Data/MigrateNormalizedEverything.cs new file mode 100644 index 000000000..675620225 --- /dev/null +++ b/API/Data/MigrateNormalizedEverything.cs @@ -0,0 +1,120 @@ +using System; +using System.Linq; +using System.Threading.Tasks; +using Kavita.Common.EnvironmentInfo; +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Logging; + +namespace API.Data; + +/// +/// v0.6.0 introduced a change in how Normalization works and hence every normalized field needs to be re-calculated +/// +public static class 
MigrateNormalizedEverything +{ + public static async Task Migrate(IUnitOfWork unitOfWork, DataContext dataContext, ILogger logger) + { + // if current version is > 0.5.6.5, then we can exit and not perform + var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync(); + if (Version.Parse(settings.InstallVersion) > new Version(0, 5, 6, 5)) + { + return; + } + logger.LogCritical("Running MigrateNormalizedEverything migration. Please be patient, this may take some time depending on the size of your library. Do not abort, this can break your Database"); + + logger.LogInformation("Updating Normalization on Series..."); + foreach (var series in await dataContext.Series.ToListAsync()) + { + series.NormalizedLocalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(series.LocalizedName ?? string.Empty); + series.NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(series.Name ?? string.Empty); + logger.LogInformation("Updated Series: {SeriesName}", series.Name); + unitOfWork.SeriesRepository.Update(series); + } + + if (unitOfWork.HasChanges()) + { + await unitOfWork.CommitAsync(); + } + logger.LogInformation("Updating Normalization on Series...Done"); + + // Genres + logger.LogInformation("Updating Normalization on Genres..."); + foreach (var genre in await dataContext.Genre.ToListAsync()) + { + genre.NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(genre.Title ?? string.Empty); + logger.LogInformation("Updated Genre: {Genre}", genre.Title); + unitOfWork.GenreRepository.Attach(genre); + } + + if (unitOfWork.HasChanges()) + { + await unitOfWork.CommitAsync(); + } + logger.LogInformation("Updating Normalization on Genres...Done"); + + // Tags + logger.LogInformation("Updating Normalization on Tags..."); + foreach (var tag in await dataContext.Tag.ToListAsync()) + { + tag.NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(tag.Title ?? 
string.Empty); + logger.LogInformation("Updated Tag: {Tag}", tag.Title); + unitOfWork.TagRepository.Attach(tag); + } + + if (unitOfWork.HasChanges()) + { + await unitOfWork.CommitAsync(); + } + logger.LogInformation("Updating Normalization on Tags...Done"); + + // People + logger.LogInformation("Updating Normalization on People..."); + foreach (var person in await dataContext.Person.ToListAsync()) + { + person.NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(person.Name ?? string.Empty); + logger.LogInformation("Updated Person: {Person}", person.Name); + unitOfWork.PersonRepository.Attach(person); + } + + if (unitOfWork.HasChanges()) + { + await unitOfWork.CommitAsync(); + } + logger.LogInformation("Updating Normalization on People...Done"); + + // Collections + logger.LogInformation("Updating Normalization on Collections..."); + foreach (var collection in await dataContext.CollectionTag.ToListAsync()) + { + collection.NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(collection.Title ?? string.Empty); + logger.LogInformation("Updated Collection: {Collection}", collection.Title); + unitOfWork.CollectionTagRepository.Update(collection); + } + + if (unitOfWork.HasChanges()) + { + await unitOfWork.CommitAsync(); + } + logger.LogInformation("Updating Normalization on Collections...Done"); + + // Reading Lists + logger.LogInformation("Updating Normalization on Reading Lists..."); + foreach (var readingList in await dataContext.ReadingList.ToListAsync()) + { + readingList.NormalizedTitle = Services.Tasks.Scanner.Parser.Parser.Normalize(readingList.Title ?? 
string.Empty); + logger.LogInformation("Updated Reading List: {ReadingList}", readingList.Title); + unitOfWork.ReadingListRepository.Update(readingList); + } + + if (unitOfWork.HasChanges()) + { + await unitOfWork.CommitAsync(); + } + logger.LogInformation("Updating Normalization on Reading Lists...Done"); + + + logger.LogInformation("MigrateNormalizedEverything migration finished"); + + } + +} diff --git a/API/Data/MigrateReadingListAgeRating.cs b/API/Data/MigrateReadingListAgeRating.cs new file mode 100644 index 000000000..cc1ddfc3d --- /dev/null +++ b/API/Data/MigrateReadingListAgeRating.cs @@ -0,0 +1,42 @@ +using System; +using System.Threading.Tasks; +using API.Constants; +using API.Services; +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Logging; +using SQLitePCL; + +namespace API.Data; + +/// +/// New role introduced in v0.6. Calculates the Age Rating on all Reading Lists +/// +public static class MigrateReadingListAgeRating +{ + /// + /// Will not run if any above v0.5.6.24 or v0.6.0 + /// + /// + /// + /// + /// + public static async Task Migrate(IUnitOfWork unitOfWork, DataContext context, IReadingListService readingListService, ILogger logger) + { + var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync(); + if (Version.Parse(settings.InstallVersion) > new Version(0, 5, 6, 26)) + { + return; + } + + logger.LogInformation("MigrateReadingListAgeRating migration starting"); + var readingLists = await context.ReadingList.Include(r => r.Items).ToListAsync(); + foreach (var readingList in readingLists) + { + await readingListService.CalculateReadingListAgeRating(readingList); + context.ReadingList.Update(readingList); + } + + await context.SaveChangesAsync(); + logger.LogInformation("MigrateReadingListAgeRating migration complete"); + } +} diff --git a/API/Data/Migrations/20220921023455_DeviceSupport.Designer.cs b/API/Data/Migrations/20220921023455_DeviceSupport.Designer.cs new file mode 100644 index 000000000..dbf4a0af6 --- 
/dev/null +++ b/API/Data/Migrations/20220921023455_DeviceSupport.Designer.cs @@ -0,0 +1,1658 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +#nullable disable + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20220921023455_DeviceSupport")] + partial class DeviceSupport + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder.HasAnnotation("ProductVersion", "6.0.9"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles", (string)null); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ApiKey") + .HasColumnType("TEXT"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("ConfirmationToken") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + 
.HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers", (string)null); + }); + + modelBuilder.Entity("API.Entities.AppUserBookmark", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FileName") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Page") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserBookmark"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AutoCloseMenu") + .HasColumnType("INTEGER"); + + b.Property("BackgroundColor") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT") + .HasDefaultValue("#000000"); + + b.Property("BlurUnreadSummaries") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + 
b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderImmersiveMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLayoutMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("BookReaderReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("BookReaderTapToPaginate") + .HasColumnType("INTEGER"); + + b.Property("BookThemeName") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT") + .HasDefaultValue("Dark"); + + b.Property("GlobalPageLayoutMode") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("LayoutMode") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("PromptForDownloadSize") + .HasColumnType("INTEGER"); + + b.Property("ReaderMode") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.Property("ShowScreenHints") + .HasColumnType("INTEGER"); + + b.Property("ThemeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.HasIndex("ThemeId"); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookScrollId") + .HasColumnType("TEXT"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("SeriesId"); + + 
b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("SeriesId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles", (string)null); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Count") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("Language") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("ReleaseDate") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.Property("TitleName") + .HasColumnType("TEXT"); + + b.Property("TotalCount") + .HasColumnType("INTEGER"); + + 
b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.CollectionTag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("Id", "Promoted") + .IsUnique(); + + b.ToTable("CollectionTag"); + }); + + modelBuilder.Entity("API.Entities.Device", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("EmailAddress") + .HasColumnType("TEXT"); + + b.Property("IpAddress") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LastUsed") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Platform") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("Device"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Genre", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + 
b.Property("ExternalTag") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedTitle", "ExternalTag") + .IsUnique(); + + b.ToTable("Genre"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastFileAnalysis") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AgeRatingLocked") + .HasColumnType("INTEGER"); + + b.Property("CharacterLocked") + .HasColumnType("INTEGER"); + + b.Property("ColoristLocked") + .HasColumnType("INTEGER"); + + b.Property("CoverArtistLocked") + .HasColumnType("INTEGER"); + + b.Property("EditorLocked") + .HasColumnType("INTEGER"); + + b.Property("GenresLocked") + .HasColumnType("INTEGER"); + + b.Property("InkerLocked") + 
.HasColumnType("INTEGER"); + + b.Property("Language") + .HasColumnType("TEXT"); + + b.Property("LanguageLocked") + .HasColumnType("INTEGER"); + + b.Property("LettererLocked") + .HasColumnType("INTEGER"); + + b.Property("MaxCount") + .HasColumnType("INTEGER"); + + b.Property("PencillerLocked") + .HasColumnType("INTEGER"); + + b.Property("PublicationStatus") + .HasColumnType("INTEGER"); + + b.Property("PublicationStatusLocked") + .HasColumnType("INTEGER"); + + b.Property("PublisherLocked") + .HasColumnType("INTEGER"); + + b.Property("ReleaseYear") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("SummaryLocked") + .HasColumnType("INTEGER"); + + b.Property("TagsLocked") + .HasColumnType("INTEGER"); + + b.Property("TotalCount") + .HasColumnType("INTEGER"); + + b.Property("TranslatorLocked") + .HasColumnType("INTEGER"); + + b.Property("WriterLocked") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId") + .IsUnique(); + + b.HasIndex("Id", "SeriesId") + .IsUnique(); + + b.ToTable("SeriesMetadata"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesRelation", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("RelationKind") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("TargetSeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.HasIndex("TargetSeriesId"); + + b.ToTable("SeriesRelation"); + }); + + modelBuilder.Entity("API.Entities.Person", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("Role") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Person"); 
+ }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("ReadingList"); + }); + + modelBuilder.Entity("API.Entities.ReadingListItem", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Order") + .HasColumnType("INTEGER"); + + b.Property("ReadingListId") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.HasIndex("ReadingListId"); + + b.HasIndex("SeriesId"); + + b.HasIndex("VolumeId"); + + b.ToTable("ReadingListItem"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FolderPath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastChapterAdded") + .HasColumnType("TEXT"); + + b.Property("LastFolderScanned") + .HasColumnType("TEXT"); + + 
b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("LocalizedNameLocked") + .HasColumnType("INTEGER"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NameLocked") + .HasColumnType("INTEGER"); + + b.Property("NormalizedLocalizedName") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("SortNameLocked") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("LibraryId"); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.SiteTheme", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FileName") + .HasColumnType("TEXT"); + + b.Property("IsDefault") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("Provider") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("SiteTheme"); + }); + + modelBuilder.Entity("API.Entities.Tag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + 
b.Property("ExternalTag") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedTitle", "ExternalTag") + .IsUnique(); + + b.ToTable("Tag"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("ChapterGenre", b => + { + b.Property("ChaptersId") + .HasColumnType("INTEGER"); + + b.Property("GenresId") + .HasColumnType("INTEGER"); + + b.HasKey("ChaptersId", "GenresId"); + + b.HasIndex("GenresId"); + + b.ToTable("ChapterGenre"); + }); + + modelBuilder.Entity("ChapterPerson", b => + { + b.Property("ChapterMetadatasId") + .HasColumnType("INTEGER"); + + b.Property("PeopleId") + .HasColumnType("INTEGER"); + + b.HasKey("ChapterMetadatasId", "PeopleId"); + + b.HasIndex("PeopleId"); + + b.ToTable("ChapterPerson"); + }); + + modelBuilder.Entity("ChapterTag", b => + { + 
b.Property("ChaptersId") + .HasColumnType("INTEGER"); + + b.Property("TagsId") + .HasColumnType("INTEGER"); + + b.HasKey("ChaptersId", "TagsId"); + + b.HasIndex("TagsId"); + + b.ToTable("ChapterTag"); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.Property("CollectionTagsId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("CollectionTagsId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("CollectionTagSeriesMetadata"); + }); + + modelBuilder.Entity("GenreSeriesMetadata", b => + { + b.Property("GenresId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("GenresId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("GenreSeriesMetadata"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + 
.HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens", (string)null); + }); + + modelBuilder.Entity("PersonSeriesMetadata", b => + { + b.Property("PeopleId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("PeopleId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("PersonSeriesMetadata"); + }); + + modelBuilder.Entity("SeriesMetadataTag", b => + { + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.Property("TagsId") + .HasColumnType("INTEGER"); + + b.HasKey("SeriesMetadatasId", "TagsId"); + + b.HasIndex("TagsId"); + + b.ToTable("SeriesMetadataTag"); + }); + + modelBuilder.Entity("API.Entities.AppUserBookmark", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Bookmarks") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.SiteTheme", "Theme") + .WithMany() + .HasForeignKey("ThemeId"); + + b.Navigation("AppUser"); + + b.Navigation("Theme"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + 
.OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", null) + .WithMany("Progress") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", null) + .WithMany("Ratings") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.Device", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Devices") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + 
b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithOne("Metadata") + .HasForeignKey("API.Entities.Metadata.SeriesMetadata", "SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesRelation", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Relations") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", "TargetSeries") + .WithMany("RelationOf") + .HasForeignKey("TargetSeriesId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired(); + + b.Navigation("Series"); + + b.Navigation("TargetSeries"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("ReadingLists") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.ReadingListItem", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany() + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.ReadingList", "ReadingList") + .WithMany("Items") + .HasForeignKey("ReadingListId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", "Series") + .WithMany() + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Volume", "Volume") + .WithMany() + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + + b.Navigation("ReadingList"); + + b.Navigation("Series"); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany("WantToRead") + .HasForeignKey("AppUserId"); + + 
b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterGenre", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChaptersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Genre", null) + .WithMany() + .HasForeignKey("GenresId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterPerson", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChapterMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Person", null) + .WithMany() + .HasForeignKey("PeopleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterTag", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChaptersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Tag", null) + .WithMany() + .HasForeignKey("TagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.HasOne("API.Entities.CollectionTag", null) + .WithMany() + .HasForeignKey("CollectionTagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + 
b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("GenreSeriesMetadata", b => + { + b.HasOne("API.Entities.Genre", null) + .WithMany() + .HasForeignKey("GenresId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("PersonSeriesMetadata", b => + { + b.HasOne("API.Entities.Person", null) + .WithMany() + .HasForeignKey("PeopleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("SeriesMetadataTag", b => + { + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + 
.IsRequired(); + + b.HasOne("API.Entities.Tag", null) + .WithMany() + .HasForeignKey("TagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Bookmarks"); + + b.Navigation("Devices"); + + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("ReadingLists"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + + b.Navigation("WantToRead"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.Navigation("Items"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Metadata"); + + b.Navigation("Progress"); + + b.Navigation("Ratings"); + + b.Navigation("RelationOf"); + + b.Navigation("Relations"); + + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20220921023455_DeviceSupport.cs b/API/Data/Migrations/20220921023455_DeviceSupport.cs new file mode 100644 index 000000000..7723daa41 --- /dev/null +++ b/API/Data/Migrations/20220921023455_DeviceSupport.cs @@ -0,0 +1,73 @@ +using System; +using Microsoft.EntityFrameworkCore.Migrations; + +#nullable disable + +namespace API.Data.Migrations +{ + public partial class DeviceSupport : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropForeignKey( + name: "FK_SeriesRelation_Series_TargetSeriesId", + table: "SeriesRelation"); + + migrationBuilder.CreateTable( + name: "Device", + columns: table => new + { + Id = table.Column(type: "INTEGER", nullable: false) + 
.Annotation("Sqlite:Autoincrement", true), + IpAddress = table.Column(type: "TEXT", nullable: true), + Name = table.Column(type: "TEXT", nullable: true), + EmailAddress = table.Column(type: "TEXT", nullable: true), + Platform = table.Column(type: "INTEGER", nullable: false), + AppUserId = table.Column(type: "INTEGER", nullable: false), + LastUsed = table.Column(type: "TEXT", nullable: false), + Created = table.Column(type: "TEXT", nullable: false), + LastModified = table.Column(type: "TEXT", nullable: false) + }, + constraints: table => + { + table.PrimaryKey("PK_Device", x => x.Id); + table.ForeignKey( + name: "FK_Device_AspNetUsers_AppUserId", + column: x => x.AppUserId, + principalTable: "AspNetUsers", + principalColumn: "Id", + onDelete: ReferentialAction.Cascade); + }); + + migrationBuilder.CreateIndex( + name: "IX_Device_AppUserId", + table: "Device", + column: "AppUserId"); + + migrationBuilder.AddForeignKey( + name: "FK_SeriesRelation_Series_TargetSeriesId", + table: "SeriesRelation", + column: "TargetSeriesId", + principalTable: "Series", + principalColumn: "Id"); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropForeignKey( + name: "FK_SeriesRelation_Series_TargetSeriesId", + table: "SeriesRelation"); + + migrationBuilder.DropTable( + name: "Device"); + + migrationBuilder.AddForeignKey( + name: "FK_SeriesRelation_Series_TargetSeriesId", + table: "SeriesRelation", + column: "TargetSeriesId", + principalTable: "Series", + principalColumn: "Id", + onDelete: ReferentialAction.Cascade); + } + } +} diff --git a/API/Data/Migrations/20220926145902_AddNoTransitions.Designer.cs b/API/Data/Migrations/20220926145902_AddNoTransitions.Designer.cs new file mode 100644 index 000000000..af7f8bd07 --- /dev/null +++ b/API/Data/Migrations/20220926145902_AddNoTransitions.Designer.cs @@ -0,0 +1,1661 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; 
+using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +#nullable disable + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20220926145902_AddNoTransitions")] + partial class AddNoTransitions + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder.HasAnnotation("ProductVersion", "6.0.9"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles", (string)null); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ApiKey") + .HasColumnType("TEXT"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("ConfirmationToken") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + 
.HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers", (string)null); + }); + + modelBuilder.Entity("API.Entities.AppUserBookmark", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FileName") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Page") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserBookmark"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AutoCloseMenu") + .HasColumnType("INTEGER"); + + b.Property("BackgroundColor") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT") + .HasDefaultValue("#000000"); + + b.Property("BlurUnreadSummaries") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderImmersiveMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLayoutMode") + .HasColumnType("INTEGER"); + + 
b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("BookReaderReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("BookReaderTapToPaginate") + .HasColumnType("INTEGER"); + + b.Property("BookThemeName") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT") + .HasDefaultValue("Dark"); + + b.Property("GlobalPageLayoutMode") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("LayoutMode") + .HasColumnType("INTEGER"); + + b.Property("NoTransitions") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("PromptForDownloadSize") + .HasColumnType("INTEGER"); + + b.Property("ReaderMode") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.Property("ShowScreenHints") + .HasColumnType("INTEGER"); + + b.Property("ThemeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.HasIndex("ThemeId"); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookScrollId") + .HasColumnType("TEXT"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("SeriesId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + 
.HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("SeriesId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles", (string)null); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Count") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("Language") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("ReleaseDate") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.Property("TitleName") + .HasColumnType("TEXT"); + + b.Property("TotalCount") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + 
b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.CollectionTag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("Id", "Promoted") + .IsUnique(); + + b.ToTable("CollectionTag"); + }); + + modelBuilder.Entity("API.Entities.Device", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("EmailAddress") + .HasColumnType("TEXT"); + + b.Property("IpAddress") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LastUsed") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Platform") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("Device"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Genre", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ExternalTag") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + 
b.HasKey("Id"); + + b.HasIndex("NormalizedTitle", "ExternalTag") + .IsUnique(); + + b.ToTable("Genre"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastFileAnalysis") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AgeRatingLocked") + .HasColumnType("INTEGER"); + + b.Property("CharacterLocked") + .HasColumnType("INTEGER"); + + b.Property("ColoristLocked") + .HasColumnType("INTEGER"); + + b.Property("CoverArtistLocked") + .HasColumnType("INTEGER"); + + b.Property("EditorLocked") + .HasColumnType("INTEGER"); + + b.Property("GenresLocked") + .HasColumnType("INTEGER"); + + b.Property("InkerLocked") + .HasColumnType("INTEGER"); + + b.Property("Language") + .HasColumnType("TEXT"); + + b.Property("LanguageLocked") + .HasColumnType("INTEGER"); + + b.Property("LettererLocked") + 
.HasColumnType("INTEGER"); + + b.Property("MaxCount") + .HasColumnType("INTEGER"); + + b.Property("PencillerLocked") + .HasColumnType("INTEGER"); + + b.Property("PublicationStatus") + .HasColumnType("INTEGER"); + + b.Property("PublicationStatusLocked") + .HasColumnType("INTEGER"); + + b.Property("PublisherLocked") + .HasColumnType("INTEGER"); + + b.Property("ReleaseYear") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("SummaryLocked") + .HasColumnType("INTEGER"); + + b.Property("TagsLocked") + .HasColumnType("INTEGER"); + + b.Property("TotalCount") + .HasColumnType("INTEGER"); + + b.Property("TranslatorLocked") + .HasColumnType("INTEGER"); + + b.Property("WriterLocked") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId") + .IsUnique(); + + b.HasIndex("Id", "SeriesId") + .IsUnique(); + + b.ToTable("SeriesMetadata"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesRelation", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("RelationKind") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("TargetSeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.HasIndex("TargetSeriesId"); + + b.ToTable("SeriesRelation"); + }); + + modelBuilder.Entity("API.Entities.Person", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("Role") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Person"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + 
.HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("ReadingList"); + }); + + modelBuilder.Entity("API.Entities.ReadingListItem", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Order") + .HasColumnType("INTEGER"); + + b.Property("ReadingListId") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.HasIndex("ReadingListId"); + + b.HasIndex("SeriesId"); + + b.HasIndex("VolumeId"); + + b.ToTable("ReadingListItem"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FolderPath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastChapterAdded") + .HasColumnType("TEXT"); + + b.Property("LastFolderScanned") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + 
b.Property("LocalizedNameLocked") + .HasColumnType("INTEGER"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NameLocked") + .HasColumnType("INTEGER"); + + b.Property("NormalizedLocalizedName") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("SortNameLocked") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("LibraryId"); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.SiteTheme", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FileName") + .HasColumnType("TEXT"); + + b.Property("IsDefault") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("Provider") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("SiteTheme"); + }); + + modelBuilder.Entity("API.Entities.Tag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ExternalTag") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); 
+ + b.HasIndex("NormalizedTitle", "ExternalTag") + .IsUnique(); + + b.ToTable("Tag"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("ChapterGenre", b => + { + b.Property("ChaptersId") + .HasColumnType("INTEGER"); + + b.Property("GenresId") + .HasColumnType("INTEGER"); + + b.HasKey("ChaptersId", "GenresId"); + + b.HasIndex("GenresId"); + + b.ToTable("ChapterGenre"); + }); + + modelBuilder.Entity("ChapterPerson", b => + { + b.Property("ChapterMetadatasId") + .HasColumnType("INTEGER"); + + b.Property("PeopleId") + .HasColumnType("INTEGER"); + + b.HasKey("ChapterMetadatasId", "PeopleId"); + + b.HasIndex("PeopleId"); + + b.ToTable("ChapterPerson"); + }); + + modelBuilder.Entity("ChapterTag", b => + { + b.Property("ChaptersId") + .HasColumnType("INTEGER"); + + b.Property("TagsId") + .HasColumnType("INTEGER"); + + b.HasKey("ChaptersId", "TagsId"); + + b.HasIndex("TagsId"); + + b.ToTable("ChapterTag"); + 
}); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.Property("CollectionTagsId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("CollectionTagsId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("CollectionTagSeriesMetadata"); + }); + + modelBuilder.Entity("GenreSeriesMetadata", b => + { + b.Property("GenresId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("GenresId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("GenreSeriesMetadata"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", 
b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens", (string)null); + }); + + modelBuilder.Entity("PersonSeriesMetadata", b => + { + b.Property("PeopleId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("PeopleId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("PersonSeriesMetadata"); + }); + + modelBuilder.Entity("SeriesMetadataTag", b => + { + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.Property("TagsId") + .HasColumnType("INTEGER"); + + b.HasKey("SeriesMetadatasId", "TagsId"); + + b.HasIndex("TagsId"); + + b.ToTable("SeriesMetadataTag"); + }); + + modelBuilder.Entity("API.Entities.AppUserBookmark", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Bookmarks") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.SiteTheme", "Theme") + .WithMany() + .HasForeignKey("ThemeId"); + + b.Navigation("AppUser"); + + b.Navigation("Theme"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", null) + .WithMany("Progress") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + 
modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", null) + .WithMany("Ratings") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.Device", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Devices") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithOne("Metadata") + .HasForeignKey("API.Entities.Metadata.SeriesMetadata", "SeriesId") + 
.OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesRelation", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Relations") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", "TargetSeries") + .WithMany("RelationOf") + .HasForeignKey("TargetSeriesId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired(); + + b.Navigation("Series"); + + b.Navigation("TargetSeries"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("ReadingLists") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.ReadingListItem", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany() + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.ReadingList", "ReadingList") + .WithMany("Items") + .HasForeignKey("ReadingListId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", "Series") + .WithMany() + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Volume", "Volume") + .WithMany() + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + + b.Navigation("ReadingList"); + + b.Navigation("Series"); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany("WantToRead") + .HasForeignKey("AppUserId"); + + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + 
b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterGenre", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChaptersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Genre", null) + .WithMany() + .HasForeignKey("GenresId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterPerson", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChapterMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Person", null) + .WithMany() + .HasForeignKey("PeopleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterTag", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChaptersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Tag", null) + .WithMany() + .HasForeignKey("TagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.HasOne("API.Entities.CollectionTag", null) + .WithMany() + .HasForeignKey("CollectionTagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("GenreSeriesMetadata", b => + { + b.HasOne("API.Entities.Genre", null) + 
.WithMany() + .HasForeignKey("GenresId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("PersonSeriesMetadata", b => + { + b.HasOne("API.Entities.Person", null) + .WithMany() + .HasForeignKey("PeopleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("SeriesMetadataTag", b => + { + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Tag", null) + .WithMany() + .HasForeignKey("TagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + 
modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Bookmarks"); + + b.Navigation("Devices"); + + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("ReadingLists"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + + b.Navigation("WantToRead"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.Navigation("Items"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Metadata"); + + b.Navigation("Progress"); + + b.Navigation("Ratings"); + + b.Navigation("RelationOf"); + + b.Navigation("Relations"); + + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20220926145902_AddNoTransitions.cs b/API/Data/Migrations/20220926145902_AddNoTransitions.cs new file mode 100644 index 000000000..fcef3979a --- /dev/null +++ b/API/Data/Migrations/20220926145902_AddNoTransitions.cs @@ -0,0 +1,26 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +#nullable disable + +namespace API.Data.Migrations +{ + public partial class AddNoTransitions : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.AddColumn( + name: "NoTransitions", + table: "AppUserPreferences", + type: "INTEGER", + nullable: false, + defaultValue: false); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "NoTransitions", + table: "AppUserPreferences"); + } + } +} diff --git a/API/Data/Migrations/20221006013956_ReleaseYearOnSeriesEdit.Designer.cs b/API/Data/Migrations/20221006013956_ReleaseYearOnSeriesEdit.Designer.cs new file mode 100644 index 
000000000..fcc054561 --- /dev/null +++ b/API/Data/Migrations/20221006013956_ReleaseYearOnSeriesEdit.Designer.cs @@ -0,0 +1,1664 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +#nullable disable + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20221006013956_ReleaseYearOnSeriesEdit")] + partial class ReleaseYearOnSeriesEdit + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder.HasAnnotation("ProductVersion", "6.0.9"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles", (string)null); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ApiKey") + .HasColumnType("TEXT"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("ConfirmationToken") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + 
b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers", (string)null); + }); + + modelBuilder.Entity("API.Entities.AppUserBookmark", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FileName") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Page") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserBookmark"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AutoCloseMenu") + .HasColumnType("INTEGER"); + + b.Property("BackgroundColor") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT") + .HasDefaultValue("#000000"); + + b.Property("BlurUnreadSummaries") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + 
.HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderImmersiveMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLayoutMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("BookReaderReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("BookReaderTapToPaginate") + .HasColumnType("INTEGER"); + + b.Property("BookThemeName") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT") + .HasDefaultValue("Dark"); + + b.Property("GlobalPageLayoutMode") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("LayoutMode") + .HasColumnType("INTEGER"); + + b.Property("NoTransitions") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("PromptForDownloadSize") + .HasColumnType("INTEGER"); + + b.Property("ReaderMode") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.Property("ShowScreenHints") + .HasColumnType("INTEGER"); + + b.Property("ThemeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.HasIndex("ThemeId"); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookScrollId") + .HasColumnType("TEXT"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + 
.HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("SeriesId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("SeriesId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles", (string)null); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Count") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("Language") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("ReleaseDate") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.Property("TitleName") 
+ .HasColumnType("TEXT"); + + b.Property("TotalCount") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.CollectionTag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("Id", "Promoted") + .IsUnique(); + + b.ToTable("CollectionTag"); + }); + + modelBuilder.Entity("API.Entities.Device", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("EmailAddress") + .HasColumnType("TEXT"); + + b.Property("IpAddress") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LastUsed") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Platform") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("Device"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Genre", b => + { + 
b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ExternalTag") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedTitle", "ExternalTag") + .IsUnique(); + + b.ToTable("Genre"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastFileAnalysis") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AgeRatingLocked") + .HasColumnType("INTEGER"); + + b.Property("CharacterLocked") + .HasColumnType("INTEGER"); + + b.Property("ColoristLocked") + .HasColumnType("INTEGER"); + + b.Property("CoverArtistLocked") + .HasColumnType("INTEGER"); + + b.Property("EditorLocked") + .HasColumnType("INTEGER"); + + 
b.Property("GenresLocked") + .HasColumnType("INTEGER"); + + b.Property("InkerLocked") + .HasColumnType("INTEGER"); + + b.Property("Language") + .HasColumnType("TEXT"); + + b.Property("LanguageLocked") + .HasColumnType("INTEGER"); + + b.Property("LettererLocked") + .HasColumnType("INTEGER"); + + b.Property("MaxCount") + .HasColumnType("INTEGER"); + + b.Property("PencillerLocked") + .HasColumnType("INTEGER"); + + b.Property("PublicationStatus") + .HasColumnType("INTEGER"); + + b.Property("PublicationStatusLocked") + .HasColumnType("INTEGER"); + + b.Property("PublisherLocked") + .HasColumnType("INTEGER"); + + b.Property("ReleaseYear") + .HasColumnType("INTEGER"); + + b.Property("ReleaseYearLocked") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("SummaryLocked") + .HasColumnType("INTEGER"); + + b.Property("TagsLocked") + .HasColumnType("INTEGER"); + + b.Property("TotalCount") + .HasColumnType("INTEGER"); + + b.Property("TranslatorLocked") + .HasColumnType("INTEGER"); + + b.Property("WriterLocked") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId") + .IsUnique(); + + b.HasIndex("Id", "SeriesId") + .IsUnique(); + + b.ToTable("SeriesMetadata"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesRelation", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("RelationKind") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("TargetSeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.HasIndex("TargetSeriesId"); + + b.ToTable("SeriesRelation"); + }); + + modelBuilder.Entity("API.Entities.Person", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + 
+ b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("Role") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Person"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("ReadingList"); + }); + + modelBuilder.Entity("API.Entities.ReadingListItem", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Order") + .HasColumnType("INTEGER"); + + b.Property("ReadingListId") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.HasIndex("ReadingListId"); + + b.HasIndex("SeriesId"); + + b.HasIndex("VolumeId"); + + b.ToTable("ReadingListItem"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FolderPath") + .HasColumnType("TEXT"); + + b.Property("Format") + 
.HasColumnType("INTEGER"); + + b.Property("LastChapterAdded") + .HasColumnType("TEXT"); + + b.Property("LastFolderScanned") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("LocalizedNameLocked") + .HasColumnType("INTEGER"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NameLocked") + .HasColumnType("INTEGER"); + + b.Property("NormalizedLocalizedName") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("SortNameLocked") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("LibraryId"); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.SiteTheme", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FileName") + .HasColumnType("TEXT"); + + b.Property("IsDefault") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("Provider") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + 
b.ToTable("SiteTheme"); + }); + + modelBuilder.Entity("API.Entities.Tag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ExternalTag") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedTitle", "ExternalTag") + .IsUnique(); + + b.ToTable("Tag"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("ChapterGenre", b => + { + b.Property("ChaptersId") + .HasColumnType("INTEGER"); + + b.Property("GenresId") + .HasColumnType("INTEGER"); + + b.HasKey("ChaptersId", "GenresId"); + + b.HasIndex("GenresId"); + + b.ToTable("ChapterGenre"); + }); + + modelBuilder.Entity("ChapterPerson", b => + { + b.Property("ChapterMetadatasId") + .HasColumnType("INTEGER"); + + b.Property("PeopleId") + .HasColumnType("INTEGER"); + + 
b.HasKey("ChapterMetadatasId", "PeopleId"); + + b.HasIndex("PeopleId"); + + b.ToTable("ChapterPerson"); + }); + + modelBuilder.Entity("ChapterTag", b => + { + b.Property("ChaptersId") + .HasColumnType("INTEGER"); + + b.Property("TagsId") + .HasColumnType("INTEGER"); + + b.HasKey("ChaptersId", "TagsId"); + + b.HasIndex("TagsId"); + + b.ToTable("ChapterTag"); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.Property("CollectionTagsId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("CollectionTagsId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("CollectionTagSeriesMetadata"); + }); + + modelBuilder.Entity("GenreSeriesMetadata", b => + { + b.Property("GenresId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("GenresId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("GenreSeriesMetadata"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + 
b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens", (string)null); + }); + + modelBuilder.Entity("PersonSeriesMetadata", b => + { + b.Property("PeopleId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("PeopleId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("PersonSeriesMetadata"); + }); + + modelBuilder.Entity("SeriesMetadataTag", b => + { + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.Property("TagsId") + .HasColumnType("INTEGER"); + + b.HasKey("SeriesMetadatasId", "TagsId"); + + b.HasIndex("TagsId"); + + b.ToTable("SeriesMetadataTag"); + }); + + modelBuilder.Entity("API.Entities.AppUserBookmark", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Bookmarks") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.SiteTheme", "Theme") + .WithMany() + .HasForeignKey("ThemeId"); + + b.Navigation("AppUser"); + + b.Navigation("Theme"); + }); + + 
modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", null) + .WithMany("Progress") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", null) + .WithMany("Ratings") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.Device", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Devices") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => 
+ { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithOne("Metadata") + .HasForeignKey("API.Entities.Metadata.SeriesMetadata", "SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesRelation", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Relations") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", "TargetSeries") + .WithMany("RelationOf") + .HasForeignKey("TargetSeriesId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired(); + + b.Navigation("Series"); + + b.Navigation("TargetSeries"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("ReadingLists") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.ReadingListItem", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany() + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.ReadingList", "ReadingList") + .WithMany("Items") + .HasForeignKey("ReadingListId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", "Series") + .WithMany() + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Volume", "Volume") + .WithMany() + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + + b.Navigation("ReadingList"); + + b.Navigation("Series"); + + b.Navigation("Volume"); + }); + + 
modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany("WantToRead") + .HasForeignKey("AppUserId"); + + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterGenre", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChaptersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Genre", null) + .WithMany() + .HasForeignKey("GenresId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterPerson", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChapterMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Person", null) + .WithMany() + .HasForeignKey("PeopleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterTag", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChaptersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Tag", null) + .WithMany() + .HasForeignKey("TagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + 
b.HasOne("API.Entities.CollectionTag", null) + .WithMany() + .HasForeignKey("CollectionTagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("GenreSeriesMetadata", b => + { + b.HasOne("API.Entities.Genre", null) + .WithMany() + .HasForeignKey("GenresId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("PersonSeriesMetadata", b => + { + b.HasOne("API.Entities.Person", null) + .WithMany() + .HasForeignKey("PeopleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("SeriesMetadataTag", b => + { + 
b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Tag", null) + .WithMany() + .HasForeignKey("TagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Bookmarks"); + + b.Navigation("Devices"); + + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("ReadingLists"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + + b.Navigation("WantToRead"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.Navigation("Items"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Metadata"); + + b.Navigation("Progress"); + + b.Navigation("Ratings"); + + b.Navigation("RelationOf"); + + b.Navigation("Relations"); + + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20221006013956_ReleaseYearOnSeriesEdit.cs b/API/Data/Migrations/20221006013956_ReleaseYearOnSeriesEdit.cs new file mode 100644 index 000000000..e96557e4e --- /dev/null +++ b/API/Data/Migrations/20221006013956_ReleaseYearOnSeriesEdit.cs @@ -0,0 +1,26 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +#nullable disable + +namespace API.Data.Migrations +{ + public partial class ReleaseYearOnSeriesEdit : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.AddColumn( + name: "ReleaseYearLocked", + table: "SeriesMetadata", + type: 
"INTEGER", + nullable: false, + defaultValue: false); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "ReleaseYearLocked", + table: "SeriesMetadata"); + } + } +} diff --git a/API/Data/Migrations/20221009172653_ReadingListAgeRating.Designer.cs b/API/Data/Migrations/20221009172653_ReadingListAgeRating.Designer.cs new file mode 100644 index 000000000..f93e7a58d --- /dev/null +++ b/API/Data/Migrations/20221009172653_ReadingListAgeRating.Designer.cs @@ -0,0 +1,1667 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +#nullable disable + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20221009172653_ReadingListAgeRating")] + partial class ReadingListAgeRating + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder.HasAnnotation("ProductVersion", "6.0.9"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles", (string)null); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ApiKey") + .HasColumnType("TEXT"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + 
b.Property("ConfirmationToken") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers", (string)null); + }); + + modelBuilder.Entity("API.Entities.AppUserBookmark", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FileName") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Page") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserBookmark"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { 
+ b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AutoCloseMenu") + .HasColumnType("INTEGER"); + + b.Property("BackgroundColor") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT") + .HasDefaultValue("#000000"); + + b.Property("BlurUnreadSummaries") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderImmersiveMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLayoutMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("BookReaderReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("BookReaderTapToPaginate") + .HasColumnType("INTEGER"); + + b.Property("BookThemeName") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT") + .HasDefaultValue("Dark"); + + b.Property("GlobalPageLayoutMode") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("LayoutMode") + .HasColumnType("INTEGER"); + + b.Property("NoTransitions") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("PromptForDownloadSize") + .HasColumnType("INTEGER"); + + b.Property("ReaderMode") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.Property("ShowScreenHints") + .HasColumnType("INTEGER"); + + b.Property("ThemeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.HasIndex("ThemeId"); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + 
.HasColumnType("INTEGER"); + + b.Property("BookScrollId") + .HasColumnType("TEXT"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("SeriesId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("SeriesId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles", (string)null); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Count") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("Language") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + 
b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("ReleaseDate") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.Property("TitleName") + .HasColumnType("TEXT"); + + b.Property("TotalCount") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.CollectionTag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("Id", "Promoted") + .IsUnique(); + + b.ToTable("CollectionTag"); + }); + + modelBuilder.Entity("API.Entities.Device", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("EmailAddress") + .HasColumnType("TEXT"); + + b.Property("IpAddress") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LastUsed") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Platform") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("Device"); + }); + + 
modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Genre", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ExternalTag") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedTitle", "ExternalTag") + .IsUnique(); + + b.ToTable("Genre"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastFileAnalysis") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b => + { + b.Property("Id") + 
.ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AgeRatingLocked") + .HasColumnType("INTEGER"); + + b.Property("CharacterLocked") + .HasColumnType("INTEGER"); + + b.Property("ColoristLocked") + .HasColumnType("INTEGER"); + + b.Property("CoverArtistLocked") + .HasColumnType("INTEGER"); + + b.Property("EditorLocked") + .HasColumnType("INTEGER"); + + b.Property("GenresLocked") + .HasColumnType("INTEGER"); + + b.Property("InkerLocked") + .HasColumnType("INTEGER"); + + b.Property("Language") + .HasColumnType("TEXT"); + + b.Property("LanguageLocked") + .HasColumnType("INTEGER"); + + b.Property("LettererLocked") + .HasColumnType("INTEGER"); + + b.Property("MaxCount") + .HasColumnType("INTEGER"); + + b.Property("PencillerLocked") + .HasColumnType("INTEGER"); + + b.Property("PublicationStatus") + .HasColumnType("INTEGER"); + + b.Property("PublicationStatusLocked") + .HasColumnType("INTEGER"); + + b.Property("PublisherLocked") + .HasColumnType("INTEGER"); + + b.Property("ReleaseYear") + .HasColumnType("INTEGER"); + + b.Property("ReleaseYearLocked") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("SummaryLocked") + .HasColumnType("INTEGER"); + + b.Property("TagsLocked") + .HasColumnType("INTEGER"); + + b.Property("TotalCount") + .HasColumnType("INTEGER"); + + b.Property("TranslatorLocked") + .HasColumnType("INTEGER"); + + b.Property("WriterLocked") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId") + .IsUnique(); + + b.HasIndex("Id", "SeriesId") + .IsUnique(); + + b.ToTable("SeriesMetadata"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesRelation", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("RelationKind") + 
.HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("TargetSeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.HasIndex("TargetSeriesId"); + + b.ToTable("SeriesRelation"); + }); + + modelBuilder.Entity("API.Entities.Person", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("Role") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Person"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("ReadingList"); + }); + + modelBuilder.Entity("API.Entities.ReadingListItem", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Order") + .HasColumnType("INTEGER"); + + b.Property("ReadingListId") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.HasIndex("ReadingListId"); + + b.HasIndex("SeriesId"); + + b.HasIndex("VolumeId"); + + b.ToTable("ReadingListItem"); + }); + + 
modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FolderPath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastChapterAdded") + .HasColumnType("TEXT"); + + b.Property("LastFolderScanned") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("LocalizedNameLocked") + .HasColumnType("INTEGER"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NameLocked") + .HasColumnType("INTEGER"); + + b.Property("NormalizedLocalizedName") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("SortNameLocked") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("LibraryId"); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.SiteTheme", b => + { + 
b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FileName") + .HasColumnType("TEXT"); + + b.Property("IsDefault") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("Provider") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("SiteTheme"); + }); + + modelBuilder.Entity("API.Entities.Tag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ExternalTag") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedTitle", "ExternalTag") + .IsUnique(); + + b.ToTable("Tag"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + 
b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("ChapterGenre", b => + { + b.Property("ChaptersId") + .HasColumnType("INTEGER"); + + b.Property("GenresId") + .HasColumnType("INTEGER"); + + b.HasKey("ChaptersId", "GenresId"); + + b.HasIndex("GenresId"); + + b.ToTable("ChapterGenre"); + }); + + modelBuilder.Entity("ChapterPerson", b => + { + b.Property("ChapterMetadatasId") + .HasColumnType("INTEGER"); + + b.Property("PeopleId") + .HasColumnType("INTEGER"); + + b.HasKey("ChapterMetadatasId", "PeopleId"); + + b.HasIndex("PeopleId"); + + b.ToTable("ChapterPerson"); + }); + + modelBuilder.Entity("ChapterTag", b => + { + b.Property("ChaptersId") + .HasColumnType("INTEGER"); + + b.Property("TagsId") + .HasColumnType("INTEGER"); + + b.HasKey("ChaptersId", "TagsId"); + + b.HasIndex("TagsId"); + + b.ToTable("ChapterTag"); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.Property("CollectionTagsId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("CollectionTagsId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("CollectionTagSeriesMetadata"); + }); + + modelBuilder.Entity("GenreSeriesMetadata", b => + { + b.Property("GenresId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("GenresId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("GenreSeriesMetadata"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + 
b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens", (string)null); + }); + + modelBuilder.Entity("PersonSeriesMetadata", b => + { + b.Property("PeopleId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("PeopleId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("PersonSeriesMetadata"); + }); + + modelBuilder.Entity("SeriesMetadataTag", b => + { + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.Property("TagsId") + .HasColumnType("INTEGER"); + + b.HasKey("SeriesMetadatasId", "TagsId"); + + b.HasIndex("TagsId"); + + b.ToTable("SeriesMetadataTag"); + }); + + modelBuilder.Entity("API.Entities.AppUserBookmark", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Bookmarks") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + 
b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.SiteTheme", "Theme") + .WithMany() + .HasForeignKey("ThemeId"); + + b.Navigation("AppUser"); + + b.Navigation("Theme"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", null) + .WithMany("Progress") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", null) + .WithMany("Ratings") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.Device", b => + { + b.HasOne("API.Entities.AppUser", 
"AppUser") + .WithMany("Devices") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithOne("Metadata") + .HasForeignKey("API.Entities.Metadata.SeriesMetadata", "SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesRelation", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Relations") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", "TargetSeries") + .WithMany("RelationOf") + .HasForeignKey("TargetSeriesId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired(); + + b.Navigation("Series"); + + b.Navigation("TargetSeries"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("ReadingLists") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.ReadingListItem", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany() + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.ReadingList", "ReadingList") + .WithMany("Items") + .HasForeignKey("ReadingListId") + 
.OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", "Series") + .WithMany() + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Volume", "Volume") + .WithMany() + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + + b.Navigation("ReadingList"); + + b.Navigation("Series"); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany("WantToRead") + .HasForeignKey("AppUserId"); + + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterGenre", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChaptersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Genre", null) + .WithMany() + .HasForeignKey("GenresId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterPerson", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChapterMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Person", null) + .WithMany() + .HasForeignKey("PeopleId") + 
.OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterTag", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChaptersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Tag", null) + .WithMany() + .HasForeignKey("TagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.HasOne("API.Entities.CollectionTag", null) + .WithMany() + .HasForeignKey("CollectionTagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("GenreSeriesMetadata", b => + { + b.HasOne("API.Entities.Genre", null) + .WithMany() + .HasForeignKey("GenresId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + 
.OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("PersonSeriesMetadata", b => + { + b.HasOne("API.Entities.Person", null) + .WithMany() + .HasForeignKey("PeopleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("SeriesMetadataTag", b => + { + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Tag", null) + .WithMany() + .HasForeignKey("TagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Bookmarks"); + + b.Navigation("Devices"); + + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("ReadingLists"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + + b.Navigation("WantToRead"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.Navigation("Items"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Metadata"); + + b.Navigation("Progress"); + + b.Navigation("Ratings"); + + b.Navigation("RelationOf"); + + b.Navigation("Relations"); + + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20221009172653_ReadingListAgeRating.cs b/API/Data/Migrations/20221009172653_ReadingListAgeRating.cs new file 
mode 100644 index 000000000..dfc69a9cf --- /dev/null +++ b/API/Data/Migrations/20221009172653_ReadingListAgeRating.cs @@ -0,0 +1,26 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +#nullable disable + +namespace API.Data.Migrations +{ + public partial class ReadingListAgeRating : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.AddColumn( + name: "AgeRating", + table: "ReadingList", + type: "INTEGER", + nullable: false, + defaultValue: 0); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "AgeRating", + table: "ReadingList"); + } + } +} diff --git a/API/Data/Migrations/20221009211237_UserAgeRating.Designer.cs b/API/Data/Migrations/20221009211237_UserAgeRating.Designer.cs new file mode 100644 index 000000000..1a9e9fade --- /dev/null +++ b/API/Data/Migrations/20221009211237_UserAgeRating.Designer.cs @@ -0,0 +1,1670 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +#nullable disable + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20221009211237_UserAgeRating")] + partial class UserAgeRating + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder.HasAnnotation("ProductVersion", "6.0.9"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + 
b.ToTable("AspNetRoles", (string)null); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("AgeRestriction") + .HasColumnType("INTEGER"); + + b.Property("ApiKey") + .HasColumnType("TEXT"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("ConfirmationToken") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers", (string)null); + }); + + modelBuilder.Entity("API.Entities.AppUserBookmark", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + 
.HasColumnType("TEXT"); + + b.Property("FileName") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Page") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserBookmark"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AutoCloseMenu") + .HasColumnType("INTEGER"); + + b.Property("BackgroundColor") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT") + .HasDefaultValue("#000000"); + + b.Property("BlurUnreadSummaries") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderImmersiveMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLayoutMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("BookReaderReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("BookReaderTapToPaginate") + .HasColumnType("INTEGER"); + + b.Property("BookThemeName") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT") + .HasDefaultValue("Dark"); + + b.Property("GlobalPageLayoutMode") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("LayoutMode") + .HasColumnType("INTEGER"); + + b.Property("NoTransitions") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("PromptForDownloadSize") + .HasColumnType("INTEGER"); + + b.Property("ReaderMode") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + 
b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.Property("ShowScreenHints") + .HasColumnType("INTEGER"); + + b.Property("ThemeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.HasIndex("ThemeId"); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookScrollId") + .HasColumnType("TEXT"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("SeriesId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("SeriesId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles", (string)null); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + 
.HasColumnType("INTEGER"); + + b.Property("Count") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("Language") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("ReleaseDate") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.Property("TitleName") + .HasColumnType("TEXT"); + + b.Property("TotalCount") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.CollectionTag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("Id", "Promoted") + .IsUnique(); + + b.ToTable("CollectionTag"); + }); + + modelBuilder.Entity("API.Entities.Device", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + 
b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("EmailAddress") + .HasColumnType("TEXT"); + + b.Property("IpAddress") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LastUsed") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Platform") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("Device"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Genre", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ExternalTag") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedTitle", "ExternalTag") + .IsUnique(); + + b.ToTable("Genre"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + 
b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastFileAnalysis") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AgeRatingLocked") + .HasColumnType("INTEGER"); + + b.Property("CharacterLocked") + .HasColumnType("INTEGER"); + + b.Property("ColoristLocked") + .HasColumnType("INTEGER"); + + b.Property("CoverArtistLocked") + .HasColumnType("INTEGER"); + + b.Property("EditorLocked") + .HasColumnType("INTEGER"); + + b.Property("GenresLocked") + .HasColumnType("INTEGER"); + + b.Property("InkerLocked") + .HasColumnType("INTEGER"); + + b.Property("Language") + .HasColumnType("TEXT"); + + b.Property("LanguageLocked") + .HasColumnType("INTEGER"); + + b.Property("LettererLocked") + .HasColumnType("INTEGER"); + + b.Property("MaxCount") + .HasColumnType("INTEGER"); + + b.Property("PencillerLocked") + .HasColumnType("INTEGER"); + + b.Property("PublicationStatus") + .HasColumnType("INTEGER"); + + b.Property("PublicationStatusLocked") + .HasColumnType("INTEGER"); + + b.Property("PublisherLocked") + .HasColumnType("INTEGER"); + + b.Property("ReleaseYear") + .HasColumnType("INTEGER"); + + b.Property("ReleaseYearLocked") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("SummaryLocked") + .HasColumnType("INTEGER"); + + b.Property("TagsLocked") + .HasColumnType("INTEGER"); + + b.Property("TotalCount") + .HasColumnType("INTEGER"); + + 
b.Property("TranslatorLocked") + .HasColumnType("INTEGER"); + + b.Property("WriterLocked") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId") + .IsUnique(); + + b.HasIndex("Id", "SeriesId") + .IsUnique(); + + b.ToTable("SeriesMetadata"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesRelation", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("RelationKind") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("TargetSeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.HasIndex("TargetSeriesId"); + + b.ToTable("SeriesRelation"); + }); + + modelBuilder.Entity("API.Entities.Person", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("Role") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Person"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("ReadingList"); + }); + + modelBuilder.Entity("API.Entities.ReadingListItem", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") 
+ .HasColumnType("INTEGER"); + + b.Property("Order") + .HasColumnType("INTEGER"); + + b.Property("ReadingListId") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.HasIndex("ReadingListId"); + + b.HasIndex("SeriesId"); + + b.HasIndex("VolumeId"); + + b.ToTable("ReadingListItem"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FolderPath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastChapterAdded") + .HasColumnType("TEXT"); + + b.Property("LastFolderScanned") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("LocalizedNameLocked") + .HasColumnType("INTEGER"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NameLocked") + .HasColumnType("INTEGER"); + + b.Property("NormalizedLocalizedName") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("SortNameLocked") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + 
b.HasIndex("AppUserId"); + + b.HasIndex("LibraryId"); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.SiteTheme", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FileName") + .HasColumnType("TEXT"); + + b.Property("IsDefault") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("Provider") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("SiteTheme"); + }); + + modelBuilder.Entity("API.Entities.Tag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ExternalTag") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedTitle", "ExternalTag") + .IsUnique(); + + b.ToTable("Tag"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); 
+ + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("ChapterGenre", b => + { + b.Property("ChaptersId") + .HasColumnType("INTEGER"); + + b.Property("GenresId") + .HasColumnType("INTEGER"); + + b.HasKey("ChaptersId", "GenresId"); + + b.HasIndex("GenresId"); + + b.ToTable("ChapterGenre"); + }); + + modelBuilder.Entity("ChapterPerson", b => + { + b.Property("ChapterMetadatasId") + .HasColumnType("INTEGER"); + + b.Property("PeopleId") + .HasColumnType("INTEGER"); + + b.HasKey("ChapterMetadatasId", "PeopleId"); + + b.HasIndex("PeopleId"); + + b.ToTable("ChapterPerson"); + }); + + modelBuilder.Entity("ChapterTag", b => + { + b.Property("ChaptersId") + .HasColumnType("INTEGER"); + + b.Property("TagsId") + .HasColumnType("INTEGER"); + + b.HasKey("ChaptersId", "TagsId"); + + b.HasIndex("TagsId"); + + b.ToTable("ChapterTag"); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.Property("CollectionTagsId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("CollectionTagsId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("CollectionTagSeriesMetadata"); + }); + + modelBuilder.Entity("GenreSeriesMetadata", b => + { + b.Property("GenresId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("GenresId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("GenreSeriesMetadata"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { 
+ b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens", (string)null); + }); + + modelBuilder.Entity("PersonSeriesMetadata", b => + { + b.Property("PeopleId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("PeopleId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("PersonSeriesMetadata"); + }); + + modelBuilder.Entity("SeriesMetadataTag", b => + { + b.Property("SeriesMetadatasId") + 
.HasColumnType("INTEGER"); + + b.Property("TagsId") + .HasColumnType("INTEGER"); + + b.HasKey("SeriesMetadatasId", "TagsId"); + + b.HasIndex("TagsId"); + + b.ToTable("SeriesMetadataTag"); + }); + + modelBuilder.Entity("API.Entities.AppUserBookmark", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Bookmarks") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.SiteTheme", "Theme") + .WithMany() + .HasForeignKey("ThemeId"); + + b.Navigation("AppUser"); + + b.Navigation("Theme"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", null) + .WithMany("Progress") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", null) + .WithMany("Ratings") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + 
.OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.Device", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Devices") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithOne("Metadata") + .HasForeignKey("API.Entities.Metadata.SeriesMetadata", "SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesRelation", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Relations") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", "TargetSeries") + .WithMany("RelationOf") + .HasForeignKey("TargetSeriesId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired(); + + b.Navigation("Series"); + + b.Navigation("TargetSeries"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("ReadingLists") + 
.HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.ReadingListItem", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany() + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.ReadingList", "ReadingList") + .WithMany("Items") + .HasForeignKey("ReadingListId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", "Series") + .WithMany() + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Volume", "Volume") + .WithMany() + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + + b.Navigation("ReadingList"); + + b.Navigation("Series"); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany("WantToRead") + .HasForeignKey("AppUserId"); + + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterGenre", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChaptersId") + .OnDelete(DeleteBehavior.Cascade) + 
.IsRequired(); + + b.HasOne("API.Entities.Genre", null) + .WithMany() + .HasForeignKey("GenresId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterPerson", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChapterMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Person", null) + .WithMany() + .HasForeignKey("PeopleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterTag", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChaptersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Tag", null) + .WithMany() + .HasForeignKey("TagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.HasOne("API.Entities.CollectionTag", null) + .WithMany() + .HasForeignKey("CollectionTagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("GenreSeriesMetadata", b => + { + b.HasOne("API.Entities.Genre", null) + .WithMany() + .HasForeignKey("GenresId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + 
.IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("PersonSeriesMetadata", b => + { + b.HasOne("API.Entities.Person", null) + .WithMany() + .HasForeignKey("PeopleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("SeriesMetadataTag", b => + { + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Tag", null) + .WithMany() + .HasForeignKey("TagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Bookmarks"); + + b.Navigation("Devices"); + + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("ReadingLists"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + + b.Navigation("WantToRead"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.Navigation("Items"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Metadata"); + + 
b.Navigation("Progress"); + + b.Navigation("Ratings"); + + b.Navigation("RelationOf"); + + b.Navigation("Relations"); + + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20221009211237_UserAgeRating.cs b/API/Data/Migrations/20221009211237_UserAgeRating.cs new file mode 100644 index 000000000..a619255ef --- /dev/null +++ b/API/Data/Migrations/20221009211237_UserAgeRating.cs @@ -0,0 +1,27 @@ +using API.Entities.Enums; +using Microsoft.EntityFrameworkCore.Migrations; + +#nullable disable + +namespace API.Data.Migrations +{ + public partial class UserAgeRating : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.AddColumn( + name: "AgeRestriction", + table: "AspNetUsers", + type: "INTEGER", + nullable: false, + defaultValue: AgeRating.NotApplicable); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "AgeRestriction", + table: "AspNetUsers"); + } + } +} diff --git a/API/Data/Migrations/20221017131711_IncludeUnknowns.Designer.cs b/API/Data/Migrations/20221017131711_IncludeUnknowns.Designer.cs new file mode 100644 index 000000000..9ad6b3542 --- /dev/null +++ b/API/Data/Migrations/20221017131711_IncludeUnknowns.Designer.cs @@ -0,0 +1,1673 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +#nullable disable + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20221017131711_IncludeUnknowns")] + partial class IncludeUnknowns + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder.HasAnnotation("ProductVersion", "6.0.9"); + 
+ modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles", (string)null); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("AgeRestriction") + .HasColumnType("INTEGER"); + + b.Property("AgeRestrictionIncludeUnknowns") + .HasColumnType("INTEGER"); + + b.Property("ApiKey") + .HasColumnType("TEXT"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("ConfirmationToken") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + 
.HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers", (string)null); + }); + + modelBuilder.Entity("API.Entities.AppUserBookmark", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FileName") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Page") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserBookmark"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AutoCloseMenu") + .HasColumnType("INTEGER"); + + b.Property("BackgroundColor") + .ValueGeneratedOnAdd() + .HasColumnType("TEXT") + .HasDefaultValue("#000000"); + + b.Property("BlurUnreadSummaries") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderImmersiveMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLayoutMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("BookReaderReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("BookReaderTapToPaginate") + .HasColumnType("INTEGER"); + + b.Property("BookThemeName") + .ValueGeneratedOnAdd() + 
.HasColumnType("TEXT") + .HasDefaultValue("Dark"); + + b.Property("GlobalPageLayoutMode") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER") + .HasDefaultValue(0); + + b.Property("LayoutMode") + .HasColumnType("INTEGER"); + + b.Property("NoTransitions") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("PromptForDownloadSize") + .HasColumnType("INTEGER"); + + b.Property("ReaderMode") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.Property("ShowScreenHints") + .HasColumnType("INTEGER"); + + b.Property("ThemeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.HasIndex("ThemeId"); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookScrollId") + .HasColumnType("TEXT"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("SeriesId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("SeriesId"); + + 
b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles", (string)null); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Count") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("Language") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("ReleaseDate") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.Property("TitleName") + .HasColumnType("TEXT"); + + b.Property("TotalCount") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.CollectionTag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + 
.HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("Id", "Promoted") + .IsUnique(); + + b.ToTable("CollectionTag"); + }); + + modelBuilder.Entity("API.Entities.Device", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("EmailAddress") + .HasColumnType("TEXT"); + + b.Property("IpAddress") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LastUsed") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Platform") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("Device"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Genre", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ExternalTag") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedTitle", "ExternalTag") + .IsUnique(); + + b.ToTable("Genre"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + 
.HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastFileAnalysis") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AgeRatingLocked") + .HasColumnType("INTEGER"); + + b.Property("CharacterLocked") + .HasColumnType("INTEGER"); + + b.Property("ColoristLocked") + .HasColumnType("INTEGER"); + + b.Property("CoverArtistLocked") + .HasColumnType("INTEGER"); + + b.Property("EditorLocked") + .HasColumnType("INTEGER"); + + b.Property("GenresLocked") + .HasColumnType("INTEGER"); + + b.Property("InkerLocked") + .HasColumnType("INTEGER"); + + b.Property("Language") + .HasColumnType("TEXT"); + + b.Property("LanguageLocked") + .HasColumnType("INTEGER"); + + b.Property("LettererLocked") + .HasColumnType("INTEGER"); + + b.Property("MaxCount") + .HasColumnType("INTEGER"); + + b.Property("PencillerLocked") + .HasColumnType("INTEGER"); + + b.Property("PublicationStatus") + .HasColumnType("INTEGER"); + + b.Property("PublicationStatusLocked") + .HasColumnType("INTEGER"); + + b.Property("PublisherLocked") + 
.HasColumnType("INTEGER"); + + b.Property("ReleaseYear") + .HasColumnType("INTEGER"); + + b.Property("ReleaseYearLocked") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("SummaryLocked") + .HasColumnType("INTEGER"); + + b.Property("TagsLocked") + .HasColumnType("INTEGER"); + + b.Property("TotalCount") + .HasColumnType("INTEGER"); + + b.Property("TranslatorLocked") + .HasColumnType("INTEGER"); + + b.Property("WriterLocked") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId") + .IsUnique(); + + b.HasIndex("Id", "SeriesId") + .IsUnique(); + + b.ToTable("SeriesMetadata"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesRelation", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("RelationKind") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("TargetSeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.HasIndex("TargetSeriesId"); + + b.ToTable("SeriesRelation"); + }); + + modelBuilder.Entity("API.Entities.Person", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("Role") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Person"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AgeRating") + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + 
b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("ReadingList"); + }); + + modelBuilder.Entity("API.Entities.ReadingListItem", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Order") + .HasColumnType("INTEGER"); + + b.Property("ReadingListId") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.HasIndex("ReadingListId"); + + b.HasIndex("SeriesId"); + + b.HasIndex("VolumeId"); + + b.ToTable("ReadingListItem"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("CoverImageLocked") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FolderPath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastChapterAdded") + .HasColumnType("TEXT"); + + b.Property("LastFolderScanned") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("LocalizedNameLocked") + .HasColumnType("INTEGER"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Name") + 
.HasColumnType("TEXT"); + + b.Property("NameLocked") + .HasColumnType("INTEGER"); + + b.Property("NormalizedLocalizedName") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("SortNameLocked") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.HasIndex("LibraryId"); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.SiteTheme", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("FileName") + .HasColumnType("TEXT"); + + b.Property("IsDefault") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("Provider") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("SiteTheme"); + }); + + modelBuilder.Entity("API.Entities.Tag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ExternalTag") + .HasColumnType("INTEGER"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedTitle", "ExternalTag") + .IsUnique(); + + b.ToTable("Tag"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + 
.HasColumnType("INTEGER"); + + b.Property("AvgHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("MaxHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("MinHoursToRead") + .HasColumnType("INTEGER"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("WordCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("ChapterGenre", b => + { + b.Property("ChaptersId") + .HasColumnType("INTEGER"); + + b.Property("GenresId") + .HasColumnType("INTEGER"); + + b.HasKey("ChaptersId", "GenresId"); + + b.HasIndex("GenresId"); + + b.ToTable("ChapterGenre"); + }); + + modelBuilder.Entity("ChapterPerson", b => + { + b.Property("ChapterMetadatasId") + .HasColumnType("INTEGER"); + + b.Property("PeopleId") + .HasColumnType("INTEGER"); + + b.HasKey("ChapterMetadatasId", "PeopleId"); + + b.HasIndex("PeopleId"); + + b.ToTable("ChapterPerson"); + }); + + modelBuilder.Entity("ChapterTag", b => + { + b.Property("ChaptersId") + .HasColumnType("INTEGER"); + + b.Property("TagsId") + .HasColumnType("INTEGER"); + + b.HasKey("ChaptersId", "TagsId"); + + b.HasIndex("TagsId"); + + b.ToTable("ChapterTag"); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.Property("CollectionTagsId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + 
.HasColumnType("INTEGER"); + + b.HasKey("CollectionTagsId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("CollectionTagSeriesMetadata"); + }); + + modelBuilder.Entity("GenreSeriesMetadata", b => + { + b.Property("GenresId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("GenresId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("GenreSeriesMetadata"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins", (string)null); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); 
+ + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens", (string)null); + }); + + modelBuilder.Entity("PersonSeriesMetadata", b => + { + b.Property("PeopleId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("PeopleId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("PersonSeriesMetadata"); + }); + + modelBuilder.Entity("SeriesMetadataTag", b => + { + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.Property("TagsId") + .HasColumnType("INTEGER"); + + b.HasKey("SeriesMetadatasId", "TagsId"); + + b.HasIndex("TagsId"); + + b.ToTable("SeriesMetadataTag"); + }); + + modelBuilder.Entity("API.Entities.AppUserBookmark", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Bookmarks") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.SiteTheme", "Theme") + .WithMany() + .HasForeignKey("ThemeId"); + + b.Navigation("AppUser"); + + b.Navigation("Theme"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", null) + .WithMany("Progress") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + 
.OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", null) + .WithMany("Ratings") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.Device", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Devices") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithOne("Metadata") + .HasForeignKey("API.Entities.Metadata.SeriesMetadata", "SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Metadata.SeriesRelation", b => + { + 
b.HasOne("API.Entities.Series", "Series") + .WithMany("Relations") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", "TargetSeries") + .WithMany("RelationOf") + .HasForeignKey("TargetSeriesId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired(); + + b.Navigation("Series"); + + b.Navigation("TargetSeries"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("ReadingLists") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.ReadingListItem", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany() + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.ReadingList", "ReadingList") + .WithMany("Items") + .HasForeignKey("ReadingListId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Series", "Series") + .WithMany() + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Volume", "Volume") + .WithMany() + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + + b.Navigation("ReadingList"); + + b.Navigation("Series"); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany("WantToRead") + .HasForeignKey("AppUserId"); + + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + 
b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterGenre", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChaptersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Genre", null) + .WithMany() + .HasForeignKey("GenresId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterPerson", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChapterMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Person", null) + .WithMany() + .HasForeignKey("PeopleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("ChapterTag", b => + { + b.HasOne("API.Entities.Chapter", null) + .WithMany() + .HasForeignKey("ChaptersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Tag", null) + .WithMany() + .HasForeignKey("TagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.HasOne("API.Entities.CollectionTag", null) + .WithMany() + .HasForeignKey("CollectionTagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("GenreSeriesMetadata", b => + { + b.HasOne("API.Entities.Genre", null) + .WithMany() + .HasForeignKey("GenresId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) 
+ .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("PersonSeriesMetadata", b => + { + b.HasOne("API.Entities.Person", null) + .WithMany() + .HasForeignKey("PeopleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("SeriesMetadataTag", b => + { + b.HasOne("API.Entities.Metadata.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Tag", null) + .WithMany() + .HasForeignKey("TagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Bookmarks"); + + b.Navigation("Devices"); + + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + 
+ b.Navigation("ReadingLists"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + + b.Navigation("WantToRead"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.ReadingList", b => + { + b.Navigation("Items"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Metadata"); + + b.Navigation("Progress"); + + b.Navigation("Ratings"); + + b.Navigation("RelationOf"); + + b.Navigation("Relations"); + + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20221017131711_IncludeUnknowns.cs b/API/Data/Migrations/20221017131711_IncludeUnknowns.cs new file mode 100644 index 000000000..34c0dfd9e --- /dev/null +++ b/API/Data/Migrations/20221017131711_IncludeUnknowns.cs @@ -0,0 +1,26 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +#nullable disable + +namespace API.Data.Migrations +{ + public partial class IncludeUnknowns : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.AddColumn( + name: "AgeRestrictionIncludeUnknowns", + table: "AspNetUsers", + type: "INTEGER", + nullable: false, + defaultValue: false); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "AgeRestrictionIncludeUnknowns", + table: "AspNetUsers"); + } + } +} diff --git a/API/Data/Migrations/DataContextModelSnapshot.cs b/API/Data/Migrations/DataContextModelSnapshot.cs index d65cc4adb..ca7164702 100644 --- a/API/Data/Migrations/DataContextModelSnapshot.cs +++ b/API/Data/Migrations/DataContextModelSnapshot.cs @@ -15,7 +15,7 @@ namespace API.Data.Migrations protected override void BuildModel(ModelBuilder 
modelBuilder) { #pragma warning disable 612, 618 - modelBuilder.HasAnnotation("ProductVersion", "6.0.7"); + modelBuilder.HasAnnotation("ProductVersion", "6.0.9"); modelBuilder.Entity("API.Entities.AppRole", b => { @@ -53,6 +53,12 @@ namespace API.Data.Migrations b.Property("AccessFailedCount") .HasColumnType("INTEGER"); + b.Property("AgeRestriction") + .HasColumnType("INTEGER"); + + b.Property("AgeRestrictionIncludeUnknowns") + .HasColumnType("INTEGER"); + b.Property("ApiKey") .HasColumnType("TEXT"); @@ -159,7 +165,7 @@ namespace API.Data.Migrations b.HasIndex("AppUserId"); - b.ToTable("AppUserBookmark"); + b.ToTable("AppUserBookmark", (string)null); }); modelBuilder.Entity("API.Entities.AppUserPreferences", b => @@ -219,6 +225,9 @@ namespace API.Data.Migrations b.Property("LayoutMode") .HasColumnType("INTEGER"); + b.Property("NoTransitions") + .HasColumnType("INTEGER"); + b.Property("PageSplitOption") .HasColumnType("INTEGER"); @@ -247,7 +256,7 @@ namespace API.Data.Migrations b.HasIndex("ThemeId"); - b.ToTable("AppUserPreferences"); + b.ToTable("AppUserPreferences", (string)null); }); modelBuilder.Entity("API.Entities.AppUserProgress", b => @@ -286,7 +295,7 @@ namespace API.Data.Migrations b.HasIndex("SeriesId"); - b.ToTable("AppUserProgresses"); + b.ToTable("AppUserProgresses", (string)null); }); modelBuilder.Entity("API.Entities.AppUserRating", b => @@ -313,7 +322,7 @@ namespace API.Data.Migrations b.HasIndex("SeriesId"); - b.ToTable("AppUserRating"); + b.ToTable("AppUserRating", (string)null); }); modelBuilder.Entity("API.Entities.AppUserRole", b => @@ -404,7 +413,7 @@ namespace API.Data.Migrations b.HasIndex("VolumeId"); - b.ToTable("Chapter"); + b.ToTable("Chapter", (string)null); }); modelBuilder.Entity("API.Entities.CollectionTag", b => @@ -439,7 +448,44 @@ namespace API.Data.Migrations b.HasIndex("Id", "Promoted") .IsUnique(); - b.ToTable("CollectionTag"); + b.ToTable("CollectionTag", (string)null); + }); + + modelBuilder.Entity("API.Entities.Device", b 
=> + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("EmailAddress") + .HasColumnType("TEXT"); + + b.Property("IpAddress") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LastUsed") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Platform") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("Device", (string)null); }); modelBuilder.Entity("API.Entities.FolderPath", b => @@ -461,7 +507,7 @@ namespace API.Data.Migrations b.HasIndex("LibraryId"); - b.ToTable("FolderPath"); + b.ToTable("FolderPath", (string)null); }); modelBuilder.Entity("API.Entities.Genre", b => @@ -484,7 +530,7 @@ namespace API.Data.Migrations b.HasIndex("NormalizedTitle", "ExternalTag") .IsUnique(); - b.ToTable("Genre"); + b.ToTable("Genre", (string)null); }); modelBuilder.Entity("API.Entities.Library", b => @@ -513,7 +559,7 @@ namespace API.Data.Migrations b.HasKey("Id"); - b.ToTable("Library"); + b.ToTable("Library", (string)null); }); modelBuilder.Entity("API.Entities.MangaFile", b => @@ -547,7 +593,7 @@ namespace API.Data.Migrations b.HasIndex("ChapterId"); - b.ToTable("MangaFile"); + b.ToTable("MangaFile", (string)null); }); modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b => @@ -607,6 +653,9 @@ namespace API.Data.Migrations b.Property("ReleaseYear") .HasColumnType("INTEGER"); + b.Property("ReleaseYearLocked") + .HasColumnType("INTEGER"); + b.Property("RowVersion") .IsConcurrencyToken() .HasColumnType("INTEGER"); @@ -640,7 +689,7 @@ namespace API.Data.Migrations b.HasIndex("Id", "SeriesId") .IsUnique(); - b.ToTable("SeriesMetadata"); + b.ToTable("SeriesMetadata", (string)null); }); modelBuilder.Entity("API.Entities.Metadata.SeriesRelation", b => @@ -664,7 +713,7 @@ namespace API.Data.Migrations 
b.HasIndex("TargetSeriesId"); - b.ToTable("SeriesRelation"); + b.ToTable("SeriesRelation", (string)null); }); modelBuilder.Entity("API.Entities.Person", b => @@ -684,7 +733,7 @@ namespace API.Data.Migrations b.HasKey("Id"); - b.ToTable("Person"); + b.ToTable("Person", (string)null); }); modelBuilder.Entity("API.Entities.ReadingList", b => @@ -693,6 +742,9 @@ namespace API.Data.Migrations .ValueGeneratedOnAdd() .HasColumnType("INTEGER"); + b.Property("AgeRating") + .HasColumnType("INTEGER"); + b.Property("AppUserId") .HasColumnType("INTEGER"); @@ -724,7 +776,7 @@ namespace API.Data.Migrations b.HasIndex("AppUserId"); - b.ToTable("ReadingList"); + b.ToTable("ReadingList", (string)null); }); modelBuilder.Entity("API.Entities.ReadingListItem", b => @@ -758,7 +810,7 @@ namespace API.Data.Migrations b.HasIndex("VolumeId"); - b.ToTable("ReadingListItem"); + b.ToTable("ReadingListItem", (string)null); }); modelBuilder.Entity("API.Entities.Series", b => @@ -845,7 +897,7 @@ namespace API.Data.Migrations b.HasIndex("LibraryId"); - b.ToTable("Series"); + b.ToTable("Series", (string)null); }); modelBuilder.Entity("API.Entities.ServerSetting", b => @@ -862,7 +914,7 @@ namespace API.Data.Migrations b.HasKey("Key"); - b.ToTable("ServerSetting"); + b.ToTable("ServerSetting", (string)null); }); modelBuilder.Entity("API.Entities.SiteTheme", b => @@ -894,7 +946,7 @@ namespace API.Data.Migrations b.HasKey("Id"); - b.ToTable("SiteTheme"); + b.ToTable("SiteTheme", (string)null); }); modelBuilder.Entity("API.Entities.Tag", b => @@ -917,7 +969,7 @@ namespace API.Data.Migrations b.HasIndex("NormalizedTitle", "ExternalTag") .IsUnique(); - b.ToTable("Tag"); + b.ToTable("Tag", (string)null); }); modelBuilder.Entity("API.Entities.Volume", b => @@ -963,7 +1015,7 @@ namespace API.Data.Migrations b.HasIndex("SeriesId"); - b.ToTable("Volume"); + b.ToTable("Volume", (string)null); }); modelBuilder.Entity("AppUserLibrary", b => @@ -978,7 +1030,7 @@ namespace API.Data.Migrations 
b.HasIndex("LibrariesId"); - b.ToTable("AppUserLibrary"); + b.ToTable("AppUserLibrary", (string)null); }); modelBuilder.Entity("ChapterGenre", b => @@ -993,7 +1045,7 @@ namespace API.Data.Migrations b.HasIndex("GenresId"); - b.ToTable("ChapterGenre"); + b.ToTable("ChapterGenre", (string)null); }); modelBuilder.Entity("ChapterPerson", b => @@ -1008,7 +1060,7 @@ namespace API.Data.Migrations b.HasIndex("PeopleId"); - b.ToTable("ChapterPerson"); + b.ToTable("ChapterPerson", (string)null); }); modelBuilder.Entity("ChapterTag", b => @@ -1023,7 +1075,7 @@ namespace API.Data.Migrations b.HasIndex("TagsId"); - b.ToTable("ChapterTag"); + b.ToTable("ChapterTag", (string)null); }); modelBuilder.Entity("CollectionTagSeriesMetadata", b => @@ -1038,7 +1090,7 @@ namespace API.Data.Migrations b.HasIndex("SeriesMetadatasId"); - b.ToTable("CollectionTagSeriesMetadata"); + b.ToTable("CollectionTagSeriesMetadata", (string)null); }); modelBuilder.Entity("GenreSeriesMetadata", b => @@ -1053,7 +1105,7 @@ namespace API.Data.Migrations b.HasIndex("SeriesMetadatasId"); - b.ToTable("GenreSeriesMetadata"); + b.ToTable("GenreSeriesMetadata", (string)null); }); modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => @@ -1152,7 +1204,7 @@ namespace API.Data.Migrations b.HasIndex("SeriesMetadatasId"); - b.ToTable("PersonSeriesMetadata"); + b.ToTable("PersonSeriesMetadata", (string)null); }); modelBuilder.Entity("SeriesMetadataTag", b => @@ -1167,7 +1219,7 @@ namespace API.Data.Migrations b.HasIndex("TagsId"); - b.ToTable("SeriesMetadataTag"); + b.ToTable("SeriesMetadataTag", (string)null); }); modelBuilder.Entity("API.Entities.AppUserBookmark", b => @@ -1262,6 +1314,17 @@ namespace API.Data.Migrations b.Navigation("Volume"); }); + modelBuilder.Entity("API.Entities.Device", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Devices") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + 
modelBuilder.Entity("API.Entities.FolderPath", b => { b.HasOne("API.Entities.Library", "Library") @@ -1306,7 +1369,7 @@ namespace API.Data.Migrations b.HasOne("API.Entities.Series", "TargetSeries") .WithMany("RelationOf") .HasForeignKey("TargetSeriesId") - .OnDelete(DeleteBehavior.Cascade) + .OnDelete(DeleteBehavior.ClientCascade) .IsRequired(); b.Navigation("Series"); @@ -1551,6 +1614,8 @@ namespace API.Data.Migrations { b.Navigation("Bookmarks"); + b.Navigation("Devices"); + b.Navigation("Progresses"); b.Navigation("Ratings"); diff --git a/API/Data/Misc/AgeRestriction.cs b/API/Data/Misc/AgeRestriction.cs new file mode 100644 index 000000000..90c3c5888 --- /dev/null +++ b/API/Data/Misc/AgeRestriction.cs @@ -0,0 +1,9 @@ +using API.Entities.Enums; + +namespace API.Data.Misc; + +public class AgeRestriction +{ + public AgeRating AgeRating { get; set; } + public bool IncludeUnknowns { get; set; } +} diff --git a/API/Data/Misc/RecentlyAddedSeries.cs b/API/Data/Misc/RecentlyAddedSeries.cs new file mode 100644 index 000000000..24100ca0f --- /dev/null +++ b/API/Data/Misc/RecentlyAddedSeries.cs @@ -0,0 +1,22 @@ +using System; +using API.Entities.Enums; + +namespace API.Data.Misc; + +public class RecentlyAddedSeries +{ + public int LibraryId { get; init; } + public LibraryType LibraryType { get; init; } + public DateTime Created { get; init; } + public int SeriesId { get; init; } + public string SeriesName { get; init; } + public MangaFormat Format { get; init; } + public int ChapterId { get; init; } + public int VolumeId { get; init; } + public string ChapterNumber { get; init; } + public string ChapterRange { get; init; } + public string ChapterTitle { get; init; } + public bool IsSpecial { get; init; } + public int VolumeNumber { get; init; } + public AgeRating AgeRating { get; init; } +} diff --git a/API/Data/Repositories/ChapterRepository.cs b/API/Data/Repositories/ChapterRepository.cs index ab3684fa0..ce65883cc 100644 --- a/API/Data/Repositories/ChapterRepository.cs 
+++ b/API/Data/Repositories/ChapterRepository.cs @@ -63,7 +63,7 @@ public class ChapterRepository : IChapterRepository .Join(_context.Volume, c => c.VolumeId, v => v.Id, (chapter, volume) => new { ChapterNumber = chapter.Range, - VolumeNumber = volume.Number, + VolumeNumber = volume.Name, VolumeId = volume.Id, chapter.IsSpecial, chapter.TitleName, diff --git a/API/Data/Repositories/CollectionTagRepository.cs b/API/Data/Repositories/CollectionTagRepository.cs index 7b9398b85..a5ea582f3 100644 --- a/API/Data/Repositories/CollectionTagRepository.cs +++ b/API/Data/Repositories/CollectionTagRepository.cs @@ -2,8 +2,10 @@ using System.IO; using System.Linq; using System.Threading.Tasks; +using API.Data.Misc; using API.DTOs.CollectionTags; using API.Entities; +using API.Extensions; using AutoMapper; using AutoMapper.QueryableExtensions; using Microsoft.EntityFrameworkCore; @@ -15,9 +17,9 @@ public interface ICollectionTagRepository void Add(CollectionTag tag); void Remove(CollectionTag tag); Task> GetAllTagDtosAsync(); - Task> SearchTagDtosAsync(string searchQuery); + Task> SearchTagDtosAsync(string searchQuery, int userId); Task GetCoverImageAsync(int collectionTagId); - Task> GetAllPromotedTagDtosAsync(); + Task> GetAllPromotedTagDtosAsync(int userId); Task GetTagAsync(int tagId); Task GetFullTagAsync(int tagId); void Update(CollectionTag tag); @@ -85,6 +87,7 @@ public class CollectionTagRepository : ICollectionTagRepository public async Task> GetAllTagDtosAsync() { + return await _context.CollectionTag .OrderBy(c => c.NormalizedTitle) .AsNoTracking() @@ -92,10 +95,12 @@ public class CollectionTagRepository : ICollectionTagRepository .ToListAsync(); } - public async Task> GetAllPromotedTagDtosAsync() + public async Task> GetAllPromotedTagDtosAsync(int userId) { + var userRating = await GetUserAgeRestriction(userId); return await _context.CollectionTag .Where(c => c.Promoted) + .RestrictAgainstAgeRestriction(userRating) .OrderBy(c => c.NormalizedTitle) .AsNoTracking() 
.ProjectTo(_mapper.ConfigurationProvider) @@ -118,11 +123,26 @@ public class CollectionTagRepository : ICollectionTagRepository .SingleOrDefaultAsync(); } - public async Task> SearchTagDtosAsync(string searchQuery) + private async Task GetUserAgeRestriction(int userId) { + return await _context.AppUser + .AsNoTracking() + .Where(u => u.Id == userId) + .Select(u => + new AgeRestriction(){ + AgeRating = u.AgeRestriction, + IncludeUnknowns = u.AgeRestrictionIncludeUnknowns + }) + .SingleAsync(); + } + + public async Task> SearchTagDtosAsync(string searchQuery, int userId) + { + var userRating = await GetUserAgeRestriction(userId); return await _context.CollectionTag .Where(s => EF.Functions.Like(s.Title, $"%{searchQuery}%") || EF.Functions.Like(s.NormalizedTitle, $"%{searchQuery}%")) + .RestrictAgainstAgeRestriction(userRating) .OrderBy(s => s.Title) .AsNoTracking() .OrderBy(c => c.NormalizedTitle) diff --git a/API/Data/Repositories/DeviceRepository.cs b/API/Data/Repositories/DeviceRepository.cs new file mode 100644 index 000000000..b6f139bc1 --- /dev/null +++ b/API/Data/Repositories/DeviceRepository.cs @@ -0,0 +1,50 @@ +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using API.DTOs.Device; +using API.Entities; +using AutoMapper; +using AutoMapper.QueryableExtensions; +using Microsoft.EntityFrameworkCore; + +namespace API.Data.Repositories; + +public interface IDeviceRepository +{ + void Update(Device device); + Task> GetDevicesForUserAsync(int userId); + Task GetDeviceById(int deviceId); +} + +public class DeviceRepository : IDeviceRepository +{ + private readonly DataContext _context; + private readonly IMapper _mapper; + + public DeviceRepository(DataContext context, IMapper mapper) + { + _context = context; + _mapper = mapper; + } + + public void Update(Device device) + { + _context.Entry(device).State = EntityState.Modified; + } + + public async Task> GetDevicesForUserAsync(int userId) + { + return await _context.Device + 
.Where(d => d.AppUserId == userId) + .OrderBy(d => d.LastUsed) + .ProjectTo(_mapper.ConfigurationProvider) + .ToListAsync(); + } + + public async Task GetDeviceById(int deviceId) + { + return await _context.Device + .Where(d => d.Id == deviceId) + .SingleOrDefaultAsync(); + } +} diff --git a/API/Data/Repositories/GenreRepository.cs b/API/Data/Repositories/GenreRepository.cs index 7457adb24..df7fb5069 100644 --- a/API/Data/Repositories/GenreRepository.cs +++ b/API/Data/Repositories/GenreRepository.cs @@ -1,8 +1,10 @@ using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; +using API.Data.Misc; using API.DTOs.Metadata; using API.Entities; +using API.Extensions; using AutoMapper; using AutoMapper.QueryableExtensions; using Microsoft.EntityFrameworkCore; @@ -15,9 +17,9 @@ public interface IGenreRepository void Remove(Genre genre); Task FindByNameAsync(string genreName); Task> GetAllGenresAsync(); - Task> GetAllGenreDtosAsync(); + Task> GetAllGenreDtosAsync(int userId); Task RemoveAllGenreNoLongerAssociated(bool removeExternal = false); - Task> GetAllGenreDtosForLibrariesAsync(IList libraryIds); + Task> GetAllGenreDtosForLibrariesAsync(IList libraryIds, int userId); Task GetCountAsync(); } @@ -63,10 +65,18 @@ public class GenreRepository : IGenreRepository await _context.SaveChangesAsync(); } - public async Task> GetAllGenreDtosForLibrariesAsync(IList libraryIds) + /// + /// Returns a set of Genre tags for a set of library Ids. UserId will restrict returned Genres based on user's age restriction. 
+ /// + /// + /// + /// + public async Task> GetAllGenreDtosForLibrariesAsync(IList libraryIds, int userId) { + var userRating = await _context.AppUser.GetUserAgeRestriction(userId); return await _context.Series .Where(s => libraryIds.Contains(s.LibraryId)) + .RestrictAgainstAgeRestriction(userRating) .SelectMany(s => s.Metadata.Genres) .AsSplitQuery() .Distinct() @@ -75,6 +85,7 @@ public class GenreRepository : IGenreRepository .ToListAsync(); } + public async Task GetCountAsync() { return await _context.Genre.CountAsync(); @@ -85,9 +96,11 @@ public class GenreRepository : IGenreRepository return await _context.Genre.ToListAsync(); } - public async Task> GetAllGenreDtosAsync() + public async Task> GetAllGenreDtosAsync(int userId) { + var ageRating = await _context.AppUser.GetUserAgeRestriction(userId); return await _context.Genre + .RestrictAgainstAgeRestriction(ageRating) .AsNoTracking() .ProjectTo(_mapper.ConfigurationProvider) .ToListAsync(); diff --git a/API/Data/Repositories/LibraryRepository.cs b/API/Data/Repositories/LibraryRepository.cs index b967cece8..7a50f365e 100644 --- a/API/Data/Repositories/LibraryRepository.cs +++ b/API/Data/Repositories/LibraryRepository.cs @@ -33,17 +33,19 @@ public interface ILibraryRepository void Delete(Library library); Task> GetLibraryDtosAsync(); Task LibraryExists(string libraryName); - Task GetLibraryForIdAsync(int libraryId, LibraryIncludes includes); + Task GetLibraryForIdAsync(int libraryId, LibraryIncludes includes = LibraryIncludes.None); Task> GetLibraryDtosForUsernameAsync(string userName); Task> GetLibrariesAsync(LibraryIncludes includes = LibraryIncludes.None); Task DeleteLibrary(int libraryId); Task> GetLibrariesForUserIdAsync(int userId); + Task> GetLibraryIdsForUserIdAsync(int userId); Task GetLibraryTypeAsync(int libraryId); Task> GetLibraryForIdsAsync(IEnumerable libraryIds, LibraryIncludes includes = LibraryIncludes.None); Task GetTotalFiles(); IEnumerable GetJumpBarAsync(int libraryId); Task> 
GetAllAgeRatingsDtosForLibrariesAsync(List libraryIds); Task> GetAllLanguagesForLibrariesAsync(List libraryIds); + Task> GetAllLanguagesForLibrariesAsync(); IEnumerable GetAllPublicationStatusesDtosForLibrariesAsync(List libraryIds); Task DoAnySeriesFoldersMatch(IEnumerable folders); Library GetLibraryByFolder(string folder); @@ -110,6 +112,11 @@ public class LibraryRepository : ILibraryRepository return await _context.SaveChangesAsync() > 0; } + /// + /// This does not track + /// + /// + /// public async Task> GetLibrariesForUserIdAsync(int userId) { return await _context.Library @@ -119,6 +126,14 @@ public class LibraryRepository : ILibraryRepository .ToListAsync(); } + public async Task> GetLibraryIdsForUserIdAsync(int userId) + { + return await _context.Library + .Where(l => l.AppUsers.Select(ap => ap.Id).Contains(userId)) + .Select(l => l.Id) + .ToListAsync(); + } + public async Task GetLibraryTypeAsync(int libraryId) { return await _context.Library @@ -173,6 +188,10 @@ public class LibraryRepository : ILibraryRepository }); } + /// + /// Returns all Libraries with their Folders + /// + /// public async Task> GetLibraryDtosAsync() { return await _context.Library @@ -184,14 +203,14 @@ public class LibraryRepository : ILibraryRepository .ToListAsync(); } - public async Task GetLibraryForIdAsync(int libraryId, LibraryIncludes includes) + public async Task GetLibraryForIdAsync(int libraryId, LibraryIncludes includes = LibraryIncludes.None) { var query = _context.Library .Where(x => x.Id == libraryId); query = AddIncludesToQuery(query, includes); - return await query.SingleAsync(); + return await query.SingleOrDefaultAsync(); } private static IQueryable AddIncludesToQuery(IQueryable query, LibraryIncludes includeFlags) @@ -311,6 +330,26 @@ public class LibraryRepository : ILibraryRepository .ToList(); } + public async Task> GetAllLanguagesForLibrariesAsync() + { + var ret = await _context.Series + .Select(s => s.Metadata.Language) + .AsSplitQuery() + 
.AsNoTracking() + .Distinct() + .ToListAsync(); + + return ret + .Where(s => !string.IsNullOrEmpty(s)) + .Select(s => new LanguageDto() + { + Title = CultureInfo.GetCultureInfo(s).DisplayName, + IsoCode = s + }) + .OrderBy(s => s.Title) + .ToList(); + } + public IEnumerable GetAllPublicationStatusesDtosForLibrariesAsync(List libraryIds) { return _context.Series diff --git a/API/Data/Repositories/PersonRepository.cs b/API/Data/Repositories/PersonRepository.cs index 83aa18f62..7eea282a7 100644 --- a/API/Data/Repositories/PersonRepository.cs +++ b/API/Data/Repositories/PersonRepository.cs @@ -3,6 +3,7 @@ using System.Linq; using System.Threading.Tasks; using API.DTOs; using API.Entities; +using API.Extensions; using AutoMapper; using AutoMapper.QueryableExtensions; using Microsoft.EntityFrameworkCore; @@ -14,8 +15,9 @@ public interface IPersonRepository void Attach(Person person); void Remove(Person person); Task> GetAllPeople(); + Task> GetAllPersonDtosAsync(int userId); Task RemoveAllPeopleNoLongerAssociated(bool removeExternal = false); - Task> GetAllPeopleDtosForLibrariesAsync(List libraryIds); + Task> GetAllPeopleDtosForLibrariesAsync(List libraryIds, int userId); Task GetCountAsync(); } @@ -40,14 +42,6 @@ public class PersonRepository : IPersonRepository _context.Person.Remove(person); } - public async Task FindByNameAsync(string name) - { - var normalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(name); - return await _context.Person - .Where(p => normalizedName.Equals(p.NormalizedName)) - .SingleOrDefaultAsync(); - } - public async Task RemoveAllPeopleNoLongerAssociated(bool removeExternal = false) { var peopleWithNoConnections = await _context.Person @@ -62,10 +56,12 @@ public class PersonRepository : IPersonRepository await _context.SaveChangesAsync(); } - public async Task> GetAllPeopleDtosForLibrariesAsync(List libraryIds) + public async Task> GetAllPeopleDtosForLibrariesAsync(List libraryIds, int userId) { + var ageRating = await 
_context.AppUser.GetUserAgeRestriction(userId); return await _context.Series .Where(s => libraryIds.Contains(s.LibraryId)) + .RestrictAgainstAgeRestriction(ageRating) .SelectMany(s => s.Metadata.People) .Distinct() .OrderBy(p => p.Name) @@ -87,4 +83,14 @@ public class PersonRepository : IPersonRepository .OrderBy(p => p.Name) .ToListAsync(); } + + public async Task> GetAllPersonDtosAsync(int userId) + { + var ageRating = await _context.AppUser.GetUserAgeRestriction(userId); + return await _context.Person + .OrderBy(p => p.Name) + .RestrictAgainstAgeRestriction(ageRating) + .ProjectTo(_mapper.ConfigurationProvider) + .ToListAsync(); + } } diff --git a/API/Data/Repositories/ReadingListRepository.cs b/API/Data/Repositories/ReadingListRepository.cs index 6fde82929..3401205d1 100644 --- a/API/Data/Repositories/ReadingListRepository.cs +++ b/API/Data/Repositories/ReadingListRepository.cs @@ -23,6 +23,7 @@ public interface IReadingListRepository Task> GetReadingListDtosForSeriesAndUserAsync(int userId, int seriesId, bool includePromoted); void Remove(ReadingListItem item); + void Add(ReadingList list); void BulkRemove(IEnumerable items); void Update(ReadingList list); Task Count(); @@ -46,6 +47,11 @@ public class ReadingListRepository : IReadingListRepository _context.Entry(list).State = EntityState.Modified; } + public void Add(ReadingList list) + { + _context.Add(list); + } + public async Task Count() { return await _context.ReadingList.CountAsync(); @@ -82,8 +88,10 @@ public class ReadingListRepository : IReadingListRepository public async Task> GetReadingListDtosForUserAsync(int userId, bool includePromoted, UserParams userParams) { + var userAgeRating = (await _context.AppUser.SingleAsync(u => u.Id == userId)).AgeRestriction; var query = _context.ReadingList .Where(l => l.AppUserId == userId || (includePromoted && l.Promoted )) + .Where(l => l.AgeRating >= userAgeRating) .OrderBy(l => l.LastModified) .ProjectTo(_mapper.ConfigurationProvider) .AsNoTracking(); @@ -97,7 
+105,7 @@ public class ReadingListRepository : IReadingListRepository .Where(l => l.AppUserId == userId || (includePromoted && l.Promoted )) .Where(l => l.Items.Any(i => i.SeriesId == seriesId)) .AsSplitQuery() - .OrderBy(l => l.LastModified) + .OrderBy(l => l.Title) .ProjectTo(_mapper.ConfigurationProvider) .AsNoTracking(); diff --git a/API/Data/Repositories/SeriesRepository.cs b/API/Data/Repositories/SeriesRepository.cs index 43b748a2a..4423db98d 100644 --- a/API/Data/Repositories/SeriesRepository.cs +++ b/API/Data/Repositories/SeriesRepository.cs @@ -3,6 +3,7 @@ using System.Collections.Generic; using System.Linq; using System.Text.RegularExpressions; using System.Threading.Tasks; +using API.Data.Misc; using API.Data.Scanner; using API.DTOs; using API.DTOs.CollectionTags; @@ -33,29 +34,12 @@ public enum SeriesIncludes Volumes = 2, Metadata = 4, Related = 8, - //Related = 16, - //UserPreferences = 32 -} - -internal class RecentlyAddedSeries -{ - public int LibraryId { get; init; } - public LibraryType LibraryType { get; init; } - public DateTime Created { get; init; } - public int SeriesId { get; init; } - public string SeriesName { get; init; } - public MangaFormat Format { get; init; } - public int ChapterId { get; init; } - public int VolumeId { get; init; } - public string ChapterNumber { get; init; } - public string ChapterRange { get; init; } - public string ChapterTitle { get; init; } - public bool IsSpecial { get; init; } - public int VolumeNumber { get; init; } + Library = 16, } public interface ISeriesRepository { + void Add(Series series); void Attach(Series series); void Update(Series series); void Remove(Series series); @@ -79,9 +63,8 @@ public interface ISeriesRepository /// /// Task SearchSeries(int userId, bool isAdmin, int[] libraryIds, string searchQuery); - Task> GetSeriesForLibraryIdAsync(int libraryId); + Task> GetSeriesForLibraryIdAsync(int libraryId, SeriesIncludes includes = SeriesIncludes.None); Task GetSeriesDtoByIdAsync(int seriesId, 
int userId); - Task DeleteSeriesAsync(int seriesId); Task GetSeriesByIdAsync(int seriesId, SeriesIncludes includes = SeriesIncludes.Volumes | SeriesIncludes.Metadata); Task> GetSeriesByIdsAsync(IList seriesIds); Task GetChapterIdsForSeriesAsync(IList seriesIds); @@ -118,12 +101,11 @@ public interface ISeriesRepository Task GetSeriesForMangaFile(int mangaFileId, int userId); Task GetSeriesForChapter(int chapterId, int userId); Task> GetWantToReadForUserAsync(int userId, UserParams userParams, FilterDto filter); - Task GetSeriesIdByFolder(string folder); - Task GetSeriesByFolderPath(string folder); - Task GetFullSeriesByName(string series, int libraryId); + Task GetSeriesByFolderPath(string folder, SeriesIncludes includes = SeriesIncludes.None); Task GetFullSeriesByAnyName(string seriesName, string localizedName, int libraryId, MangaFormat format, bool withFullIncludes = true); - Task> RemoveSeriesNotInList(IList seenSeries, int libraryId); + Task> RemoveSeriesNotInList(IList seenSeries, int libraryId); Task>> GetFolderPathMap(int libraryId); + Task GetMaxAgeRatingFromSeriesAsync(IEnumerable seriesIds); } public class SeriesRepository : ISeriesRepository @@ -136,6 +118,11 @@ public class SeriesRepository : ISeriesRepository _mapper = mapper; } + public void Add(Series series) + { + _context.Series.Add(series); + } + public void Attach(Series series) { _context.Series.Attach(series); @@ -172,12 +159,14 @@ public class SeriesRepository : ISeriesRepository } - public async Task> GetSeriesForLibraryIdAsync(int libraryId) + public async Task> GetSeriesForLibraryIdAsync(int libraryId, SeriesIncludes includes = SeriesIncludes.None) { - return await _context.Series - .Where(s => s.LibraryId == libraryId) - .OrderBy(s => s.SortName) - .ToListAsync(); + var query = _context.Series + .Where(s => s.LibraryId == libraryId); + + query = AddIncludesToQuery(query, includes); + + return await query.OrderBy(s => s.SortName).ToListAsync(); } /// @@ -303,9 +292,11 @@ public class 
SeriesRepository : ISeriesRepository const int maxRecords = 15; var result = new SearchResultGroupDto(); var searchQueryNormalized = Services.Tasks.Scanner.Parser.Parser.Normalize(searchQuery); + var userRating = await _context.AppUser.GetUserAgeRestriction(userId); var seriesIds = _context.Series .Where(s => libraryIds.Contains(s.LibraryId)) + .RestrictAgainstAgeRestriction(userRating) .Select(s => s.Id) .ToList(); @@ -329,6 +320,7 @@ public class SeriesRepository : ISeriesRepository || EF.Functions.Like(s.LocalizedName, $"%{searchQuery}%") || EF.Functions.Like(s.NormalizedName, $"%{searchQueryNormalized}%") || (hasYearInQuery && s.Metadata.ReleaseYear == yearComparison)) + .RestrictAgainstAgeRestriction(userRating) .Include(s => s.Library) .OrderBy(s => s.SortName) .AsNoTracking() @@ -337,19 +329,20 @@ public class SeriesRepository : ISeriesRepository .ProjectTo(_mapper.ConfigurationProvider) .AsEnumerable(); - result.ReadingLists = await _context.ReadingList .Where(rl => rl.AppUserId == userId || rl.Promoted) .Where(rl => EF.Functions.Like(rl.Title, $"%{searchQuery}%")) + .RestrictAgainstAgeRestriction(userRating) .AsSplitQuery() .Take(maxRecords) .ProjectTo(_mapper.ConfigurationProvider) .ToListAsync(); result.Collections = await _context.CollectionTag - .Where(s => EF.Functions.Like(s.Title, $"%{searchQuery}%") - || EF.Functions.Like(s.NormalizedTitle, $"%{searchQueryNormalized}%")) - .Where(s => s.Promoted || isAdmin) + .Where(c => EF.Functions.Like(c.Title, $"%{searchQuery}%") + || EF.Functions.Like(c.NormalizedTitle, $"%{searchQueryNormalized}%")) + .Where(c => c.Promoted || isAdmin) + .RestrictAgainstAgeRestriction(userRating) .OrderBy(s => s.Title) .AsNoTracking() .AsSplitQuery() @@ -388,7 +381,7 @@ public class SeriesRepository : ISeriesRepository .ToListAsync(); var fileIds = _context.Series - .Where(s => libraryIds.Contains(s.LibraryId)) + .Where(s => seriesIds.Contains(s.Id)) .AsSplitQuery() .SelectMany(s => s.Volumes) .SelectMany(v => v.Chapters) @@ 
-426,15 +419,6 @@ public class SeriesRepository : ISeriesRepository return seriesList[0]; } - public async Task DeleteSeriesAsync(int seriesId) - { - var series = await _context.Series.Where(s => s.Id == seriesId).SingleOrDefaultAsync(); - if (series != null) _context.Series.Remove(series); - - return await _context.SaveChangesAsync() > 0; - } - - /// /// Returns Volumes, Metadata (Incl Genres and People), and Collection Tags /// @@ -447,29 +431,7 @@ public class SeriesRepository : ISeriesRepository .Where(s => s.Id == seriesId) .AsSplitQuery(); - if (includes.HasFlag(SeriesIncludes.Volumes)) - { - query = query.Include(s => s.Volumes); - } - - if (includes.HasFlag(SeriesIncludes.Related)) - { - query = query.Include(s => s.Relations) - .ThenInclude(r => r.TargetSeries) - .Include(s => s.RelationOf); - } - - if (includes.HasFlag(SeriesIncludes.Metadata)) - { - query = query.Include(s => s.Metadata) - .ThenInclude(m => m.CollectionTags) - .Include(s => s.Metadata) - .ThenInclude(m => m.Genres) - .Include(s => s.Metadata) - .ThenInclude(m => m.People) - .Include(s => s.Metadata) - .ThenInclude(m => m.Tags); - } + query = AddIncludesToQuery(query, includes); return await query.SingleOrDefaultAsync(); } @@ -605,7 +567,7 @@ public class SeriesRepository : ISeriesRepository private IList ExtractFilters(int libraryId, int userId, FilterDto filter, ref List userLibraries, out List allPeopleIds, out bool hasPeopleFilter, out bool hasGenresFilter, out bool hasCollectionTagFilter, out bool hasRatingFilter, out bool hasProgressFilter, out IList seriesIds, out bool hasAgeRating, out bool hasTagsFilter, - out bool hasLanguageFilter, out bool hasPublicationFilter, out bool hasSeriesNameFilter) + out bool hasLanguageFilter, out bool hasPublicationFilter, out bool hasSeriesNameFilter, out bool hasReleaseYearMinFilter, out bool hasReleaseYearMaxFilter) { var formats = filter.GetSqlFilter(); @@ -640,6 +602,9 @@ public class SeriesRepository : ISeriesRepository hasLanguageFilter = 
filter.Languages.Count > 0; hasPublicationFilter = filter.PublicationStatus.Count > 0; + hasReleaseYearMinFilter = filter.ReleaseYearRange != null && filter.ReleaseYearRange.Min != 0; + hasReleaseYearMaxFilter = filter.ReleaseYearRange != null && filter.ReleaseYearRange.Max != 0; + bool ProgressComparison(int pagesRead, int totalPages) { @@ -728,10 +693,13 @@ public class SeriesRepository : ISeriesRepository private async Task> CreateFilteredSearchQueryable(int userId, int libraryId, FilterDto filter) { var userLibraries = await GetUserLibraries(libraryId, userId); + var userRating = await _context.AppUser.GetUserAgeRestriction(userId); + var formats = ExtractFilters(libraryId, userId, filter, ref userLibraries, out var allPeopleIds, out var hasPeopleFilter, out var hasGenresFilter, out var hasCollectionTagFilter, out var hasRatingFilter, out var hasProgressFilter, - out var seriesIds, out var hasAgeRating, out var hasTagsFilter, out var hasLanguageFilter, out var hasPublicationFilter, out var hasSeriesNameFilter); + out var seriesIds, out var hasAgeRating, out var hasTagsFilter, out var hasLanguageFilter, + out var hasPublicationFilter, out var hasSeriesNameFilter, out var hasReleaseYearMinFilter, out var hasReleaseYearMaxFilter); var query = _context.Series .Where(s => userLibraries.Contains(s.LibraryId) @@ -745,12 +713,19 @@ public class SeriesRepository : ISeriesRepository && (!hasAgeRating || filter.AgeRating.Contains(s.Metadata.AgeRating)) && (!hasTagsFilter || s.Metadata.Tags.Any(t => filter.Tags.Contains(t.Id))) && (!hasLanguageFilter || filter.Languages.Contains(s.Metadata.Language)) + && (!hasReleaseYearMinFilter || s.Metadata.ReleaseYear >= filter.ReleaseYearRange.Min) + && (!hasReleaseYearMaxFilter || s.Metadata.ReleaseYear <= filter.ReleaseYearRange.Max) && (!hasPublicationFilter || filter.PublicationStatus.Contains(s.Metadata.PublicationStatus))) .Where(s => !hasSeriesNameFilter || EF.Functions.Like(s.Name, $"%{filter.SeriesNameQuery}%") || 
EF.Functions.Like(s.OriginalName, $"%{filter.SeriesNameQuery}%") - || EF.Functions.Like(s.LocalizedName, $"%{filter.SeriesNameQuery}%")) - .AsNoTracking(); + || EF.Functions.Like(s.LocalizedName, $"%{filter.SeriesNameQuery}%")); + if (userRating.AgeRating != AgeRating.NotApplicable) + { + query = query.RestrictAgainstAgeRestriction(userRating); + } + + query = query.AsNoTracking(); // If no sort options, default to using SortName filter.SortOptions ??= new SortOptions() @@ -768,6 +743,7 @@ public class SeriesRepository : ISeriesRepository SortField.LastModifiedDate => query.OrderBy(s => s.LastModified), SortField.LastChapterAdded => query.OrderBy(s => s.LastChapterAdded), SortField.TimeToRead => query.OrderBy(s => s.AvgHoursToRead), + SortField.ReleaseYear => query.OrderBy(s => s.Metadata.ReleaseYear), _ => query }; } @@ -780,6 +756,7 @@ public class SeriesRepository : ISeriesRepository SortField.LastModifiedDate => query.OrderByDescending(s => s.LastModified), SortField.LastChapterAdded => query.OrderByDescending(s => s.LastChapterAdded), SortField.TimeToRead => query.OrderByDescending(s => s.AvgHoursToRead), + SortField.ReleaseYear => query.OrderByDescending(s => s.Metadata.ReleaseYear), _ => query }; } @@ -793,7 +770,8 @@ public class SeriesRepository : ISeriesRepository var formats = ExtractFilters(libraryId, userId, filter, ref userLibraries, out var allPeopleIds, out var hasPeopleFilter, out var hasGenresFilter, out var hasCollectionTagFilter, out var hasRatingFilter, out var hasProgressFilter, - out var seriesIds, out var hasAgeRating, out var hasTagsFilter, out var hasLanguageFilter, out var hasPublicationFilter, out var hasSeriesNameFilter); + out var seriesIds, out var hasAgeRating, out var hasTagsFilter, out var hasLanguageFilter, + out var hasPublicationFilter, out var hasSeriesNameFilter, out var hasReleaseYearMinFilter, out var hasReleaseYearMaxFilter); var query = sQuery .Where(s => userLibraries.Contains(s.LibraryId) @@ -807,6 +785,8 @@ public class 
SeriesRepository : ISeriesRepository && (!hasAgeRating || filter.AgeRating.Contains(s.Metadata.AgeRating)) && (!hasTagsFilter || s.Metadata.Tags.Any(t => filter.Tags.Contains(t.Id))) && (!hasLanguageFilter || filter.Languages.Contains(s.Metadata.Language)) + && (!hasReleaseYearMinFilter || s.Metadata.ReleaseYear >= filter.ReleaseYearRange.Min) + && (!hasReleaseYearMaxFilter || s.Metadata.ReleaseYear <= filter.ReleaseYearRange.Max) && (!hasPublicationFilter || filter.PublicationStatus.Contains(s.Metadata.PublicationStatus))) .Where(s => !hasSeriesNameFilter || EF.Functions.Like(s.Name, $"%{filter.SeriesNameQuery}%") @@ -1017,40 +997,50 @@ public class SeriesRepository : ISeriesRepository public async Task> GetRecentlyUpdatedSeries(int userId, int pageSize = 30) { var seriesMap = new Dictionary(); - var index = 0; - foreach (var item in await GetRecentlyAddedChaptersQuery(userId)) + var index = 0; + var userRating = await _context.AppUser.GetUserAgeRestriction(userId); + + var items = (await GetRecentlyAddedChaptersQuery(userId)); + if (userRating.AgeRating != AgeRating.NotApplicable) + { + items = items.RestrictAgainstAgeRestriction(userRating); + } + foreach (var item in items) + { + if (seriesMap.Keys.Count == pageSize) break; + + if (seriesMap.ContainsKey(item.SeriesName)) { - if (seriesMap.Keys.Count == pageSize) break; - - if (seriesMap.ContainsKey(item.SeriesName)) - { - seriesMap[item.SeriesName].Count += 1; - } - else - { - seriesMap[item.SeriesName] = new GroupedSeriesDto() - { - LibraryId = item.LibraryId, - LibraryType = item.LibraryType, - SeriesId = item.SeriesId, - SeriesName = item.SeriesName, - Created = item.Created, - Id = index, - Format = item.Format, - Count = 1, - }; - index += 1; - } + seriesMap[item.SeriesName].Count += 1; } + else + { + seriesMap[item.SeriesName] = new GroupedSeriesDto() + { + LibraryId = item.LibraryId, + LibraryType = item.LibraryType, + SeriesId = item.SeriesId, + SeriesName = item.SeriesName, + Created = item.Created, + 
Id = index, + Format = item.Format, + Count = 1, + }; + index += 1; + } + } - return seriesMap.Values.AsEnumerable(); + return seriesMap.Values.AsEnumerable(); } public async Task> GetSeriesForRelationKind(int userId, int seriesId, RelationKind kind) { var libraryIds = GetLibraryIdsForUser(userId); + var userRating = await _context.AppUser.GetUserAgeRestriction(userId); + var usersSeriesIds = _context.Series .Where(s => libraryIds.Contains(s.LibraryId)) + .RestrictAgainstAgeRestriction(userRating) .Select(s => s.Id); var targetSeries = _context.SeriesRelation @@ -1063,28 +1053,26 @@ public class SeriesRepository : ISeriesRepository return await _context.Series .Where(s => targetSeries.Contains(s.Id)) + .RestrictAgainstAgeRestriction(userRating) .AsSplitQuery() .AsNoTracking() .ProjectTo(_mapper.ConfigurationProvider) .ToListAsync(); } - private IQueryable GetLibraryIdsForUser(int userId) - { - return _context.AppUser - .Where(u => u.Id == userId) - .AsSplitQuery() - .SelectMany(l => l.Libraries.Select(lib => lib.Id)); - } - public async Task> GetMoreIn(int userId, int libraryId, int genreId, UserParams userParams) { var libraryIds = GetLibraryIdsForUser(userId, libraryId); var usersSeriesIds = GetSeriesIdsForLibraryIds(libraryIds); + var userRating = await _context.AppUser.GetUserAgeRestriction(userId); + // Because this can be called from an API, we need to provide an additional check if the genre has anything the + // user with age restrictions can access + var query = _context.Series .Where(s => s.Metadata.Genres.Select(g => g.Id).Contains(genreId)) .Where(s => usersSeriesIds.Contains(s.Id)) + .RestrictAgainstAgeRestriction(userRating) .AsSplitQuery() .ProjectTo(_mapper.ConfigurationProvider); @@ -1121,6 +1109,8 @@ public class SeriesRepository : ISeriesRepository public async Task GetSeriesForMangaFile(int mangaFileId, int userId) { var libraryIds = GetLibraryIdsForUser(userId); + var userRating = await _context.AppUser.GetUserAgeRestriction(userId); + return 
await _context.MangaFile .Where(m => m.Id == mangaFileId) .AsSplitQuery() @@ -1128,6 +1118,7 @@ public class SeriesRepository : ISeriesRepository .Select(c => c.Volume) .Select(v => v.Series) .Where(s => libraryIds.Contains(s.LibraryId)) + .RestrictAgainstAgeRestriction(userRating) .ProjectTo(_mapper.ConfigurationProvider) .SingleOrDefaultAsync(); } @@ -1135,81 +1126,32 @@ public class SeriesRepository : ISeriesRepository public async Task GetSeriesForChapter(int chapterId, int userId) { var libraryIds = GetLibraryIdsForUser(userId); + var userRating = await _context.AppUser.GetUserAgeRestriction(userId); return await _context.Chapter .Where(m => m.Id == chapterId) .AsSplitQuery() .Select(c => c.Volume) .Select(v => v.Series) .Where(s => libraryIds.Contains(s.LibraryId)) + .RestrictAgainstAgeRestriction(userRating) .ProjectTo(_mapper.ConfigurationProvider) .SingleOrDefaultAsync(); } - /// - /// Given a folder path return a Series with the that matches. - /// - /// This will apply normalization on the path. - /// - /// - public async Task GetSeriesIdByFolder(string folder) - { - var normalized = Services.Tasks.Scanner.Parser.Parser.NormalizePath(folder); - var series = await _context.Series - .Where(s => s.FolderPath.Equals(normalized)) - .SingleOrDefaultAsync(); - return series?.Id ?? 0; - } - /// /// Return a Series by Folder path. Null if not found. /// /// This will be normalized in the query + /// Additional relationships to include with the base query /// - public async Task GetSeriesByFolderPath(string folder) + public async Task GetSeriesByFolderPath(string folder, SeriesIncludes includes = SeriesIncludes.None) { var normalized = Services.Tasks.Scanner.Parser.Parser.NormalizePath(folder); - return await _context.Series.SingleOrDefaultAsync(s => s.FolderPath.Equals(normalized)); - } + var query = _context.Series.Where(s => s.FolderPath.Equals(normalized)); - /// - /// Finds a series by series name for a given library. 
- /// - /// This pulls everything with the Series, so should be used only when needing tracking on all related tables - /// - /// - /// - public Task GetFullSeriesByName(string series, int libraryId) - { - var localizedSeries = Services.Tasks.Scanner.Parser.Parser.Normalize(series); - return _context.Series - .Where(s => (s.NormalizedName.Equals(localizedSeries) - || s.LocalizedName.Equals(series)) && s.LibraryId == libraryId) - .Include(s => s.Metadata) - .ThenInclude(m => m.People) - .Include(s => s.Metadata) - .ThenInclude(m => m.Genres) - .Include(s => s.Library) - .Include(s => s.Volumes) - .ThenInclude(v => v.Chapters) - .ThenInclude(cm => cm.People) + query = AddIncludesToQuery(query, includes); - .Include(s => s.Volumes) - .ThenInclude(v => v.Chapters) - .ThenInclude(c => c.Tags) - - .Include(s => s.Volumes) - .ThenInclude(v => v.Chapters) - .ThenInclude(c => c.Genres) - - - .Include(s => s.Metadata) - .ThenInclude(m => m.Tags) - - .Include(s => s.Volumes) - .ThenInclude(v => v.Chapters) - .ThenInclude(c => c.Files) - .AsSplitQuery() - .SingleOrDefaultAsync(); + return await query.SingleOrDefaultAsync(); } /// @@ -1219,6 +1161,7 @@ public class SeriesRepository : ISeriesRepository /// /// /// + /// /// Defaults to true. This will query against all foreign keys (deep). 
If false, just the series will come back /// public Task GetFullSeriesByAnyName(string seriesName, string localizedName, int libraryId, MangaFormat format, bool withFullIncludes = true) @@ -1229,7 +1172,9 @@ public class SeriesRepository : ISeriesRepository .Where(s => s.LibraryId == libraryId) .Where(s => s.Format == format && format != MangaFormat.Unknown) .Where(s => s.NormalizedName.Equals(normalizedSeries) - || (s.NormalizedLocalizedName.Equals(normalizedSeries) && s.NormalizedLocalizedName != string.Empty)); + || (s.NormalizedLocalizedName.Equals(normalizedSeries) && s.NormalizedLocalizedName != string.Empty) + || s.OriginalName.Equals(seriesName)); + if (!string.IsNullOrEmpty(normalizedLocalized)) { query = query.Where(s => @@ -1275,20 +1220,39 @@ public class SeriesRepository : ISeriesRepository /// /// /// - public async Task> RemoveSeriesNotInList(IList seenSeries, int libraryId) + public async Task> RemoveSeriesNotInList(IList seenSeries, int libraryId) { - if (seenSeries.Count == 0) return new List(); + if (seenSeries.Count == 0) return Array.Empty(); + var ids = new List(); foreach (var parsedSeries in seenSeries) { - var series = await _context.Series - .Where(s => s.Format == parsedSeries.Format && s.NormalizedName == parsedSeries.NormalizedName && - s.LibraryId == libraryId) - .Select(s => s.Id) - .SingleOrDefaultAsync(); - if (series > 0) + try { - ids.Add(series); + var seriesId = await _context.Series + .Where(s => s.Format == parsedSeries.Format && s.NormalizedName == parsedSeries.NormalizedName && + s.LibraryId == libraryId) + .Select(s => s.Id) + .SingleOrDefaultAsync(); + if (seriesId > 0) + { + ids.Add(seriesId); + } + } + catch (Exception) + { + // This is due to v0.5.6 introducing bugs where we could have multiple series get duplicated and no way to delete them + // This here will delete the 2nd one as the first is the one to likely be used. 
+ var sId = _context.Series + .Where(s => s.Format == parsedSeries.Format && s.NormalizedName == parsedSeries.NormalizedName && + s.LibraryId == libraryId) + .Select(s => s.Id) + .OrderBy(s => s) + .Last(); + if (sId > 0) + { + ids.Add(sId); + } } } @@ -1297,6 +1261,15 @@ public class SeriesRepository : ISeriesRepository .Where(s => !ids.Contains(s.Id)) .ToListAsync(); + // If the series to remove has Relation (related series), we must manually unlink due to the DB not being + // setup correctly (if this is not done, a foreign key constraint will be thrown) + + foreach (var sr in seriesToRemove) + { + sr.Relations = new List(); + Update(sr); + } + _context.Series.RemoveRange(seriesToRemove); return seriesToRemove; @@ -1310,9 +1283,11 @@ public class SeriesRepository : ISeriesRepository .Where(s => usersSeriesIds.Contains(s.SeriesId) && s.Rating > 4) .Select(p => p.SeriesId) .Distinct(); + var userRating = await _context.AppUser.GetUserAgeRestriction(userId); var query = _context.Series .Where(s => distinctSeriesIdsWithHighRating.Contains(s.Id)) + .RestrictAgainstAgeRestriction(userRating) .AsSplitQuery() .OrderByDescending(s => _context.AppUserRating.Where(r => r.SeriesId == s.Id).Select(r => r.Rating).Average()) .ProjectTo(_mapper.ConfigurationProvider); @@ -1329,6 +1304,7 @@ public class SeriesRepository : ISeriesRepository .Where(s => usersSeriesIds.Contains(s.SeriesId)) .Select(p => p.SeriesId) .Distinct(); + var userRating = await _context.AppUser.GetUserAgeRestriction(userId); var query = _context.Series @@ -1338,6 +1314,7 @@ public class SeriesRepository : ISeriesRepository && !distinctSeriesIdsWithProgress.Contains(s.Id) && usersSeriesIds.Contains(s.Id)) .Where(s => s.Metadata.PublicationStatus != PublicationStatus.OnGoing) + .RestrictAgainstAgeRestriction(userRating) .AsSplitQuery() .ProjectTo(_mapper.ConfigurationProvider); @@ -1354,6 +1331,8 @@ public class SeriesRepository : ISeriesRepository .Select(p => p.SeriesId) .Distinct(); + var userRating = 
await _context.AppUser.GetUserAgeRestriction(userId); + var query = _context.Series .Where(s => ( @@ -1362,6 +1341,7 @@ public class SeriesRepository : ISeriesRepository && !distinctSeriesIdsWithProgress.Contains(s.Id) && usersSeriesIds.Contains(s.Id)) .Where(s => s.Metadata.PublicationStatus == PublicationStatus.OnGoing) + .RestrictAgainstAgeRestriction(userRating) .AsSplitQuery() .ProjectTo(_mapper.ConfigurationProvider); @@ -1375,43 +1355,56 @@ public class SeriesRepository : ISeriesRepository /// /// 0 for no library filter /// - private IQueryable GetLibraryIdsForUser(int userId, int libraryId) + private IQueryable GetLibraryIdsForUser(int userId, int libraryId = 0) { - return _context.AppUser - .Where(u => u.Id == userId) - .SelectMany(l => l.Libraries.Where(l => l.Id == libraryId || libraryId == 0).Select(lib => lib.Id)); + var query = _context.AppUser + .AsSplitQuery() + .AsNoTracking() + .Where(u => u.Id == userId); + + if (libraryId == 0) + { + return query.SelectMany(l => l.Libraries.Select(lib => lib.Id)); + } + + return query.SelectMany(l => + l.Libraries.Where(lib => lib.Id == libraryId).Select(lib => lib.Id)); } public async Task GetRelatedSeries(int userId, int seriesId) { var libraryIds = GetLibraryIdsForUser(userId); var usersSeriesIds = GetSeriesIdsForLibraryIds(libraryIds); + var userRating = await _context.AppUser.GetUserAgeRestriction(userId); return new RelatedSeriesDto() { SourceSeriesId = seriesId, - Adaptations = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Adaptation), - Characters = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Character), - Prequels = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Prequel), - Sequels = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Sequel), - Contains = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Contains), - SideStories = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, 
RelationKind.SideStory), - SpinOffs = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.SpinOff), - Others = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Other), - AlternativeSettings = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.AlternativeSetting), - AlternativeVersions = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.AlternativeVersion), - Doujinshis = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Doujinshi), + Adaptations = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Adaptation, userRating), + Characters = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Character, userRating), + Prequels = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Prequel, userRating), + Sequels = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Sequel, userRating), + Contains = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Contains, userRating), + SideStories = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.SideStory, userRating), + SpinOffs = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.SpinOff, userRating), + Others = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Other, userRating), + AlternativeSettings = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.AlternativeSetting, userRating), + AlternativeVersions = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.AlternativeVersion, userRating), + Doujinshis = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Doujinshi, userRating), Parent = await _context.Series .SelectMany(s => s.RelationOf.Where(r => r.TargetSeriesId == seriesId && usersSeriesIds.Contains(r.TargetSeriesId) && r.RelationKind != RelationKind.Prequel - && r.RelationKind != RelationKind.Sequel) + && r.RelationKind != RelationKind.Sequel + && r.RelationKind != 
RelationKind.Edition) .Select(sr => sr.Series)) + .RestrictAgainstAgeRestriction(userRating) .AsSplitQuery() .AsNoTracking() .ProjectTo(_mapper.ConfigurationProvider) - .ToListAsync() + .ToListAsync(), + Editions = await GetRelatedSeriesQuery(seriesId, usersSeriesIds, RelationKind.Edition, userRating) }; } @@ -1422,11 +1415,12 @@ public class SeriesRepository : ISeriesRepository .Select(s => s.Id); } - private async Task> GetRelatedSeriesQuery(int seriesId, IEnumerable usersSeriesIds, RelationKind kind) + private async Task> GetRelatedSeriesQuery(int seriesId, IEnumerable usersSeriesIds, RelationKind kind, AgeRestriction userRating) { return await _context.Series.SelectMany(s => s.Relations.Where(sr => sr.RelationKind == kind && sr.SeriesId == seriesId && usersSeriesIds.Contains(sr.TargetSeriesId)) .Select(sr => sr.TargetSeries)) + .RestrictAgainstAgeRestriction(userRating) .AsSplitQuery() .AsNoTracking() .ProjectTo(_mapper.ConfigurationProvider) @@ -1435,16 +1429,15 @@ public class SeriesRepository : ISeriesRepository private async Task> GetRecentlyAddedChaptersQuery(int userId) { - var libraries = await _context.AppUser + var libraryIds = await _context.AppUser .Where(u => u.Id == userId) .SelectMany(u => u.Libraries.Select(l => new {LibraryId = l.Id, LibraryType = l.Type})) + .Select(l => l.LibraryId) .ToListAsync(); - var libraryIds = libraries.Select(l => l.LibraryId).ToList(); var withinLastWeek = DateTime.Now - TimeSpan.FromDays(12); - var ret = _context.Chapter - .Where(c => c.Created >= withinLastWeek) - .AsNoTracking() + return _context.Chapter + .Where(c => c.Created >= withinLastWeek).AsNoTracking() .Include(c => c.Volume) .ThenInclude(v => v.Series) .ThenInclude(s => s.Library) @@ -1463,12 +1456,12 @@ public class SeriesRepository : ISeriesRepository ChapterRange = c.Range, IsSpecial = c.IsSpecial, VolumeNumber = c.Volume.Number, - ChapterTitle = c.Title + ChapterTitle = c.Title, + AgeRating = c.Volume.Series.Metadata.AgeRating }) .AsSplitQuery() 
.Where(c => c.Created >= withinLastWeek && libraryIds.Contains(c.LibraryId)) .AsEnumerable(); - return ret; } public async Task> GetWantToReadForUserAsync(int userId, UserParams userParams, FilterDto filter) @@ -1497,7 +1490,8 @@ public class SeriesRepository : ISeriesRepository LastScanned = s.LastFolderScanned, SeriesName = s.Name, FolderPath = s.FolderPath, - Format = s.Format + Format = s.Format, + LibraryRoots = s.Library.Folders.Select(f => f.Path) }).ToListAsync(); var map = new Dictionary>(); @@ -1519,4 +1513,55 @@ public class SeriesRepository : ISeriesRepository return map; } + + /// + /// Returns the highest Age Rating for a list of Series + /// + /// + /// + public async Task GetMaxAgeRatingFromSeriesAsync(IEnumerable seriesIds) + { + return await _context.Series + .Where(s => seriesIds.Contains(s.Id)) + .Include(s => s.Metadata) + .Select(s => s.Metadata.AgeRating) + .OrderBy(s => s) + .LastOrDefaultAsync(); + } + + private static IQueryable AddIncludesToQuery(IQueryable query, SeriesIncludes includeFlags) + { + // TODO: Move this to an Extension Method + if (includeFlags.HasFlag(SeriesIncludes.Library)) + { + query = query.Include(u => u.Library); + } + + if (includeFlags.HasFlag(SeriesIncludes.Volumes)) + { + query = query.Include(s => s.Volumes); + } + + if (includeFlags.HasFlag(SeriesIncludes.Related)) + { + query = query.Include(s => s.Relations) + .ThenInclude(r => r.TargetSeries) + .Include(s => s.RelationOf); + } + + if (includeFlags.HasFlag(SeriesIncludes.Metadata)) + { + query = query.Include(s => s.Metadata) + .ThenInclude(m => m.CollectionTags) + .Include(s => s.Metadata) + .ThenInclude(m => m.Genres) + .Include(s => s.Metadata) + .ThenInclude(m => m.People) + .Include(s => s.Metadata) + .ThenInclude(m => m.Tags); + } + + + return query.AsSplitQuery(); + } } diff --git a/API/Data/Repositories/TagRepository.cs b/API/Data/Repositories/TagRepository.cs index 8faf0440b..e4e3987d0 100644 --- a/API/Data/Repositories/TagRepository.cs +++ 
b/API/Data/Repositories/TagRepository.cs @@ -3,6 +3,7 @@ using System.Linq; using System.Threading.Tasks; using API.DTOs.Metadata; using API.Entities; +using API.Extensions; using AutoMapper; using AutoMapper.QueryableExtensions; using Microsoft.EntityFrameworkCore; @@ -13,11 +14,10 @@ public interface ITagRepository { void Attach(Tag tag); void Remove(Tag tag); - Task FindByNameAsync(string tagName); Task> GetAllTagsAsync(); - Task> GetAllTagDtosAsync(); + Task> GetAllTagDtosAsync(int userId); Task RemoveAllTagNoLongerAssociated(bool removeExternal = false); - Task> GetAllTagDtosForLibrariesAsync(IList libraryIds); + Task> GetAllTagDtosForLibrariesAsync(IList libraryIds, int userId); } public class TagRepository : ITagRepository @@ -41,13 +41,6 @@ public class TagRepository : ITagRepository _context.Tag.Remove(tag); } - public async Task FindByNameAsync(string tagName) - { - var normalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize(tagName); - return await _context.Tag - .FirstOrDefaultAsync(g => g.NormalizedTitle.Equals(normalizedName)); - } - public async Task RemoveAllTagNoLongerAssociated(bool removeExternal = false) { var tagsWithNoConnections = await _context.Tag @@ -62,10 +55,12 @@ public class TagRepository : ITagRepository await _context.SaveChangesAsync(); } - public async Task> GetAllTagDtosForLibrariesAsync(IList libraryIds) + public async Task> GetAllTagDtosForLibrariesAsync(IList libraryIds, int userId) { + var userRating = await _context.AppUser.GetUserAgeRestriction(userId); return await _context.Series .Where(s => libraryIds.Contains(s.LibraryId)) + .RestrictAgainstAgeRestriction(userRating) .SelectMany(s => s.Metadata.Tags) .AsSplitQuery() .Distinct() @@ -80,10 +75,12 @@ public class TagRepository : ITagRepository return await _context.Tag.ToListAsync(); } - public async Task> GetAllTagDtosAsync() + public async Task> GetAllTagDtosAsync(int userId) { + var userRating = await _context.AppUser.GetUserAgeRestriction(userId); return await 
_context.Tag .AsNoTracking() + .RestrictAgainstAgeRestriction(userRating) .OrderBy(t => t.Title) .ProjectTo(_mapper.ConfigurationProvider) .ToListAsync(); diff --git a/API/Data/Repositories/UserRepository.cs b/API/Data/Repositories/UserRepository.cs index e02f414f4..c7115081b 100644 --- a/API/Data/Repositories/UserRepository.cs +++ b/API/Data/Repositories/UserRepository.cs @@ -5,12 +5,15 @@ using System.Linq; using System.Threading.Tasks; using API.Constants; using API.DTOs; +using API.DTOs.Account; +using API.DTOs.Filtering; using API.DTOs.Reader; using API.Entities; using AutoMapper; using AutoMapper.QueryableExtensions; using Microsoft.AspNetCore.Identity; using Microsoft.EntityFrameworkCore; +using SixLabors.ImageSharp.PixelFormats; namespace API.Data.Repositories; @@ -25,6 +28,7 @@ public enum AppUserIncludes UserPreferences = 32, WantToRead = 64, ReadingListsWithItems = 128, + Devices = 256, } @@ -33,6 +37,7 @@ public interface IUserRepository void Update(AppUser user); void Update(AppUserPreferences preferences); void Update(AppUserBookmark bookmark); + void Add(AppUserBookmark bookmark); public void Delete(AppUser user); void Delete(AppUserBookmark bookmark); Task> GetEmailConfirmedMemberDtosAsync(); @@ -44,7 +49,7 @@ public interface IUserRepository Task> GetBookmarkDtosForSeries(int userId, int seriesId); Task> GetBookmarkDtosForVolume(int userId, int volumeId); Task> GetBookmarkDtosForChapter(int userId, int chapterId); - Task> GetAllBookmarkDtos(int userId); + Task> GetAllBookmarkDtos(int userId, FilterDto filter); Task> GetAllBookmarksAsync(); Task GetBookmarkForPage(int page, int chapterId, int userId); Task GetBookmarkAsync(int bookmarkId); @@ -58,6 +63,7 @@ public interface IUserRepository Task> GetAllPreferencesByThemeAsync(int themeId); Task HasAccessToLibrary(int libraryId, int userId); Task> GetAllUsersAsync(AppUserIncludes includeFlags); + Task GetUserByConfirmationToken(string token); } public class UserRepository : IUserRepository @@ -88,6 
+94,11 @@ public class UserRepository : IUserRepository _context.Entry(bookmark).State = EntityState.Modified; } + public void Add(AppUserBookmark bookmark) + { + _context.AppUserBookmark.Add(bookmark); + } + public void Delete(AppUser user) { _context.AppUser.Remove(user); @@ -186,6 +197,11 @@ public class UserRepository : IUserRepository query = query.Include(u => u.WantToRead); } + if (includeFlags.HasFlag(AppUserIncludes.Devices)) + { + query = query.Include(u => u.Devices); + } + return query; @@ -227,7 +243,8 @@ public class UserRepository : IUserRepository public async Task> GetAllUsers() { - return await _context.AppUser.ToListAsync(); + return await _context.AppUser + .ToListAsync(); } public async Task> GetAllPreferencesByThemeAsync(int themeId) @@ -253,6 +270,11 @@ public class UserRepository : IUserRepository return await query.ToListAsync(); } + public async Task GetUserByConfirmationToken(string token) + { + return await _context.AppUser.SingleOrDefaultAsync(u => u.ConfirmationToken.Equals(token)); + } + public async Task> GetAdminUsersAsync() { return await _userManager.GetUsersInRoleAsync(PolicyConstants.AdminRole); @@ -309,12 +331,38 @@ public class UserRepository : IUserRepository .ToListAsync(); } - public async Task> GetAllBookmarkDtos(int userId) + /// + /// Get all bookmarks for the user + /// + /// + /// Only supports SeriesNameQuery + /// + public async Task> GetAllBookmarkDtos(int userId, FilterDto filter) { - return await _context.AppUserBookmark + var query = _context.AppUserBookmark .Where(x => x.AppUserId == userId) .OrderBy(x => x.Page) - .AsNoTracking() + .AsNoTracking(); + + if (!string.IsNullOrEmpty(filter.SeriesNameQuery)) + { + var seriesNameQueryNormalized = Services.Tasks.Scanner.Parser.Parser.Normalize(filter.SeriesNameQuery); + var filterSeriesQuery = query.Join(_context.Series, b => b.SeriesId, s => s.Id, (bookmark, series) => new + { + bookmark, + series + }) + .Where(o => EF.Functions.Like(o.series.Name, 
$"%{filter.SeriesNameQuery}%") + || EF.Functions.Like(o.series.OriginalName, $"%{filter.SeriesNameQuery}%") + || EF.Functions.Like(o.series.LocalizedName, $"%{filter.SeriesNameQuery}%") + || EF.Functions.Like(o.series.NormalizedName, $"%{seriesNameQueryNormalized}%") + ); + + query = filterSeriesQuery.Select(o => o.bookmark); + } + + + return await query .ProjectTo(_mapper.ConfigurationProvider) .ToListAsync(); } @@ -349,6 +397,11 @@ public class UserRepository : IUserRepository Created = u.Created, LastActive = u.LastActive, Roles = u.UserRoles.Select(r => r.Role.Name).ToList(), + AgeRestriction = new AgeRestrictionDto() + { + AgeRating = u.AgeRestriction, + IncludeUnknowns = u.AgeRestrictionIncludeUnknowns + }, Libraries = u.Libraries.Select(l => new LibraryDto { Name = l.Name, @@ -362,10 +415,14 @@ public class UserRepository : IUserRepository .ToListAsync(); } + /// + /// Returns a list of users that are considered Pending by invite. This means email is unconfirmed and they have never logged in + /// + /// public async Task> GetPendingMemberDtosAsync() { return await _context.Users - .Where(u => !u.EmailConfirmed) + .Where(u => !u.EmailConfirmed && u.LastActive == DateTime.MinValue) .Include(x => x.Libraries) .Include(r => r.UserRoles) .ThenInclude(r => r.Role) @@ -378,6 +435,11 @@ public class UserRepository : IUserRepository Created = u.Created, LastActive = u.LastActive, Roles = u.UserRoles.Select(r => r.Role.Name).ToList(), + AgeRestriction = new AgeRestrictionDto() + { + AgeRating = u.AgeRestriction, + IncludeUnknowns = u.AgeRestrictionIncludeUnknowns + }, Libraries = u.Libraries.Select(l => new LibraryDto { Name = l.Name, diff --git a/API/Data/Scanner/Chunk.cs b/API/Data/Scanner/Chunk.cs index 9a9e04f5c..78091200d 100644 --- a/API/Data/Scanner/Chunk.cs +++ b/API/Data/Scanner/Chunk.cs @@ -1,21 +1,20 @@ -namespace API.Data.Scanner +namespace API.Data.Scanner; + +/// +/// Represents a set of Entities which is broken up and iterated on +/// +public class 
Chunk { /// - /// Represents a set of Entities which is broken up and iterated on + /// Total number of entities /// - public class Chunk - { - /// - /// Total number of entities - /// - public int TotalSize { get; set; } - /// - /// Size of each chunk to iterate over - /// - public int ChunkSize { get; set; } - /// - /// Total chunks to iterate over - /// - public int TotalChunks { get; set; } - } + public int TotalSize { get; set; } + /// + /// Size of each chunk to iterate over + /// + public int ChunkSize { get; set; } + /// + /// Total chunks to iterate over + /// + public int TotalChunks { get; set; } } diff --git a/API/Data/Seed.cs b/API/Data/Seed.cs index 97e141eab..15e68abeb 100644 --- a/API/Data/Seed.cs +++ b/API/Data/Seed.cs @@ -14,133 +14,131 @@ using Kavita.Common.EnvironmentInfo; using Microsoft.AspNetCore.Identity; using Microsoft.EntityFrameworkCore; -namespace API.Data +namespace API.Data; + +public static class Seed { - public static class Seed + /// + /// Generated on Startup. Seed.SeedSettings must run before + /// + public static ImmutableArray DefaultSettings; + + public static readonly ImmutableArray DefaultThemes = ImmutableArray.Create( + new List + { + new() + { + Name = "Dark", + NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize("Dark"), + Provider = ThemeProvider.System, + FileName = "dark.scss", + IsDefault = true, + } + }.ToArray()); + + public static async Task SeedRoles(RoleManager roleManager) { - /// - /// Generated on Startup. 
Seed.SeedSettings must run before - /// - public static ImmutableArray DefaultSettings; + var roles = typeof(PolicyConstants) + .GetFields(BindingFlags.Public | BindingFlags.Static) + .Where(f => f.FieldType == typeof(string)) + .ToDictionary(f => f.Name, + f => (string) f.GetValue(null)).Values + .Select(policyName => new AppRole() {Name = policyName}) + .ToList(); - public static readonly ImmutableArray DefaultThemes = ImmutableArray.Create( - new List - { - new() - { - Name = "Dark", - NormalizedName = Services.Tasks.Scanner.Parser.Parser.Normalize("Dark"), - Provider = ThemeProvider.System, - FileName = "dark.scss", - IsDefault = true, - } - }.ToArray()); - - public static async Task SeedRoles(RoleManager roleManager) + foreach (var role in roles) { - var roles = typeof(PolicyConstants) - .GetFields(BindingFlags.Public | BindingFlags.Static) - .Where(f => f.FieldType == typeof(string)) - .ToDictionary(f => f.Name, - f => (string) f.GetValue(null)).Values - .Select(policyName => new AppRole() {Name = policyName}) - .ToList(); - - foreach (var role in roles) + var exists = await roleManager.RoleExistsAsync(role.Name); + if (!exists) { - var exists = await roleManager.RoleExistsAsync(role.Name); - if (!exists) - { - await roleManager.CreateAsync(role); - } + await roleManager.CreateAsync(role); } } - - public static async Task SeedThemes(DataContext context) - { - await context.Database.EnsureCreatedAsync(); - - foreach (var theme in DefaultThemes) - { - var existing = context.SiteTheme.FirstOrDefault(s => s.Name.Equals(theme.Name)); - if (existing == null) - { - await context.SiteTheme.AddAsync(theme); - } - } - - await context.SaveChangesAsync(); - } - - public static async Task SeedSettings(DataContext context, IDirectoryService directoryService) - { - await context.Database.EnsureCreatedAsync(); - DefaultSettings = ImmutableArray.Create(new List() - { - new() {Key = ServerSettingKey.CacheDirectory, Value = directoryService.CacheDirectory}, - new() {Key = 
ServerSettingKey.TaskScan, Value = "daily"}, - new() - { - Key = ServerSettingKey.LoggingLevel, Value = "Information" - }, // Not used from DB, but DB is sync with appSettings.json - new() {Key = ServerSettingKey.TaskBackup, Value = "daily"}, - new() - { - Key = ServerSettingKey.BackupDirectory, Value = Path.GetFullPath(DirectoryService.BackupDirectory) - }, - new() - { - Key = ServerSettingKey.Port, Value = "5000" - }, // Not used from DB, but DB is sync with appSettings.json - new() {Key = ServerSettingKey.AllowStatCollection, Value = "true"}, - new() {Key = ServerSettingKey.EnableOpds, Value = "false"}, - new() {Key = ServerSettingKey.EnableAuthentication, Value = "true"}, - new() {Key = ServerSettingKey.BaseUrl, Value = "/"}, - new() {Key = ServerSettingKey.InstallId, Value = HashUtil.AnonymousToken()}, - new() {Key = ServerSettingKey.InstallVersion, Value = BuildInfo.Version.ToString()}, - new() {Key = ServerSettingKey.BookmarkDirectory, Value = directoryService.BookmarkDirectory}, - new() {Key = ServerSettingKey.EmailServiceUrl, Value = EmailService.DefaultApiUrl}, - new() {Key = ServerSettingKey.ConvertBookmarkToWebP, Value = "false"}, - new() {Key = ServerSettingKey.EnableSwaggerUi, Value = "false"}, - new() {Key = ServerSettingKey.TotalBackups, Value = "30"}, - new() {Key = ServerSettingKey.EnableFolderWatching, Value = "false"}, - }.ToArray()); - - foreach (var defaultSetting in DefaultSettings) - { - var existing = context.ServerSetting.FirstOrDefault(s => s.Key == defaultSetting.Key); - if (existing == null) - { - await context.ServerSetting.AddAsync(defaultSetting); - } - } - - await context.SaveChangesAsync(); - - // Port and LoggingLevel are managed in appSettings.json. 
Update the DB values to match - context.ServerSetting.First(s => s.Key == ServerSettingKey.Port).Value = - Configuration.Port + string.Empty; - context.ServerSetting.First(s => s.Key == ServerSettingKey.LoggingLevel).Value = - Configuration.LogLevel + string.Empty; - context.ServerSetting.First(s => s.Key == ServerSettingKey.CacheDirectory).Value = - directoryService.CacheDirectory + string.Empty; - context.ServerSetting.First(s => s.Key == ServerSettingKey.BackupDirectory).Value = - DirectoryService.BackupDirectory + string.Empty; - - await context.SaveChangesAsync(); - - } - - public static async Task SeedUserApiKeys(DataContext context) - { - await context.Database.EnsureCreatedAsync(); - - var users = await context.AppUser.ToListAsync(); - foreach (var user in users.Where(user => string.IsNullOrEmpty(user.ApiKey))) - { - user.ApiKey = HashUtil.ApiKey(); - } - await context.SaveChangesAsync(); - } + } + + public static async Task SeedThemes(DataContext context) + { + await context.Database.EnsureCreatedAsync(); + + foreach (var theme in DefaultThemes) + { + var existing = context.SiteTheme.FirstOrDefault(s => s.Name.Equals(theme.Name)); + if (existing == null) + { + await context.SiteTheme.AddAsync(theme); + } + } + + await context.SaveChangesAsync(); + } + + public static async Task SeedSettings(DataContext context, IDirectoryService directoryService) + { + await context.Database.EnsureCreatedAsync(); + DefaultSettings = ImmutableArray.Create(new List() + { + new() {Key = ServerSettingKey.CacheDirectory, Value = directoryService.CacheDirectory}, + new() {Key = ServerSettingKey.TaskScan, Value = "daily"}, + new() + { + Key = ServerSettingKey.LoggingLevel, Value = "Information" + }, // Not used from DB, but DB is sync with appSettings.json + new() {Key = ServerSettingKey.TaskBackup, Value = "daily"}, + new() + { + Key = ServerSettingKey.BackupDirectory, Value = Path.GetFullPath(DirectoryService.BackupDirectory) + }, + new() + { + Key = ServerSettingKey.Port, 
Value = "5000" + }, // Not used from DB, but DB is sync with appSettings.json + new() {Key = ServerSettingKey.AllowStatCollection, Value = "true"}, + new() {Key = ServerSettingKey.EnableOpds, Value = "false"}, + new() {Key = ServerSettingKey.EnableAuthentication, Value = "true"}, + new() {Key = ServerSettingKey.BaseUrl, Value = "/"}, + new() {Key = ServerSettingKey.InstallId, Value = HashUtil.AnonymousToken()}, + new() {Key = ServerSettingKey.InstallVersion, Value = BuildInfo.Version.ToString()}, + new() {Key = ServerSettingKey.BookmarkDirectory, Value = directoryService.BookmarkDirectory}, + new() {Key = ServerSettingKey.EmailServiceUrl, Value = EmailService.DefaultApiUrl}, + new() {Key = ServerSettingKey.ConvertBookmarkToWebP, Value = "false"}, + new() {Key = ServerSettingKey.EnableSwaggerUi, Value = "false"}, + new() {Key = ServerSettingKey.TotalBackups, Value = "30"}, + new() {Key = ServerSettingKey.TotalLogs, Value = "30"}, + new() {Key = ServerSettingKey.EnableFolderWatching, Value = "false"}, + }.ToArray()); + + foreach (var defaultSetting in DefaultSettings) + { + var existing = context.ServerSetting.FirstOrDefault(s => s.Key == defaultSetting.Key); + if (existing == null) + { + await context.ServerSetting.AddAsync(defaultSetting); + } + } + + await context.SaveChangesAsync(); + + // Port and LoggingLevel are managed in appSettings.json. 
Update the DB values to match + context.ServerSetting.First(s => s.Key == ServerSettingKey.Port).Value = + Configuration.Port + string.Empty; + context.ServerSetting.First(s => s.Key == ServerSettingKey.CacheDirectory).Value = + directoryService.CacheDirectory + string.Empty; + context.ServerSetting.First(s => s.Key == ServerSettingKey.BackupDirectory).Value = + DirectoryService.BackupDirectory + string.Empty; + + await context.SaveChangesAsync(); + + } + + public static async Task SeedUserApiKeys(DataContext context) + { + await context.Database.EnsureCreatedAsync(); + + var users = await context.AppUser.ToListAsync(); + foreach (var user in users.Where(user => string.IsNullOrEmpty(user.ApiKey))) + { + user.ApiKey = HashUtil.ApiKey(); + } + await context.SaveChangesAsync(); } } diff --git a/API/Data/UnitOfWork.cs b/API/Data/UnitOfWork.cs index 2d2adac42..50aadf421 100644 --- a/API/Data/UnitOfWork.cs +++ b/API/Data/UnitOfWork.cs @@ -24,6 +24,7 @@ public interface IUnitOfWork ITagRepository TagRepository { get; } ISiteThemeRepository SiteThemeRepository { get; } IMangaFileRepository MangaFileRepository { get; } + IDeviceRepository DeviceRepository { get; } bool Commit(); Task CommitAsync(); bool HasChanges(); @@ -60,6 +61,7 @@ public class UnitOfWork : IUnitOfWork public ITagRepository TagRepository => new TagRepository(_context, _mapper); public ISiteThemeRepository SiteThemeRepository => new SiteThemeRepository(_context, _mapper); public IMangaFileRepository MangaFileRepository => new MangaFileRepository(_context, _mapper); + public IDeviceRepository DeviceRepository => new DeviceRepository(_context, _mapper); /// /// Commits changes to the DB. Completes the open transaction. 
diff --git a/API/Entities/AppRole.cs b/API/Entities/AppRole.cs index 8c0d07f96..e27311027 100644 --- a/API/Entities/AppRole.cs +++ b/API/Entities/AppRole.cs @@ -1,10 +1,9 @@ using System.Collections.Generic; using Microsoft.AspNetCore.Identity; -namespace API.Entities +namespace API.Entities; + +public class AppRole : IdentityRole { - public class AppRole : IdentityRole - { - public ICollection UserRoles { get; set; } - } -} \ No newline at end of file + public ICollection UserRoles { get; set; } +} diff --git a/API/Entities/AppUser.cs b/API/Entities/AppUser.cs index 640860a0f..8a603ba57 100644 --- a/API/Entities/AppUser.cs +++ b/API/Entities/AppUser.cs @@ -1,49 +1,63 @@ using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; +using API.Entities.Enums; using API.Entities.Interfaces; using Microsoft.AspNetCore.Identity; -namespace API.Entities +namespace API.Entities; + +public class AppUser : IdentityUser, IHasConcurrencyToken { - public class AppUser : IdentityUser, IHasConcurrencyToken + public DateTime Created { get; set; } = DateTime.Now; + public DateTime LastActive { get; set; } + public ICollection Libraries { get; set; } + public ICollection UserRoles { get; set; } + public ICollection Progresses { get; set; } + public ICollection Ratings { get; set; } + public AppUserPreferences UserPreferences { get; set; } + /// + /// Bookmarks associated with this User + /// + public ICollection Bookmarks { get; set; } + /// + /// Reading lists associated with this user + /// + public ICollection ReadingLists { get; set; } + /// + /// A list of Series the user want's to read + /// + public ICollection WantToRead { get; set; } + /// + /// A list of Devices which allows the user to send files to + /// + public ICollection Devices { get; set; } + /// + /// An API Key to interact with external services, like OPDS + /// + public string ApiKey { get; set; } + /// + /// The confirmation token for the user (invite). 
This will be set to null after the user confirms. + /// + public string ConfirmationToken { get; set; } + /// + /// The highest age rating the user has access to. Not applicable for admins + /// + public AgeRating AgeRestriction { get; set; } = AgeRating.NotApplicable; + /// + /// If an age rating restriction is applied to the account, if Unknowns should be allowed for the user. Defaults to false. + /// + public bool AgeRestrictionIncludeUnknowns { get; set; } = false; + + /// + [ConcurrencyCheck] + public uint RowVersion { get; private set; } + + /// + public void OnSavingChanges() { - public DateTime Created { get; set; } = DateTime.Now; - public DateTime LastActive { get; set; } - public ICollection Libraries { get; set; } - public ICollection UserRoles { get; set; } - public ICollection Progresses { get; set; } - public ICollection Ratings { get; set; } - public AppUserPreferences UserPreferences { get; set; } - public ICollection Bookmarks { get; set; } - /// - /// Reading lists associated with this user - /// - public ICollection ReadingLists { get; set; } - /// - /// A list of Series the user want's to read - /// - public ICollection WantToRead { get; set; } - /// - /// An API Key to interact with external services, like OPDS - /// - public string ApiKey { get; set; } - /// - /// The confirmation token for the user (invite). This will be set to null after the user confirms. 
- /// - public string ConfirmationToken { get; set; } - - - /// - [ConcurrencyCheck] - public uint RowVersion { get; private set; } - - /// - public void OnSavingChanges() - { - RowVersion++; - } - + RowVersion++; } + } diff --git a/API/Entities/AppUserBookmark.cs b/API/Entities/AppUserBookmark.cs index 6d1ff0450..faaf431b3 100644 --- a/API/Entities/AppUserBookmark.cs +++ b/API/Entities/AppUserBookmark.cs @@ -2,30 +2,29 @@ using System.Text.Json.Serialization; using API.Entities.Interfaces; -namespace API.Entities +namespace API.Entities; + +/// +/// Represents a saved page in a Chapter entity for a given user. +/// +public class AppUserBookmark : IEntityDate { + public int Id { get; set; } + public int Page { get; set; } + public int SeriesId { get; set; } + public int VolumeId { get; set; } + public int ChapterId { get; set; } + /// - /// Represents a saved page in a Chapter entity for a given user. + /// Filename in the Bookmark Directory /// - public class AppUserBookmark : IEntityDate - { - public int Id { get; set; } - public int Page { get; set; } - public int VolumeId { get; set; } - public int SeriesId { get; set; } - public int ChapterId { get; set; } - - /// - /// Filename in the Bookmark Directory - /// - public string FileName { get; set; } = string.Empty; + public string FileName { get; set; } = string.Empty; - // Relationships - [JsonIgnore] - public AppUser AppUser { get; set; } - public int AppUserId { get; set; } - public DateTime Created { get; set; } - public DateTime LastModified { get; set; } - } + // Relationships + [JsonIgnore] + public AppUser AppUser { get; set; } + public int AppUserId { get; set; } + public DateTime Created { get; set; } + public DateTime LastModified { get; set; } } diff --git a/API/Entities/AppUserPreferences.cs b/API/Entities/AppUserPreferences.cs index 477f37999..f29ede382 100644 --- a/API/Entities/AppUserPreferences.cs +++ b/API/Entities/AppUserPreferences.cs @@ -1,109 +1,112 @@ using API.Entities.Enums; using 
API.Entities.Enums.UserPreferences; -namespace API.Entities +namespace API.Entities; + +public class AppUserPreferences { - public class AppUserPreferences - { - public int Id { get; set; } - /// - /// Manga Reader Option: What direction should the next/prev page buttons go - /// - public ReadingDirection ReadingDirection { get; set; } = ReadingDirection.LeftToRight; - /// - /// Manga Reader Option: How should the image be scaled to screen - /// - public ScalingOption ScalingOption { get; set; } = ScalingOption.Automatic; - /// - /// Manga Reader Option: Which side of a split image should we show first - /// - public PageSplitOption PageSplitOption { get; set; } = PageSplitOption.FitSplit; - /// - /// Manga Reader Option: How the manga reader should perform paging or reading of the file - /// - /// Webtoon uses scrolling to page, MANGA_LR uses paging by clicking left/right side of reader, MANGA_UD uses paging - /// by clicking top/bottom sides of reader. - /// - /// - public ReaderMode ReaderMode { get; set; } + public int Id { get; set; } + /// + /// Manga Reader Option: What direction should the next/prev page buttons go + /// + public ReadingDirection ReadingDirection { get; set; } = ReadingDirection.LeftToRight; + /// + /// Manga Reader Option: How should the image be scaled to screen + /// + public ScalingOption ScalingOption { get; set; } = ScalingOption.Automatic; + /// + /// Manga Reader Option: Which side of a split image should we show first + /// + public PageSplitOption PageSplitOption { get; set; } = PageSplitOption.FitSplit; + /// + /// Manga Reader Option: How the manga reader should perform paging or reading of the file + /// + /// Webtoon uses scrolling to page, MANGA_LR uses paging by clicking left/right side of reader, MANGA_UD uses paging + /// by clicking top/bottom sides of reader. 
+ /// + /// + public ReaderMode ReaderMode { get; set; } - /// - /// Manga Reader Option: Allow the menu to close after 6 seconds without interaction - /// - public bool AutoCloseMenu { get; set; } = true; - /// - /// Manga Reader Option: Show screen hints to the user on some actions, ie) pagination direction change - /// - public bool ShowScreenHints { get; set; } = true; - /// - /// Manga Reader Option: How many pages to display in the reader at once - /// - public LayoutMode LayoutMode { get; set; } = LayoutMode.Single; - /// - /// Manga Reader Option: Background color of the reader - /// - public string BackgroundColor { get; set; } = "#000000"; - /// - /// Book Reader Option: Override extra Margin - /// - public int BookReaderMargin { get; set; } = 15; - /// - /// Book Reader Option: Override line-height - /// - public int BookReaderLineSpacing { get; set; } = 100; - /// - /// Book Reader Option: Override font size - /// - public int BookReaderFontSize { get; set; } = 100; - /// - /// Book Reader Option: Maps to the default Kavita font-family (inherit) or an override - /// - public string BookReaderFontFamily { get; set; } = "default"; - /// - /// Book Reader Option: Allows tapping on side of screens to paginate - /// - public bool BookReaderTapToPaginate { get; set; } = false; - /// - /// Book Reader Option: What direction should the next/prev page buttons go - /// - public ReadingDirection BookReaderReadingDirection { get; set; } = ReadingDirection.LeftToRight; - /// - /// UI Site Global Setting: The UI theme the user should use. - /// - /// Should default to Dark - public SiteTheme Theme { get; set; } - /// - /// Book Reader Option: The color theme to decorate the book contents - /// - /// Should default to Dark - public string BookThemeName { get; set; } = "Dark"; - /// - /// Book Reader Option: The way a page from a book is rendered. 
Default is as book dictates, 1 column is fit to height, - /// 2 column is fit to height, 2 columns - /// - /// Defaults to Default - public BookPageLayoutMode BookReaderLayoutMode { get; set; } = BookPageLayoutMode.Default; - /// - /// Book Reader Option: A flag that hides the menu-ing system behind a click on the screen. This should be used with tap to paginate, but the app doesn't enforce this. - /// - /// Defaults to false - public bool BookReaderImmersiveMode { get; set; } = false; - /// - /// Global Site Option: If the UI should layout items as Cards or List items - /// - /// Defaults to Cards - public PageLayoutMode GlobalPageLayoutMode { get; set; } = PageLayoutMode.Cards; - /// - /// UI Site Global Setting: If unread summaries should be blurred until expanded or unless user has read it already - /// - /// Defaults to false - public bool BlurUnreadSummaries { get; set; } = false; - /// - /// UI Site Global Setting: Should Kavita prompt user to confirm downloads that are greater than 100 MB. 
- /// - public bool PromptForDownloadSize { get; set; } = true; + /// + /// Manga Reader Option: Allow the menu to close after 6 seconds without interaction + /// + public bool AutoCloseMenu { get; set; } = true; + /// + /// Manga Reader Option: Show screen hints to the user on some actions, ie) pagination direction change + /// + public bool ShowScreenHints { get; set; } = true; + /// + /// Manga Reader Option: How many pages to display in the reader at once + /// + public LayoutMode LayoutMode { get; set; } = LayoutMode.Single; + /// + /// Manga Reader Option: Background color of the reader + /// + public string BackgroundColor { get; set; } = "#000000"; + /// + /// Book Reader Option: Override extra Margin + /// + public int BookReaderMargin { get; set; } = 15; + /// + /// Book Reader Option: Override line-height + /// + public int BookReaderLineSpacing { get; set; } = 100; + /// + /// Book Reader Option: Override font size + /// + public int BookReaderFontSize { get; set; } = 100; + /// + /// Book Reader Option: Maps to the default Kavita font-family (inherit) or an override + /// + public string BookReaderFontFamily { get; set; } = "default"; + /// + /// Book Reader Option: Allows tapping on side of screens to paginate + /// + public bool BookReaderTapToPaginate { get; set; } = false; + /// + /// Book Reader Option: What direction should the next/prev page buttons go + /// + public ReadingDirection BookReaderReadingDirection { get; set; } = ReadingDirection.LeftToRight; + /// + /// UI Site Global Setting: The UI theme the user should use. + /// + /// Should default to Dark + public SiteTheme Theme { get; set; } + /// + /// Book Reader Option: The color theme to decorate the book contents + /// + /// Should default to Dark + public string BookThemeName { get; set; } = "Dark"; + /// + /// Book Reader Option: The way a page from a book is rendered. 
Default is as book dictates, 1 column is fit to height, + /// 2 column is fit to height, 2 columns + /// + /// Defaults to Default + public BookPageLayoutMode BookReaderLayoutMode { get; set; } = BookPageLayoutMode.Default; + /// + /// Book Reader Option: A flag that hides the menu-ing system behind a click on the screen. This should be used with tap to paginate, but the app doesn't enforce this. + /// + /// Defaults to false + public bool BookReaderImmersiveMode { get; set; } = false; + /// + /// Global Site Option: If the UI should layout items as Cards or List items + /// + /// Defaults to Cards + public PageLayoutMode GlobalPageLayoutMode { get; set; } = PageLayoutMode.Cards; + /// + /// UI Site Global Setting: If unread summaries should be blurred until expanded or unless user has read it already + /// + /// Defaults to false + public bool BlurUnreadSummaries { get; set; } = false; + /// + /// UI Site Global Setting: Should Kavita prompt user to confirm downloads that are greater than 100 MB. + /// + public bool PromptForDownloadSize { get; set; } = true; + /// + /// UI Site Global Setting: Should Kavita disable CSS transitions + /// + public bool NoTransitions { get; set; } = false; - public AppUser AppUser { get; set; } - public int AppUserId { get; set; } - } + public AppUser AppUser { get; set; } + public int AppUserId { get; set; } } diff --git a/API/Entities/AppUserProgress.cs b/API/Entities/AppUserProgress.cs index 1704628cb..6804bfa98 100644 --- a/API/Entities/AppUserProgress.cs +++ b/API/Entities/AppUserProgress.cs @@ -2,56 +2,55 @@ using System; using API.Entities.Interfaces; -namespace API.Entities +namespace API.Entities; + +/// +/// Represents the progress a single user has on a given Chapter. +/// +//[Index(nameof(SeriesId), nameof(VolumeId), nameof(ChapterId), nameof(AppUserId), IsUnique = true)] +public class AppUserProgress : IEntityDate { /// - /// Represents the progress a single user has on a given Chapter. 
+ /// Id of Entity /// - //[Index(nameof(SeriesId), nameof(VolumeId), nameof(ChapterId), nameof(AppUserId), IsUnique = true)] - public class AppUserProgress : IEntityDate - { - /// - /// Id of Entity - /// - public int Id { get; set; } - /// - /// Pages Read for given Chapter - /// - public int PagesRead { get; set; } - /// - /// Volume belonging to Chapter - /// - public int VolumeId { get; set; } - /// - /// Series belonging to Chapter - /// - public int SeriesId { get; set; } - /// - /// Chapter - /// - public int ChapterId { get; set; } - /// - /// For Book Reader, represents the nearest passed anchor on the screen that can be used to resume scroll point - /// on next load - /// - public string BookScrollId { get; set; } - /// - /// When this was first created - /// - public DateTime Created { get; set; } - /// - /// Last date this was updated - /// - public DateTime LastModified { get; set; } + public int Id { get; set; } + /// + /// Pages Read for given Chapter + /// + public int PagesRead { get; set; } + /// + /// Volume belonging to Chapter + /// + public int VolumeId { get; set; } + /// + /// Series belonging to Chapter + /// + public int SeriesId { get; set; } + /// + /// Chapter + /// + public int ChapterId { get; set; } + /// + /// For Book Reader, represents the nearest passed anchor on the screen that can be used to resume scroll point + /// on next load + /// + public string BookScrollId { get; set; } + /// + /// When this was first created + /// + public DateTime Created { get; set; } + /// + /// Last date this was updated + /// + public DateTime LastModified { get; set; } - // Relationships - /// - /// Navigational Property for EF. Links to a unique AppUser - /// - public AppUser AppUser { get; set; } - /// - /// User this progress belongs to - /// - public int AppUserId { get; set; } - } + // Relationships + /// + /// Navigational Property for EF. 
Links to a unique AppUser + /// + public AppUser AppUser { get; set; } + /// + /// User this progress belongs to + /// + public int AppUserId { get; set; } } diff --git a/API/Entities/AppUserRating.cs b/API/Entities/AppUserRating.cs index ca176e7ae..54376bbd1 100644 --- a/API/Entities/AppUserRating.cs +++ b/API/Entities/AppUserRating.cs @@ -1,22 +1,21 @@  -namespace API.Entities +namespace API.Entities; + +public class AppUserRating { - public class AppUserRating - { - public int Id { get; set; } - /// - /// A number between 0-5 that represents how good a series is. - /// - public int Rating { get; set; } - /// - /// A short summary the user can write when giving their review. - /// - public string Review { get; set; } - public int SeriesId { get; set; } - - - // Relationships - public int AppUserId { get; set; } - public AppUser AppUser { get; set; } - } -} \ No newline at end of file + public int Id { get; set; } + /// + /// A number between 0-5 that represents how good a series is. + /// + public int Rating { get; set; } + /// + /// A short summary the user can write when giving their review. 
+ /// + public string Review { get; set; } + public int SeriesId { get; set; } + + + // Relationships + public int AppUserId { get; set; } + public AppUser AppUser { get; set; } +} diff --git a/API/Entities/AppUserRole.cs b/API/Entities/AppUserRole.cs index b4c73f87e..09ccbce6c 100644 --- a/API/Entities/AppUserRole.cs +++ b/API/Entities/AppUserRole.cs @@ -1,10 +1,9 @@ using Microsoft.AspNetCore.Identity; -namespace API.Entities +namespace API.Entities; + +public class AppUserRole : IdentityUserRole { - public class AppUserRole : IdentityUserRole - { - public AppUser User { get; set; } - public AppRole Role { get; set; } - } -} \ No newline at end of file + public AppUser User { get; set; } + public AppRole Role { get; set; } +} diff --git a/API/Entities/Chapter.cs b/API/Entities/Chapter.cs index de989a503..cc0db195c 100644 --- a/API/Entities/Chapter.cs +++ b/API/Entities/Chapter.cs @@ -5,116 +5,116 @@ using API.Entities.Interfaces; using API.Parser; using API.Services; -namespace API.Entities +namespace API.Entities; + +public class Chapter : IEntityDate, IHasReadTimeEstimate { - public class Chapter : IEntityDate, IHasReadTimeEstimate + public int Id { get; set; } + /// + /// Range of numbers. Chapter 2-4 -> "2-4". Chapter 2 -> "2". + /// + public string Range { get; set; } + /// + /// Smallest number of the Range. 
Can be a partial like Chapter 4.5 + /// + public string Number { get; set; } + /// + /// The files that represent this Chapter + /// + public ICollection Files { get; set; } + public DateTime Created { get; set; } + public DateTime LastModified { get; set; } + /// + /// Relative path to the (managed) image file representing the cover image + /// + /// The file is managed internally to Kavita's APPDIR + public string CoverImage { get; set; } + public bool CoverImageLocked { get; set; } + /// + /// Total number of pages in all MangaFiles + /// + public int Pages { get; set; } + /// + /// If this Chapter contains files that could only be identified as Series or has Special Identifier from filename + /// + public bool IsSpecial { get; set; } + /// + /// Used for books/specials to display custom title. For non-specials/books, will be set to + /// + public string Title { get; set; } + /// + /// Age Rating for the issue/chapter + /// + public AgeRating AgeRating { get; set; } + + /// + /// Chapter title + /// + /// This should not be confused with Title which is used for special filenames. + public string TitleName { get; set; } = string.Empty; + /// + /// Date which chapter was released + /// + public DateTime ReleaseDate { get; set; } + /// + /// Summary for the Chapter/Issue + /// + public string Summary { get; set; } + /// + /// Language for the Chapter/Issue + /// + public string Language { get; set; } + /// + /// Total number of issues or volumes in the series + /// + /// Users may use Volume count or issue count. Kavita performs some light logic to help Count match up with TotalCount + public int TotalCount { get; set; } = 0; + /// + /// Number of the Total Count (progress the Series is complete) + /// + public int Count { get; set; } = 0; + + /// + /// Total Word count of all chapters in this chapter. 
+ /// + /// Word Count is only available from EPUB files + public long WordCount { get; set; } + /// + public int MinHoursToRead { get; set; } + /// + public int MaxHoursToRead { get; set; } + /// + public int AvgHoursToRead { get; set; } + + + /// + /// All people attached at a Chapter level. Usually Comics will have different people per issue. + /// + public ICollection People { get; set; } = new List(); + /// + /// Genres for the Chapter + /// + public ICollection Genres { get; set; } = new List(); + public ICollection Tags { get; set; } = new List(); + + + + + // Relationships + public Volume Volume { get; set; } + public int VolumeId { get; set; } + + public void UpdateFrom(ParserInfo info) { - public int Id { get; set; } - /// - /// Range of numbers. Chapter 2-4 -> "2-4". Chapter 2 -> "2". - /// - public string Range { get; set; } - /// - /// Smallest number of the Range. Can be a partial like Chapter 4.5 - /// - public string Number { get; set; } - /// - /// The files that represent this Chapter - /// - public ICollection Files { get; set; } - public DateTime Created { get; set; } - public DateTime LastModified { get; set; } - /// - /// Relative path to the (managed) image file representing the cover image - /// - /// The file is managed internally to Kavita's APPDIR - public string CoverImage { get; set; } - public bool CoverImageLocked { get; set; } - /// - /// Total number of pages in all MangaFiles - /// - public int Pages { get; set; } - /// - /// If this Chapter contains files that could only be identified as Series or has Special Identifier from filename - /// - public bool IsSpecial { get; set; } - /// - /// Used for books/specials to display custom title. For non-specials/books, will be set to - /// - public string Title { get; set; } - /// - /// Age Rating for the issue/chapter - /// - public AgeRating AgeRating { get; set; } - - /// - /// Chapter title - /// - /// This should not be confused with Title which is used for special filenames. 
- public string TitleName { get; set; } = string.Empty; - /// - /// Date which chapter was released - /// - public DateTime ReleaseDate { get; set; } - /// - /// Summary for the Chapter/Issue - /// - public string Summary { get; set; } - /// - /// Language for the Chapter/Issue - /// - public string Language { get; set; } - /// - /// Total number of issues in the series - /// - public int TotalCount { get; set; } = 0; - /// - /// Number in the Total Count - /// - public int Count { get; set; } = 0; - - /// - /// Total Word count of all chapters in this chapter. - /// - /// Word Count is only available from EPUB files - public long WordCount { get; set; } - /// - public int MinHoursToRead { get; set; } - /// - public int MaxHoursToRead { get; set; } - /// - public int AvgHoursToRead { get; set; } - - - /// - /// All people attached at a Chapter level. Usually Comics will have different people per issue. - /// - public ICollection People { get; set; } = new List(); - /// - /// Genres for the Chapter - /// - public ICollection Genres { get; set; } = new List(); - public ICollection Tags { get; set; } = new List(); - - - - - // Relationships - public Volume Volume { get; set; } - public int VolumeId { get; set; } - - public void UpdateFrom(ParserInfo info) + Files ??= new List(); + IsSpecial = info.IsSpecialInfo(); + if (IsSpecial) { - Files ??= new List(); - IsSpecial = info.IsSpecialInfo(); - if (IsSpecial) - { - Number = "0"; - } - Title = (IsSpecial && info.Format == MangaFormat.Epub) - ? info.Title - : Range; - + Number = "0"; } + Title = (IsSpecial && info.Format == MangaFormat.Epub) + ? 
info.Title + : Range; + } } diff --git a/API/Entities/CollectionTag.cs b/API/Entities/CollectionTag.cs index b38960f89..f32e981e9 100644 --- a/API/Entities/CollectionTag.cs +++ b/API/Entities/CollectionTag.cs @@ -2,56 +2,55 @@ using API.Entities.Metadata; using Microsoft.EntityFrameworkCore; -namespace API.Entities +namespace API.Entities; + +/// +/// Represents a user entered field that is used as a tagging and grouping mechanism +/// +[Index(nameof(Id), nameof(Promoted), IsUnique = true)] +public class CollectionTag { + public int Id { get; set; } /// - /// Represents a user entered field that is used as a tagging and grouping mechanism + /// Visible title of the Tag /// - [Index(nameof(Id), nameof(Promoted), IsUnique = true)] - public class CollectionTag + public string Title { get; set; } + /// + /// Absolute path to the (managed) image file + /// + /// The file is managed internally to Kavita's APPDIR + public string CoverImage { get; set; } + /// + /// Denotes if the CoverImage has been overridden by the user. If so, it will not be updated during normal scan operations. + /// + public bool CoverImageLocked { get; set; } + + /// + /// A description of the tag + /// + public string Summary { get; set; } + + /// + /// A normalized string used to check if the tag already exists in the DB + /// + public string NormalizedTitle { get; set; } + /// + /// A promoted collection tag will allow all linked seriesMetadata's Series to show for all users. 
+ /// + public bool Promoted { get; set; } + + public ICollection SeriesMetadatas { get; set; } + + /// + /// Not Used due to not using concurrency update + /// + public uint RowVersion { get; private set; } + + /// + /// Not Used due to not using concurrency update + /// + public void OnSavingChanges() { - public int Id { get; set; } - /// - /// Visible title of the Tag - /// - public string Title { get; set; } - /// - /// Absolute path to the (managed) image file - /// - /// The file is managed internally to Kavita's APPDIR - public string CoverImage { get; set; } - /// - /// Denotes if the CoverImage has been overridden by the user. If so, it will not be updated during normal scan operations. - /// - public bool CoverImageLocked { get; set; } - - /// - /// A description of the tag - /// - public string Summary { get; set; } - - /// - /// A normalized string used to check if the tag already exists in the DB - /// - public string NormalizedTitle { get; set; } - /// - /// A promoted collection tag will allow all linked seriesMetadata's Series to show for all users. 
- /// - public bool Promoted { get; set; } - - public ICollection SeriesMetadatas { get; set; } - - /// - /// Not Used due to not using concurrency update - /// - public uint RowVersion { get; private set; } - - /// - /// Not Used due to not using concurrency update - /// - public void OnSavingChanges() - { - RowVersion++; - } + RowVersion++; } } diff --git a/API/Entities/Device.cs b/API/Entities/Device.cs new file mode 100644 index 000000000..e4ceabff5 --- /dev/null +++ b/API/Entities/Device.cs @@ -0,0 +1,46 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel.DataAnnotations.Schema; +using System.Net; +using API.Entities.Enums.Device; +using API.Entities.Interfaces; + +namespace API.Entities; + +/// +/// A Device is an entity that can receive data from Kavita (kindle) +/// +public class Device : IEntityDate +{ + public int Id { get; set; } + /// + /// Last Seen IP Address of the device + /// + public string IpAddress { get; set; } + /// + /// A name given to this device + /// + /// If this device is web, this will be the browser name + /// Pixel 3a, John's Kindle + public string Name { get; set; } + /// + /// An email address associated with the device (ie Kindle). 
Will be used with Send to functionality + /// + public string EmailAddress { get; set; } + /// + /// Platform (ie) Windows 10 + /// + public DevicePlatform Platform { get; set; } + + public int AppUserId { get; set; } + public AppUser AppUser { get; set; } + + + /// + /// Last time this device was used to send a file + /// + public DateTime LastUsed { get; set; } + public DateTime Created { get; set; } + public DateTime LastModified { get; set; } +} diff --git a/API/Entities/Enums/AgeRating.cs b/API/Entities/Enums/AgeRating.cs index 82dbef7ae..9eefb9fa7 100644 --- a/API/Entities/Enums/AgeRating.cs +++ b/API/Entities/Enums/AgeRating.cs @@ -8,6 +8,11 @@ namespace API.Entities.Enums; /// Based on ComicInfo.xml v2.1 https://github.com/anansi-project/comicinfo/blob/main/drafts/v2.1/ComicInfo.xsd public enum AgeRating { + /// + /// This is for Age Restriction for Restricted Profiles + /// + [Description("Not Applicable")] + NotApplicable = -1, [Description("Unknown")] Unknown = 0, [Description("Rating Pending")] diff --git a/API/Entities/Enums/Device/DevicePlatform.cs b/API/Entities/Enums/Device/DevicePlatform.cs new file mode 100644 index 000000000..9b91bddbc --- /dev/null +++ b/API/Entities/Enums/Device/DevicePlatform.cs @@ -0,0 +1,25 @@ +using System.ComponentModel; + +namespace API.Entities.Enums.Device; + +public enum DevicePlatform +{ + [Description("Custom")] + Custom = 0, + /// + /// PocketBook device, email ends in @pbsync.com + /// + [Description("PocketBook")] + PocketBook = 1, + /// + /// Kindle device, email ends in @kindle.com + /// + [Description("Kindle")] + Kindle = 2, + /// + /// Kobo device, + /// + [Description("Kobo")] + Kobo = 3, + +} diff --git a/API/Entities/Enums/LibraryType.cs b/API/Entities/Enums/LibraryType.cs index dd2c83b92..5f4ab1cc7 100644 --- a/API/Entities/Enums/LibraryType.cs +++ b/API/Entities/Enums/LibraryType.cs @@ -1,23 +1,22 @@ using System.ComponentModel; -namespace API.Entities.Enums +namespace API.Entities.Enums; + +public enum 
LibraryType { - public enum LibraryType - { - /// - /// Uses Manga regex for filename parsing - /// - [Description("Manga")] - Manga = 0, - /// - /// Uses Comic regex for filename parsing - /// - [Description("Comic")] - Comic = 1, - /// - /// Uses Manga regex for filename parsing also uses epub metadata - /// - [Description("Book")] - Book = 2, - } + /// + /// Uses Manga regex for filename parsing + /// + [Description("Manga")] + Manga = 0, + /// + /// Uses Comic regex for filename parsing + /// + [Description("Comic")] + Comic = 1, + /// + /// Uses Manga regex for filename parsing also uses epub metadata + /// + [Description("Book")] + Book = 2, } diff --git a/API/Entities/Enums/MangaFormat.cs b/API/Entities/Enums/MangaFormat.cs index 07e34ed77..cea506471 100644 --- a/API/Entities/Enums/MangaFormat.cs +++ b/API/Entities/Enums/MangaFormat.cs @@ -1,38 +1,37 @@ using System.ComponentModel; -namespace API.Entities.Enums +namespace API.Entities.Enums; + +/// +/// Represents the format of the file +/// +public enum MangaFormat { /// - /// Represents the format of the file + /// Image file + /// See for supported extensions /// - public enum MangaFormat - { - /// - /// Image file - /// See for supported extensions - /// - [Description("Image")] - Image = 0, - /// - /// Archive based file - /// See for supported extensions - /// - [Description("Archive")] - Archive = 1, - /// - /// Unknown. Not used. - /// - [Description("Unknown")] - Unknown = 2, - /// - /// EPUB File - /// - [Description("EPUB")] - Epub = 3, - /// - /// PDF File - /// - [Description("PDF")] - Pdf = 4 - } + [Description("Image")] + Image = 0, + /// + /// Archive based file + /// See for supported extensions + /// + [Description("Archive")] + Archive = 1, + /// + /// Unknown. Not used. 
+ /// + [Description("Unknown")] + Unknown = 2, + /// + /// EPUB File + /// + [Description("EPUB")] + Epub = 3, + /// + /// PDF File + /// + [Description("PDF")] + Pdf = 4 } diff --git a/API/Entities/Enums/PageSplitOption.cs b/API/Entities/Enums/PageSplitOption.cs index 5234a4cce..7b421240c 100644 --- a/API/Entities/Enums/PageSplitOption.cs +++ b/API/Entities/Enums/PageSplitOption.cs @@ -1,10 +1,9 @@ -namespace API.Entities.Enums +namespace API.Entities.Enums; + +public enum PageSplitOption { - public enum PageSplitOption - { - SplitLeftToRight = 0, - SplitRightToLeft = 1, - NoSplit = 2, - FitSplit = 3 - } + SplitLeftToRight = 0, + SplitRightToLeft = 1, + NoSplit = 2, + FitSplit = 3 } diff --git a/API/Entities/Enums/PersonRole.cs b/API/Entities/Enums/PersonRole.cs index 238c808a0..bd84985c0 100644 --- a/API/Entities/Enums/PersonRole.cs +++ b/API/Entities/Enums/PersonRole.cs @@ -1,31 +1,30 @@ -namespace API.Entities.Enums +namespace API.Entities.Enums; + +public enum PersonRole { - public enum PersonRole - { - /// - /// Another role, not covered by other types - /// - Other = 1, - /// - /// Author or Writer - /// - Writer = 3, - Penciller = 4, - Inker = 5, - Colorist = 6, - Letterer = 7, - CoverArtist = 8, - Editor = 9, - Publisher = 10, - /// - /// Represents a character/person within the story - /// - Character = 11, - /// - /// The Translator - /// - Translator = 12 + /// + /// Another role, not covered by other types + /// + Other = 1, + /// + /// Author or Writer + /// + Writer = 3, + Penciller = 4, + Inker = 5, + Colorist = 6, + Letterer = 7, + CoverArtist = 8, + Editor = 9, + Publisher = 10, + /// + /// Represents a character/person within the story + /// + Character = 11, + /// + /// The Translator + /// + Translator = 12 - } } diff --git a/API/Entities/Enums/ReaderMode.cs b/API/Entities/Enums/ReaderMode.cs index 94776252b..e1353ad59 100644 --- a/API/Entities/Enums/ReaderMode.cs +++ b/API/Entities/Enums/ReaderMode.cs @@ -1,14 +1,13 @@ using 
System.ComponentModel; -namespace API.Entities.Enums +namespace API.Entities.Enums; + +public enum ReaderMode { - public enum ReaderMode - { - [Description("Left and Right")] - LeftRight = 0, - [Description("Up and Down")] - UpDown = 1, - [Description("Webtoon")] - Webtoon = 2 - } + [Description("Left and Right")] + LeftRight = 0, + [Description("Up and Down")] + UpDown = 1, + [Description("Webtoon")] + Webtoon = 2 } diff --git a/API/Entities/Enums/ReadingDirection.cs b/API/Entities/Enums/ReadingDirection.cs index e702970c9..8804ca6d4 100644 --- a/API/Entities/Enums/ReadingDirection.cs +++ b/API/Entities/Enums/ReadingDirection.cs @@ -1,8 +1,7 @@ -namespace API.Entities.Enums +namespace API.Entities.Enums; + +public enum ReadingDirection { - public enum ReadingDirection - { - LeftToRight = 0, - RightToLeft = 1 - } -} \ No newline at end of file + LeftToRight = 0, + RightToLeft = 1 +} diff --git a/API/Entities/Enums/RelationKind.cs b/API/Entities/Enums/RelationKind.cs index c01ab9571..aa10e6816 100644 --- a/API/Entities/Enums/RelationKind.cs +++ b/API/Entities/Enums/RelationKind.cs @@ -61,6 +61,16 @@ public enum RelationKind /// Doujinshi or Fan work /// [Description("Doujinshi")] - Doujinshi = 11 + Doujinshi = 11, + /// + /// This is a UI field only. Not to be used in backend + /// + [Description("Parent")] + Parent = 12, + /// + /// Same story, could be translation, colorization... 
Different edition of the series + /// + [Description("Edition")] + Edition = 13 } diff --git a/API/Entities/Enums/ScalingOption.cs b/API/Entities/Enums/ScalingOption.cs index 2da3b79f7..f0b357898 100644 --- a/API/Entities/Enums/ScalingOption.cs +++ b/API/Entities/Enums/ScalingOption.cs @@ -1,10 +1,9 @@ -namespace API.Entities.Enums +namespace API.Entities.Enums; + +public enum ScalingOption { - public enum ScalingOption - { - FitToHeight = 0, - FitToWidth = 1, - Original = 2, - Automatic = 3 - } -} \ No newline at end of file + FitToHeight = 0, + FitToWidth = 1, + Original = 2, + Automatic = 3 +} diff --git a/API/Entities/Enums/ServerSettingKey.cs b/API/Entities/Enums/ServerSettingKey.cs index 3fcf938b2..5c4ac7bf8 100644 --- a/API/Entities/Enums/ServerSettingKey.cs +++ b/API/Entities/Enums/ServerSettingKey.cs @@ -1,100 +1,104 @@ using System.ComponentModel; -namespace API.Entities.Enums +namespace API.Entities.Enums; + +public enum ServerSettingKey { - public enum ServerSettingKey - { - /// - /// Cron format for how often full library scans are performed. - /// - [Description("TaskScan")] - TaskScan = 0, - /// - /// Where files are cached. Not currently used. - /// - [Description("CacheDirectory")] - CacheDirectory = 1, - /// - /// Cron format for how often backups are taken. - /// - [Description("TaskBackup")] - TaskBackup = 2, - /// - /// Logging level for Server. Not managed in DB. Managed in appsettings.json and synced to DB. - /// - [Description("LoggingLevel")] - LoggingLevel = 3, - /// - /// Port server listens on. Not managed in DB. Managed in appsettings.json and synced to DB. - /// - [Description("Port")] - Port = 4, - /// - /// Where the backups are stored. 
- /// - [Description("BackupDirectory")] - BackupDirectory = 5, - /// - /// Allow anonymous data to be reported to KavitaStats - /// - [Description("AllowStatCollection")] - AllowStatCollection = 6, - /// - /// Is OPDS enabled for the server - /// - [Description("EnableOpds")] - EnableOpds = 7, - /// - /// Is Authentication needed for non-admin accounts - /// - /// Deprecated. This is no longer used v0.5.1+. Assume Authentication is always in effect - [Description("EnableAuthentication")] - EnableAuthentication = 8, - /// - /// Base Url for the server. Not Implemented. - /// - [Description("BaseUrl")] - BaseUrl = 9, - /// - /// Represents this installation of Kavita. Is tied to Stat reporting but has no information about user or files. - /// - [Description("InstallId")] - InstallId = 10, - /// - /// Represents the version the software is running. - /// - /// This will be updated on Startup to the latest release. Provides ability to detect if certain migrations need to be run. - [Description("InstallVersion")] - InstallVersion = 11, - /// - /// Location of where bookmarks are stored - /// - [Description("BookmarkDirectory")] - BookmarkDirectory = 12, - /// - /// If SMTP is enabled on the server - /// - [Description("CustomEmailService")] - EmailServiceUrl = 13, - /// - /// If Kavita should save bookmarks as WebP images - /// - [Description("ConvertBookmarkToWebP")] - ConvertBookmarkToWebP = 14, - /// - /// If the Swagger UI Should be exposed. Does not require authentication, but does require a JWT. - /// - [Description("EnableSwaggerUi")] - EnableSwaggerUi = 15, - /// - /// Total Number of Backups to maintain before cleaning. Default 30, min 1. - /// - [Description("TotalBackups")] - TotalBackups = 16, - /// - /// If Kavita should watch the library folders and process changes - /// - [Description("EnableFolderWatching")] - EnableFolderWatching = 17, - } + /// + /// Cron format for how often full library scans are performed. 
+ /// + [Description("TaskScan")] + TaskScan = 0, + /// + /// Where files are cached. Not currently used. + /// + [Description("CacheDirectory")] + CacheDirectory = 1, + /// + /// Cron format for how often backups are taken. + /// + [Description("TaskBackup")] + TaskBackup = 2, + /// + /// Logging level for Server. Not managed in DB. Managed in appsettings.json and synced to DB. + /// + [Description("LoggingLevel")] + LoggingLevel = 3, + /// + /// Port server listens on. Not managed in DB. Managed in appsettings.json and synced to DB. + /// + [Description("Port")] + Port = 4, + /// + /// Where the backups are stored. + /// + [Description("BackupDirectory")] + BackupDirectory = 5, + /// + /// Allow anonymous data to be reported to KavitaStats + /// + [Description("AllowStatCollection")] + AllowStatCollection = 6, + /// + /// Is OPDS enabled for the server + /// + [Description("EnableOpds")] + EnableOpds = 7, + /// + /// Is Authentication needed for non-admin accounts + /// + /// Deprecated. This is no longer used v0.5.1+. Assume Authentication is always in effect + [Description("EnableAuthentication")] + EnableAuthentication = 8, + /// + /// Base Url for the server. Not Implemented. + /// + [Description("BaseUrl")] + BaseUrl = 9, + /// + /// Represents this installation of Kavita. Is tied to Stat reporting but has no information about user or files. + /// + [Description("InstallId")] + InstallId = 10, + /// + /// Represents the version the software is running. + /// + /// This will be updated on Startup to the latest release. Provides ability to detect if certain migrations need to be run. 
+ [Description("InstallVersion")] + InstallVersion = 11, + /// + /// Location of where bookmarks are stored + /// + [Description("BookmarkDirectory")] + BookmarkDirectory = 12, + /// + /// If SMTP is enabled on the server + /// + [Description("CustomEmailService")] + EmailServiceUrl = 13, + /// + /// If Kavita should save bookmarks as WebP images + /// + [Description("ConvertBookmarkToWebP")] + ConvertBookmarkToWebP = 14, + /// + /// If the Swagger UI Should be exposed. Does not require authentication, but does require a JWT. + /// + [Description("EnableSwaggerUi")] + EnableSwaggerUi = 15, + /// + /// Total Number of Backups to maintain before cleaning. Default 30, min 1. + /// + [Description("TotalBackups")] + TotalBackups = 16, + /// + /// If Kavita should watch the library folders and process changes + /// + [Description("EnableFolderWatching")] + EnableFolderWatching = 17, + /// + /// Total number of days worth of logs to keep + /// + [Description("TotalLogs")] + TotalLogs = 18, } diff --git a/API/Entities/FolderPath.cs b/API/Entities/FolderPath.cs index 20ba4f466..fe0e73493 100644 --- a/API/Entities/FolderPath.cs +++ b/API/Entities/FolderPath.cs @@ -1,20 +1,19 @@  using System; -namespace API.Entities -{ - public class FolderPath - { - public int Id { get; set; } - public string Path { get; set; } - /// - /// Used when scanning to see if we can skip if nothing has changed - /// - /// Time stored in UTC - public DateTime LastScanned { get; set; } +namespace API.Entities; - // Relationship - public Library Library { get; set; } - public int LibraryId { get; set; } - } +public class FolderPath +{ + public int Id { get; set; } + public string Path { get; set; } + /// + /// Used when scanning to see if we can skip if nothing has changed + /// + /// Time stored in UTC + public DateTime LastScanned { get; set; } + + // Relationship + public Library Library { get; set; } + public int LibraryId { get; set; } } diff --git a/API/Entities/Genre.cs b/API/Entities/Genre.cs 
index 447f14943..ec9cdde0e 100644 --- a/API/Entities/Genre.cs +++ b/API/Entities/Genre.cs @@ -2,17 +2,16 @@ using API.Entities.Metadata; using Microsoft.EntityFrameworkCore; -namespace API.Entities -{ - [Index(nameof(NormalizedTitle), nameof(ExternalTag), IsUnique = true)] - public class Genre - { - public int Id { get; set; } - public string Title { get; set; } - public string NormalizedTitle { get; set; } - public bool ExternalTag { get; set; } +namespace API.Entities; - public ICollection SeriesMetadatas { get; set; } - public ICollection Chapters { get; set; } - } +[Index(nameof(NormalizedTitle), nameof(ExternalTag), IsUnique = true)] +public class Genre +{ + public int Id { get; set; } + public string Title { get; set; } + public string NormalizedTitle { get; set; } + public bool ExternalTag { get; set; } + + public ICollection SeriesMetadatas { get; set; } + public ICollection Chapters { get; set; } } diff --git a/API/Entities/Interfaces/IEntityDate.cs b/API/Entities/Interfaces/IEntityDate.cs index 79330546e..11b4e8969 100644 --- a/API/Entities/Interfaces/IEntityDate.cs +++ b/API/Entities/Interfaces/IEntityDate.cs @@ -1,10 +1,9 @@ using System; -namespace API.Entities.Interfaces +namespace API.Entities.Interfaces; + +public interface IEntityDate { - public interface IEntityDate - { - DateTime Created { get; set; } - DateTime LastModified { get; set; } - } -} \ No newline at end of file + DateTime Created { get; set; } + DateTime LastModified { get; set; } +} diff --git a/API/Entities/Interfaces/IHasConcurrencyToken.cs b/API/Entities/Interfaces/IHasConcurrencyToken.cs index 9372f1eb7..3cd3f1adf 100644 --- a/API/Entities/Interfaces/IHasConcurrencyToken.cs +++ b/API/Entities/Interfaces/IHasConcurrencyToken.cs @@ -1,19 +1,18 @@ -namespace API.Entities.Interfaces +namespace API.Entities.Interfaces; + +/// +/// An interface abstracting an entity that has a concurrency token. 
+/// +public interface IHasConcurrencyToken { /// - /// An interface abstracting an entity that has a concurrency token. + /// Gets the version of this row. Acts as a concurrency token. /// - public interface IHasConcurrencyToken - { - /// - /// Gets the version of this row. Acts as a concurrency token. - /// - uint RowVersion { get; } + uint RowVersion { get; } - /// - /// Called when saving changes to this entity. - /// - void OnSavingChanges(); + /// + /// Called when saving changes to this entity. + /// + void OnSavingChanges(); - } -} \ No newline at end of file +} diff --git a/API/Entities/Library.cs b/API/Entities/Library.cs index fd9956b1f..b6fac76f3 100644 --- a/API/Entities/Library.cs +++ b/API/Entities/Library.cs @@ -5,39 +5,26 @@ using System.Linq; using API.Entities.Enums; using API.Entities.Interfaces; -namespace API.Entities +namespace API.Entities; + +public class Library : IEntityDate { - public class Library : IEntityDate - { - public int Id { get; set; } - public string Name { get; set; } - /// - /// Update this summary with a way it's used, else let's remove it. - /// - [Obsolete("This has never been coded for. 
Likely we can remove it.")] - public string CoverImage { get; set; } - public LibraryType Type { get; set; } - public DateTime Created { get; set; } - public DateTime LastModified { get; set; } - /// - /// Last time Library was scanned - /// - /// Time stored in UTC - public DateTime LastScanned { get; set; } - public ICollection Folders { get; set; } - public ICollection AppUsers { get; set; } - public ICollection Series { get; set; } - - // Methods - /// - /// Has there been any modifications to the FolderPath's directory since the date - /// - /// - public bool AnyModificationsSinceLastScan() - { - // NOTE: I don't think we can do this due to NTFS - return Folders.All(folder => File.GetLastWriteTimeUtc(folder.Path) > folder.LastScanned); - } - - } + public int Id { get; set; } + public string Name { get; set; } + /// + /// This is not used, but planned once we build out a Library detail page + /// + [Obsolete("This has never been coded for. Likely we can remove it.")] + public string CoverImage { get; set; } + public LibraryType Type { get; set; } + public DateTime Created { get; set; } + public DateTime LastModified { get; set; } + /// + /// Last time Library was scanned + /// + /// Time stored in UTC + public DateTime LastScanned { get; set; } + public ICollection Folders { get; set; } + public ICollection AppUsers { get; set; } + public ICollection Series { get; set; } } diff --git a/API/Entities/MangaFile.cs b/API/Entities/MangaFile.cs index da0a61924..5f78dd7f7 100644 --- a/API/Entities/MangaFile.cs +++ b/API/Entities/MangaFile.cs @@ -4,48 +4,47 @@ using System.IO; using API.Entities.Enums; using API.Entities.Interfaces; -namespace API.Entities +namespace API.Entities; + +/// +/// Represents a wrapper to the underlying file. This provides information around file, like number of pages, format, etc. +/// +public class MangaFile : IEntityDate { + public int Id { get; set; } /// - /// Represents a wrapper to the underlying file. 
This provides information around file, like number of pages, format, etc. + /// Absolute path to the archive file /// - public class MangaFile : IEntityDate + public string FilePath { get; set; } + /// + /// Number of pages for the given file + /// + public int Pages { get; set; } + public MangaFormat Format { get; set; } + /// + public DateTime Created { get; set; } + + /// + /// Last time underlying file was modified + /// + /// This gets updated anytime the file is scanned + public DateTime LastModified { get; set; } + /// + /// Last time file analysis ran on this file + /// + public DateTime LastFileAnalysis { get; set; } + + + // Relationship Mapping + public Chapter Chapter { get; set; } + public int ChapterId { get; set; } + + + /// + /// Updates the Last Modified time of the underlying file to the LastWriteTime + /// + public void UpdateLastModified() { - public int Id { get; set; } - /// - /// Absolute path to the archive file - /// - public string FilePath { get; set; } - /// - /// Number of pages for the given file - /// - public int Pages { get; set; } - public MangaFormat Format { get; set; } - /// - public DateTime Created { get; set; } - - /// - /// Last time underlying file was modified - /// - /// This gets updated anytime the file is scanned - public DateTime LastModified { get; set; } - /// - /// Last time file analysis ran on this file - /// - public DateTime LastFileAnalysis { get; set; } - - - // Relationship Mapping - public Chapter Chapter { get; set; } - public int ChapterId { get; set; } - - - /// - /// Updates the Last Modified time of the underlying file to the LastWriteTime - /// - public void UpdateLastModified() - { - LastModified = File.GetLastWriteTime(FilePath); - } + LastModified = File.GetLastWriteTime(FilePath); } } diff --git a/API/Entities/Metadata/SeriesMetadata.cs b/API/Entities/Metadata/SeriesMetadata.cs index 98e9fa8e9..ffadac211 100644 --- a/API/Entities/Metadata/SeriesMetadata.cs +++ 
b/API/Entities/Metadata/SeriesMetadata.cs @@ -4,84 +4,84 @@ using API.Entities.Enums; using API.Entities.Interfaces; using Microsoft.EntityFrameworkCore; -namespace API.Entities.Metadata +namespace API.Entities.Metadata; + +[Index(nameof(Id), nameof(SeriesId), IsUnique = true)] +public class SeriesMetadata : IHasConcurrencyToken { - [Index(nameof(Id), nameof(SeriesId), IsUnique = true)] - public class SeriesMetadata : IHasConcurrencyToken + public int Id { get; set; } + + public string Summary { get; set; } = string.Empty; + + public ICollection CollectionTags { get; set; } + + public ICollection Genres { get; set; } = new List(); + public ICollection Tags { get; set; } = new List(); + /// + /// All people attached at a Series level. + /// + public ICollection People { get; set; } = new List(); + + /// + /// Highest Age Rating from all Chapters + /// + public AgeRating AgeRating { get; set; } + /// + /// Earliest Year from all chapters + /// + public int ReleaseYear { get; set; } + /// + /// Language of the content (BCP-47 code) + /// + public string Language { get; set; } = string.Empty; + /// + /// Total number of issues/volumes in the series + /// + public int TotalCount { get; set; } = 0; + /// + /// Max number of issues/volumes in the series (Max of Volume/Issue field in ComicInfo) + /// + public int MaxCount { get; set; } = 0; + public PublicationStatus PublicationStatus { get; set; } + + // Locks + public bool LanguageLocked { get; set; } + public bool SummaryLocked { get; set; } + /// + /// Locked by user so metadata updates from scan loop will not override AgeRating + /// + public bool AgeRatingLocked { get; set; } + /// + /// Locked by user so metadata updates from scan loop will not override PublicationStatus + /// + public bool PublicationStatusLocked { get; set; } + public bool GenresLocked { get; set; } + public bool TagsLocked { get; set; } + public bool WriterLocked { get; set; } + public bool CharacterLocked { get; set; } + public bool 
ColoristLocked { get; set; } + public bool EditorLocked { get; set; } + public bool InkerLocked { get; set; } + public bool LettererLocked { get; set; } + public bool PencillerLocked { get; set; } + public bool PublisherLocked { get; set; } + public bool TranslatorLocked { get; set; } + public bool CoverArtistLocked { get; set; } + public bool ReleaseYearLocked { get; set; } + + + // Relationship + public Series Series { get; set; } + public int SeriesId { get; set; } + + /// + [ConcurrencyCheck] + public uint RowVersion { get; private set; } + + + /// + public void OnSavingChanges() { - public int Id { get; set; } - - public string Summary { get; set; } = string.Empty; - - public ICollection CollectionTags { get; set; } - - public ICollection Genres { get; set; } = new List(); - public ICollection Tags { get; set; } = new List(); - /// - /// All people attached at a Series level. - /// - public ICollection People { get; set; } = new List(); - - /// - /// Highest Age Rating from all Chapters - /// - public AgeRating AgeRating { get; set; } - /// - /// Earliest Year from all chapters - /// - public int ReleaseYear { get; set; } - /// - /// Language of the content (BCP-47 code) - /// - public string Language { get; set; } = string.Empty; - /// - /// Total number of issues/volumes in the series - /// - public int TotalCount { get; set; } = 0; - /// - /// Max number of issues/volumes in the series (Max of Volume/Issue field in ComicInfo) - /// - public int MaxCount { get; set; } = 0; - public PublicationStatus PublicationStatus { get; set; } - - // Locks - public bool LanguageLocked { get; set; } - public bool SummaryLocked { get; set; } - /// - /// Locked by user so metadata updates from scan loop will not override AgeRating - /// - public bool AgeRatingLocked { get; set; } - /// - /// Locked by user so metadata updates from scan loop will not override PublicationStatus - /// - public bool PublicationStatusLocked { get; set; } - public bool GenresLocked { get; set; } 
- public bool TagsLocked { get; set; } - public bool WriterLocked { get; set; } - public bool CharacterLocked { get; set; } - public bool ColoristLocked { get; set; } - public bool EditorLocked { get; set; } - public bool InkerLocked { get; set; } - public bool LettererLocked { get; set; } - public bool PencillerLocked { get; set; } - public bool PublisherLocked { get; set; } - public bool TranslatorLocked { get; set; } - public bool CoverArtistLocked { get; set; } - - - // Relationship - public Series Series { get; set; } - public int SeriesId { get; set; } - - /// - [ConcurrencyCheck] - public uint RowVersion { get; private set; } - - - /// - public void OnSavingChanges() - { - RowVersion++; - } + RowVersion++; } } diff --git a/API/Entities/Metadata/SeriesRelation.cs b/API/Entities/Metadata/SeriesRelation.cs index 46e6c34b9..bb152264a 100644 --- a/API/Entities/Metadata/SeriesRelation.cs +++ b/API/Entities/Metadata/SeriesRelation.cs @@ -8,18 +8,18 @@ namespace API.Entities.Metadata; /// A relation flows between one series and another. /// Series ---kind---> target /// -public class SeriesRelation +public sealed class SeriesRelation { public int Id { get; set; } public RelationKind RelationKind { get; set; } - public virtual Series TargetSeries { get; set; } + public Series TargetSeries { get; set; } /// /// A is Sequel to B. In this example, TargetSeries is A. B will hold the foreign key. 
/// public int TargetSeriesId { get; set; } // Relationships - public virtual Series Series { get; set; } + public Series Series { get; set; } public int SeriesId { get; set; } } diff --git a/API/Entities/Person.cs b/API/Entities/Person.cs index 785a037bd..4029b6af9 100644 --- a/API/Entities/Person.cs +++ b/API/Entities/Person.cs @@ -2,23 +2,22 @@ using API.Entities.Enums; using API.Entities.Metadata; -namespace API.Entities -{ - public enum ProviderSource - { - Local = 1, - External = 2 - } - public class Person - { - public int Id { get; set; } - public string Name { get; set; } - public string NormalizedName { get; set; } - public PersonRole Role { get; set; } - //public ProviderSource Source { get; set; } +namespace API.Entities; - // Relationships - public ICollection SeriesMetadatas { get; set; } - public ICollection ChapterMetadatas { get; set; } - } +public enum ProviderSource +{ + Local = 1, + External = 2 +} +public class Person +{ + public int Id { get; set; } + public string Name { get; set; } + public string NormalizedName { get; set; } + public PersonRole Role { get; set; } + //public ProviderSource Source { get; set; } + + // Relationships + public ICollection SeriesMetadatas { get; set; } + public ICollection ChapterMetadatas { get; set; } } diff --git a/API/Entities/ReadingList.cs b/API/Entities/ReadingList.cs index b665203c4..6712fe923 100644 --- a/API/Entities/ReadingList.cs +++ b/API/Entities/ReadingList.cs @@ -1,39 +1,45 @@ using System; using System.Collections.Generic; +using API.Entities.Enums; using API.Entities.Interfaces; -namespace API.Entities +namespace API.Entities; + +/// +/// This is a collection of which represent individual chapters and an order. +/// +public class ReadingList : IEntityDate { + public int Id { get; init; } + public string Title { get; set; } /// - /// This is a collection of which represent individual chapters and an order. 
+ /// A normalized string used to check if the reading list already exists in the DB /// - public class ReadingList : IEntityDate - { - public int Id { get; init; } - public string Title { get; set; } - /// - /// A normalized string used to check if the reading list already exists in the DB - /// - public string NormalizedTitle { get; set; } - public string Summary { get; set; } - /// - /// Reading lists that are promoted are only done by admins - /// - public bool Promoted { get; set; } - /// - /// Absolute path to the (managed) image file - /// - /// The file is managed internally to Kavita's APPDIR - public string CoverImage { get; set; } - public bool CoverImageLocked { get; set; } + public string NormalizedTitle { get; set; } + public string Summary { get; set; } + /// + /// Reading lists that are promoted are only done by admins + /// + public bool Promoted { get; set; } + /// + /// Absolute path to the (managed) image file + /// + /// The file is managed internally to Kavita's APPDIR + public string CoverImage { get; set; } + public bool CoverImageLocked { get; set; } - public ICollection Items { get; set; } - public DateTime Created { get; set; } - public DateTime LastModified { get; set; } + /// + /// The highest age rating from all Series within the reading list + /// + /// Introduced in v0.6 + public AgeRating AgeRating { get; set; } = AgeRating.Unknown; - // Relationships - public int AppUserId { get; set; } - public AppUser AppUser { get; set; } + public ICollection Items { get; set; } + public DateTime Created { get; set; } + public DateTime LastModified { get; set; } + + // Relationships + public int AppUserId { get; set; } + public AppUser AppUser { get; set; } - } } diff --git a/API/Entities/ReadingListItem.cs b/API/Entities/ReadingListItem.cs index a7c7982b2..a68042d3d 100644 --- a/API/Entities/ReadingListItem.cs +++ b/API/Entities/ReadingListItem.cs @@ -1,24 +1,23 @@ -namespace API.Entities +namespace API.Entities; + +public class ReadingListItem 
{ - public class ReadingListItem - { - public int Id { get; init; } - public int SeriesId { get; set; } - public int VolumeId { get; set; } - public int ChapterId { get; set; } - /// - /// Order of the chapter within a Reading List - /// - public int Order { get; set; } + public int Id { get; init; } + public int SeriesId { get; set; } + public int VolumeId { get; set; } + public int ChapterId { get; set; } + /// + /// Order of the chapter within a Reading List + /// + public int Order { get; set; } - // Relationship - public ReadingList ReadingList { get; set; } - public int ReadingListId { get; set; } + // Relationship + public ReadingList ReadingList { get; set; } + public int ReadingListId { get; set; } - // Keep these for easy join statements - public Series Series { get; set; } - public Volume Volume { get; set; } - public Chapter Chapter { get; set; } + // Keep these for easy join statements + public Series Series { get; set; } + public Volume Volume { get; set; } + public Chapter Chapter { get; set; } - } } diff --git a/API/Entities/ServerSetting.cs b/API/Entities/ServerSetting.cs index 6c4b5f21d..277bb6569 100644 --- a/API/Entities/ServerSetting.cs +++ b/API/Entities/ServerSetting.cs @@ -2,25 +2,24 @@ using API.Entities.Enums; using API.Entities.Interfaces; -namespace API.Entities +namespace API.Entities; + +public class ServerSetting : IHasConcurrencyToken { - public class ServerSetting : IHasConcurrencyToken + [Key] + public ServerSettingKey Key { get; set; } + /// + /// The value of the Setting. Converter knows how to convert to the correct type + /// + public string Value { get; set; } + + /// + [ConcurrencyCheck] + public uint RowVersion { get; private set; } + + /// + public void OnSavingChanges() { - [Key] - public ServerSettingKey Key { get; set; } - /// - /// The value of the Setting. 
Converter knows how to convert to the correct type - /// - public string Value { get; set; } - - /// - [ConcurrencyCheck] - public uint RowVersion { get; private set; } - - /// - public void OnSavingChanges() - { - RowVersion++; - } + RowVersion++; } } diff --git a/API/Entities/Volume.cs b/API/Entities/Volume.cs index 06a61e11c..2caddbb73 100644 --- a/API/Entities/Volume.cs +++ b/API/Entities/Volume.cs @@ -2,45 +2,44 @@ using System.Collections.Generic; using API.Entities.Interfaces; -namespace API.Entities +namespace API.Entities; + +public class Volume : IEntityDate, IHasReadTimeEstimate { - public class Volume : IEntityDate, IHasReadTimeEstimate - { - public int Id { get; set; } - /// - /// A String representation of the volume number. Allows for floats. - /// - /// For Books with Series_index, this will map to the Series Index. - public string Name { get; set; } - /// - /// The minimum number in the Name field in Int form - /// - public int Number { get; set; } - public IList Chapters { get; set; } - public DateTime Created { get; set; } - public DateTime LastModified { get; set; } - /// - /// Absolute path to the (managed) image file - /// - /// The file is managed internally to Kavita's APPDIR - public string CoverImage { get; set; } - /// - /// Total pages of all chapters in this volume - /// - public int Pages { get; set; } - /// - /// Total Word count of all chapters in this volume. - /// - /// Word Count is only available from EPUB files - public long WordCount { get; set; } - public int MinHoursToRead { get; set; } - public int MaxHoursToRead { get; set; } - public int AvgHoursToRead { get; set; } + public int Id { get; set; } + /// + /// A String representation of the volume number. Allows for floats. + /// + /// For Books with Series_index, this will map to the Series Index. 
+ public string Name { get; set; } + /// + /// The minimum number in the Name field in Int form + /// + public int Number { get; set; } + public IList Chapters { get; set; } + public DateTime Created { get; set; } + public DateTime LastModified { get; set; } + /// + /// Absolute path to the (managed) image file + /// + /// The file is managed internally to Kavita's APPDIR + public string CoverImage { get; set; } + /// + /// Total pages of all chapters in this volume + /// + public int Pages { get; set; } + /// + /// Total Word count of all chapters in this volume. + /// + /// Word Count is only available from EPUB files + public long WordCount { get; set; } + public int MinHoursToRead { get; set; } + public int MaxHoursToRead { get; set; } + public int AvgHoursToRead { get; set; } - // Relationships - public Series Series { get; set; } - public int SeriesId { get; set; } + // Relationships + public Series Series { get; set; } + public int SeriesId { get; set; } - } } diff --git a/API/Errors/ApiException.cs b/API/Errors/ApiException.cs index 1d570e8ff..d67a97f8a 100644 --- a/API/Errors/ApiException.cs +++ b/API/Errors/ApiException.cs @@ -1,16 +1,15 @@ -namespace API.Errors -{ - public class ApiException - { - public int Status { get; init; } - public string Message { get; init; } - public string Details { get; init; } +namespace API.Errors; - public ApiException(int status, string message = null, string details = null) - { - Status = status; - Message = message; - Details = details; - } +public class ApiException +{ + public int Status { get; init; } + public string Message { get; init; } + public string Details { get; init; } + + public ApiException(int status, string message = null, string details = null) + { + Status = status; + Message = message; + Details = details; } } diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs index d4fa19258..ba2e2f6cf 100644 --- 
a/API/Extensions/ApplicationServiceExtensions.cs +++ b/API/Extensions/ApplicationServiceExtensions.cs @@ -7,82 +7,68 @@ using API.Services.Tasks.Metadata; using API.Services.Tasks.Scanner; using API.SignalR; using API.SignalR.Presence; -using Kavita.Common; using Microsoft.AspNetCore.Hosting; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -namespace API.Extensions +namespace API.Extensions; + +public static class ApplicationServiceExtensions { - public static class ApplicationServiceExtensions + public static void AddApplicationServices(this IServiceCollection services, IConfiguration config, IWebHostEnvironment env) { - public static void AddApplicationServices(this IServiceCollection services, IConfiguration config, IWebHostEnvironment env) + services.AddAutoMapper(typeof(AutoMapperProfiles).Assembly); + + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + + services.AddScoped(); + services.AddScoped(); + + services.AddSqLite(env); + services.AddSignalR(opt => opt.EnableDetailedErrors = true); + } + + private static void AddSqLite(this IServiceCollection services, IHostEnvironment env) + { + services.AddDbContext(options => { - 
services.AddAutoMapper(typeof(AutoMapperProfiles).Assembly); - - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - services.AddScoped(); - - - - services.AddScoped(); - services.AddScoped(); - - services.AddSqLite(config, env); - services.AddLogging(config); - services.AddSignalR(opt => opt.EnableDetailedErrors = true); - } - - private static void AddSqLite(this IServiceCollection services, IConfiguration config, - IHostEnvironment env) - { - services.AddDbContext(options => - { - options.UseSqlite(config.GetConnectionString("DefaultConnection")); - options.EnableDetailedErrors(); - options.EnableSensitiveDataLogging(env.IsDevelopment() || Configuration.LogLevel.Equals("Debug")); - }); - } - - private static void AddLogging(this IServiceCollection services, IConfiguration config) - { - services.AddLogging(loggingBuilder => - { - var loggingSection = config.GetSection("Logging"); - loggingBuilder.AddFile(loggingSection); - }); - } + options.UseSqlite("Data source=config/kavita.db"); + options.EnableDetailedErrors(); + options.EnableSensitiveDataLogging(env.IsDevelopment()); + }); } } diff --git a/API/Extensions/ChapterListExtensions.cs b/API/Extensions/ChapterListExtensions.cs index a5d80bb92..c00fa1873 100644 --- a/API/Extensions/ChapterListExtensions.cs +++ b/API/Extensions/ChapterListExtensions.cs @@ -3,33 +3,42 @@ using System.Linq; using 
API.Entities; using API.Parser; -namespace API.Extensions -{ - public static class ChapterListExtensions - { - /// - /// Returns first chapter in the list with at least one file - /// - /// - /// - public static Chapter GetFirstChapterWithFiles(this IList chapters) - { - return chapters.FirstOrDefault(c => c.Files.Any()); - } +namespace API.Extensions; - /// - /// Gets a single chapter (or null if doesn't exist) where Range matches the info.Chapters property. If the info - /// is then, the filename is used to search against Range or if filename exists within Files of said Chapter. - /// - /// - /// - /// - public static Chapter GetChapterByRange(this IList chapters, ParserInfo info) - { - var specialTreatment = info.IsSpecialInfo(); - return specialTreatment - ? chapters.FirstOrDefault(c => c.Range == info.Filename || (c.Files.Select(f => f.FilePath).Contains(info.FullFilePath))) - : chapters.FirstOrDefault(c => c.Range == info.Chapters); - } +public static class ChapterListExtensions +{ + /// + /// Returns first chapter in the list with at least one file + /// + /// + /// + public static Chapter GetFirstChapterWithFiles(this IList chapters) + { + return chapters.FirstOrDefault(c => c.Files.Any()); + } + + /// + /// Gets a single chapter (or null if doesn't exist) where Range matches the info.Chapters property. If the info + /// is then, the filename is used to search against Range or if filename exists within Files of said Chapter. + /// + /// + /// + /// + public static Chapter GetChapterByRange(this IList chapters, ParserInfo info) + { + var specialTreatment = info.IsSpecialInfo(); + return specialTreatment + ? 
chapters.FirstOrDefault(c => c.Range == info.Filename || (c.Files.Select(f => f.FilePath).Contains(info.FullFilePath))) + : chapters.FirstOrDefault(c => c.Range == info.Chapters); + } + + /// + /// Returns the minimum Release Year from all Chapters that meets the year requirement (>= 1000) + /// + /// + /// + public static int MinimumReleaseYear(this IList chapters) + { + return chapters.Select(v => v.ReleaseDate.Year).Where(y => y >= 1000).DefaultIfEmpty().Min(); } } diff --git a/API/Extensions/ClaimsPrincipalExtensions.cs b/API/Extensions/ClaimsPrincipalExtensions.cs index 61ece5676..f351aea42 100644 --- a/API/Extensions/ClaimsPrincipalExtensions.cs +++ b/API/Extensions/ClaimsPrincipalExtensions.cs @@ -1,15 +1,14 @@ using System.Security.Claims; using Kavita.Common; -namespace API.Extensions +namespace API.Extensions; + +public static class ClaimsPrincipalExtensions { - public static class ClaimsPrincipalExtensions + public static string GetUsername(this ClaimsPrincipal user) { - public static string GetUsername(this ClaimsPrincipal user) - { - var userClaim = user.FindFirst(ClaimTypes.NameIdentifier); - if (userClaim == null) throw new KavitaException("User is not authenticated"); - return userClaim.Value; - } + var userClaim = user.FindFirst(ClaimTypes.NameIdentifier); + if (userClaim == null) throw new KavitaException("User is not authenticated"); + return userClaim.Value; } -} \ No newline at end of file +} diff --git a/API/Extensions/ConfigurationExtensions.cs b/API/Extensions/ConfigurationExtensions.cs index 2388fee21..a5bfe7660 100644 --- a/API/Extensions/ConfigurationExtensions.cs +++ b/API/Extensions/ConfigurationExtensions.cs @@ -1,16 +1,15 @@ using Microsoft.Extensions.Configuration; -namespace API.Extensions +namespace API.Extensions; + +public static class ConfigurationExtensions { - public static class ConfigurationExtensions + public static int GetMaxRollingFiles(this IConfiguration config) { - public static int GetMaxRollingFiles(this 
IConfiguration config) - { - return int.Parse(config.GetSection("Logging").GetSection("File").GetSection("MaxRollingFiles").Value); - } - public static string GetLoggingFileName(this IConfiguration config) - { - return config.GetSection("Logging").GetSection("File").GetSection("Path").Value; - } + return int.Parse(config.GetSection("Logging").GetSection("File").GetSection("MaxRollingFiles").Value); } -} \ No newline at end of file + public static string GetLoggingFileName(this IConfiguration config) + { + return config.GetSection("Logging").GetSection("File").GetSection("Path").Value; + } +} diff --git a/API/Extensions/EnumerableExtensions.cs b/API/Extensions/EnumerableExtensions.cs index 30a75a9eb..679136efb 100644 --- a/API/Extensions/EnumerableExtensions.cs +++ b/API/Extensions/EnumerableExtensions.cs @@ -2,30 +2,43 @@ using System.Collections.Generic; using System.Linq; using System.Text.RegularExpressions; +using API.Data.Misc; +using API.Entities.Enums; -namespace API.Extensions +namespace API.Extensions; + +public static class EnumerableExtensions { - public static class EnumerableExtensions + private static readonly Regex Regex = new Regex(@"\d+", RegexOptions.Compiled, TimeSpan.FromMilliseconds(500)); + + /// + /// A natural sort implementation + /// + /// IEnumerable to process + /// Function that produces a string. Does not support null values + /// Defaults to CurrentCulture + /// + /// Sorted Enumerable + public static IEnumerable OrderByNatural(this IEnumerable items, Func selector, StringComparer stringComparer = null) { - private static readonly Regex Regex = new Regex(@"\d+", RegexOptions.Compiled, TimeSpan.FromMilliseconds(500)); + var list = items.ToList(); + var maxDigits = list + .SelectMany(i => Regex.Matches(selector(i)) + .Select(digitChunk => (int?)digitChunk.Value.Length)) + .Max() ?? 0; - /// - /// A natural sort implementation - /// - /// IEnumerable to process - /// Function that produces a string. 
Does not support null values - /// Defaults to CurrentCulture - /// - /// Sorted Enumerable - public static IEnumerable OrderByNatural(this IEnumerable items, Func selector, StringComparer stringComparer = null) + return list.OrderBy(i => Regex.Replace(selector(i), match => match.Value.PadLeft(maxDigits, '0')), stringComparer ?? StringComparer.CurrentCulture); + } + + public static IEnumerable RestrictAgainstAgeRestriction(this IEnumerable items, AgeRestriction restriction) + { + if (restriction.AgeRating == AgeRating.NotApplicable) return items; + var q = items.Where(s => s.AgeRating <= restriction.AgeRating); + if (!restriction.IncludeUnknowns) { - var list = items.ToList(); - var maxDigits = list - .SelectMany(i => Regex.Matches(selector(i)) - .Select(digitChunk => (int?)digitChunk.Value.Length)) - .Max() ?? 0; - - return list.OrderBy(i => Regex.Replace(selector(i), match => match.Value.PadLeft(maxDigits, '0')), stringComparer ?? StringComparer.CurrentCulture); + return q.Where(s => s.AgeRating != AgeRating.Unknown); } + + return q; } } diff --git a/API/Extensions/FileInfoExtensions.cs b/API/Extensions/FileInfoExtensions.cs index f7e1291e7..1f4ea62e1 100644 --- a/API/Extensions/FileInfoExtensions.cs +++ b/API/Extensions/FileInfoExtensions.cs @@ -1,19 +1,18 @@ using System; using System.IO; -namespace API.Extensions +namespace API.Extensions; + +public static class FileInfoExtensions { - public static class FileInfoExtensions + /// + /// Checks if the last write time of the file is after the passed date + /// + /// + /// + /// + public static bool HasFileBeenModifiedSince(this FileInfo fileInfo, DateTime comparison) { - /// - /// Checks if the last write time of the file is after the passed date - /// - /// - /// - /// - public static bool HasFileBeenModifiedSince(this FileInfo fileInfo, DateTime comparison) - { - return DateTime.Compare(fileInfo.LastWriteTime, comparison) > 0; - } + return DateTime.Compare(fileInfo.LastWriteTime, comparison) > 0; } } diff --git 
a/API/Extensions/FilterDtoExtensions.cs b/API/Extensions/FilterDtoExtensions.cs index b0d9f80f6..bc5b4eb52 100644 --- a/API/Extensions/FilterDtoExtensions.cs +++ b/API/Extensions/FilterDtoExtensions.cs @@ -3,20 +3,19 @@ using System.Collections.Generic; using API.DTOs.Filtering; using API.Entities.Enums; -namespace API.Extensions +namespace API.Extensions; + +public static class FilterDtoExtensions { - public static class FilterDtoExtensions + private static readonly IList AllFormats = Enum.GetValues(); + + public static IList GetSqlFilter(this FilterDto filter) { - private static readonly IList AllFormats = Enum.GetValues(); - - public static IList GetSqlFilter(this FilterDto filter) + if (filter.Formats == null || filter.Formats.Count == 0) { - if (filter.Formats == null || filter.Formats.Count == 0) - { - return AllFormats; - } - - return filter.Formats; + return AllFormats; } + + return filter.Formats; } } diff --git a/API/Extensions/HttpExtensions.cs b/API/Extensions/HttpExtensions.cs index 419483fed..c7820284a 100644 --- a/API/Extensions/HttpExtensions.cs +++ b/API/Extensions/HttpExtensions.cs @@ -9,53 +9,52 @@ using API.Helpers; using Microsoft.AspNetCore.Http; using Microsoft.Net.Http.Headers; -namespace API.Extensions +namespace API.Extensions; + +public static class HttpExtensions { - public static class HttpExtensions + public static void AddPaginationHeader(this HttpResponse response, int currentPage, + int itemsPerPage, int totalItems, int totalPages) { - public static void AddPaginationHeader(this HttpResponse response, int currentPage, - int itemsPerPage, int totalItems, int totalPages) + var paginationHeader = new PaginationHeader(currentPage, itemsPerPage, totalItems, totalPages); + var options = new JsonSerializerOptions() { - var paginationHeader = new PaginationHeader(currentPage, itemsPerPage, totalItems, totalPages); - var options = new JsonSerializerOptions() - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase - }; + PropertyNamingPolicy 
= JsonNamingPolicy.CamelCase + }; - response.Headers.Add("Pagination", JsonSerializer.Serialize(paginationHeader, options)); - response.Headers.Add("Access-Control-Expose-Headers", "Pagination"); - } + response.Headers.Add("Pagination", JsonSerializer.Serialize(paginationHeader, options)); + response.Headers.Add("Access-Control-Expose-Headers", "Pagination"); + } - /// - /// Calculates SHA256 hash for a byte[] and sets as ETag. Ensures Cache-Control: private header is added. - /// - /// - /// If byte[] is null or empty, will only add cache-control - public static void AddCacheHeader(this HttpResponse response, byte[] content) + /// + /// Calculates SHA256 hash for a byte[] and sets as ETag. Ensures Cache-Control: private header is added. + /// + /// + /// If byte[] is null or empty, will only add cache-control + public static void AddCacheHeader(this HttpResponse response, byte[] content) + { + if (content is not {Length: > 0}) return; + using var sha1 = SHA256.Create(); + + response.Headers.Add(HeaderNames.ETag, string.Concat(sha1.ComputeHash(content).Select(x => x.ToString("X2")))); + response.Headers.CacheControl = $"private,max-age=100"; + } + + /// + /// Calculates SHA256 hash for a cover image filename and sets as ETag. Ensures Cache-Control: private header is added. 
+ /// + /// + /// + /// Maximum amount of seconds to set for Cache-Control + public static void AddCacheHeader(this HttpResponse response, string filename, int maxAge = 10) + { + if (filename is not {Length: > 0}) return; + var hashContent = filename + File.GetLastWriteTimeUtc(filename); + using var sha1 = SHA256.Create(); + response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(Encoding.UTF8.GetBytes(hashContent)).Select(x => x.ToString("X2")))); + if (maxAge != 10) { - if (content is not {Length: > 0}) return; - using var sha1 = SHA256.Create(); - - response.Headers.Add(HeaderNames.ETag, string.Concat(sha1.ComputeHash(content).Select(x => x.ToString("X2")))); - response.Headers.CacheControl = $"private,max-age=100"; - } - - /// - /// Calculates SHA256 hash for a cover image filename and sets as ETag. Ensures Cache-Control: private header is added. - /// - /// - /// - /// Maximum amount of seconds to set for Cache-Control - public static void AddCacheHeader(this HttpResponse response, string filename, int maxAge = 10) - { - if (filename is not {Length: > 0}) return; - var hashContent = filename + File.GetLastWriteTimeUtc(filename); - using var sha1 = SHA256.Create(); - response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(Encoding.UTF8.GetBytes(hashContent)).Select(x => x.ToString("X2")))); - if (maxAge != 10) - { - response.Headers.CacheControl = $"max-age={maxAge}"; - } + response.Headers.CacheControl = $"max-age={maxAge}"; } } } diff --git a/API/Extensions/IdentityServiceExtensions.cs b/API/Extensions/IdentityServiceExtensions.cs index 5cc4718bb..6e958638a 100644 --- a/API/Extensions/IdentityServiceExtensions.cs +++ b/API/Extensions/IdentityServiceExtensions.cs @@ -10,79 +10,78 @@ using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.IdentityModel.Tokens; -namespace API.Extensions +namespace API.Extensions; + +public static class IdentityServiceExtensions { - public static class 
IdentityServiceExtensions + public static IServiceCollection AddIdentityServices(this IServiceCollection services, IConfiguration config) { - public static IServiceCollection AddIdentityServices(this IServiceCollection services, IConfiguration config) + services.Configure(options => { - services.Configure(options => + options.User.AllowedUserNameCharacters = + "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-._@+/"; + }); + + services.AddIdentityCore(opt => { - options.User.AllowedUserNameCharacters = - "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-._@+/"; - }); + opt.Password.RequireNonAlphanumeric = false; + opt.Password.RequireDigit = false; + opt.Password.RequireDigit = false; + opt.Password.RequireLowercase = false; + opt.Password.RequireUppercase = false; + opt.Password.RequireNonAlphanumeric = false; + opt.Password.RequiredLength = 6; - services.AddIdentityCore(opt => + opt.SignIn.RequireConfirmedEmail = false; + + opt.Lockout.AllowedForNewUsers = true; + opt.Lockout.DefaultLockoutTimeSpan = TimeSpan.FromMinutes(10); + opt.Lockout.MaxFailedAccessAttempts = 5; + + }) + .AddTokenProvider>(TokenOptions.DefaultProvider) + .AddRoles() + .AddRoleManager>() + .AddSignInManager>() + .AddRoleValidator>() + .AddEntityFrameworkStores(); + + + services.AddAuthentication(JwtBearerDefaults.AuthenticationScheme) + .AddJwtBearer(options => + { + options.TokenValidationParameters = new TokenValidationParameters() { - opt.Password.RequireNonAlphanumeric = false; - opt.Password.RequireDigit = false; - opt.Password.RequireDigit = false; - opt.Password.RequireLowercase = false; - opt.Password.RequireUppercase = false; - opt.Password.RequireNonAlphanumeric = false; - opt.Password.RequiredLength = 6; + ValidateIssuerSigningKey = true, + IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(config["TokenKey"])), + ValidateIssuer = false, + ValidateAudience = false, + ValidIssuer = "Kavita" + }; - opt.SignIn.RequireConfirmedEmail = true; 
- - opt.Lockout.AllowedForNewUsers = true; - opt.Lockout.DefaultLockoutTimeSpan = TimeSpan.FromMinutes(10); - opt.Lockout.MaxFailedAccessAttempts = 5; - - }) - .AddTokenProvider>(TokenOptions.DefaultProvider) - .AddRoles() - .AddRoleManager>() - .AddSignInManager>() - .AddRoleValidator>() - .AddEntityFrameworkStores(); - - - services.AddAuthentication(JwtBearerDefaults.AuthenticationScheme) - .AddJwtBearer(options => + options.Events = new JwtBearerEvents() { - options.TokenValidationParameters = new TokenValidationParameters() + OnMessageReceived = context => { - ValidateIssuerSigningKey = true, - IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(config["TokenKey"])), - ValidateIssuer = false, - ValidateAudience = false, - ValidIssuer = "Kavita" - }; - - options.Events = new JwtBearerEvents() - { - OnMessageReceived = context => + var accessToken = context.Request.Query["access_token"]; + var path = context.HttpContext.Request.Path; + // Only use query string based token on SignalR hubs + if (!string.IsNullOrEmpty(accessToken) && path.StartsWithSegments("/hubs")) { - var accessToken = context.Request.Query["access_token"]; - var path = context.HttpContext.Request.Path; - // Only use query string based token on SignalR hubs - if (!string.IsNullOrEmpty(accessToken) && path.StartsWithSegments("/hubs")) - { - context.Token = accessToken; - } - - return Task.CompletedTask; + context.Token = accessToken; } - }; - }); - services.AddAuthorization(opt => - { - opt.AddPolicy("RequireAdminRole", policy => policy.RequireRole(PolicyConstants.AdminRole)); - opt.AddPolicy("RequireDownloadRole", policy => policy.RequireRole(PolicyConstants.DownloadRole, PolicyConstants.AdminRole)); - opt.AddPolicy("RequireChangePasswordRole", policy => policy.RequireRole(PolicyConstants.ChangePasswordRole, PolicyConstants.AdminRole)); - }); - return services; - } + return Task.CompletedTask; + } + }; + }); + services.AddAuthorization(opt => + { + opt.AddPolicy("RequireAdminRole", 
policy => policy.RequireRole(PolicyConstants.AdminRole)); + opt.AddPolicy("RequireDownloadRole", policy => policy.RequireRole(PolicyConstants.DownloadRole, PolicyConstants.AdminRole)); + opt.AddPolicy("RequireChangePasswordRole", policy => policy.RequireRole(PolicyConstants.ChangePasswordRole, PolicyConstants.AdminRole)); + }); + + return services; } } diff --git a/API/Extensions/ParserInfoListExtensions.cs b/API/Extensions/ParserInfoListExtensions.cs index 1bca8787b..9bea79ce9 100644 --- a/API/Extensions/ParserInfoListExtensions.cs +++ b/API/Extensions/ParserInfoListExtensions.cs @@ -3,31 +3,30 @@ using System.Linq; using API.Entities; using API.Parser; -namespace API.Extensions -{ - public static class ParserInfoListExtensions - { - /// - /// Selects distinct volume numbers by the "Volumes" key on the ParserInfo - /// - /// - /// - public static IList DistinctVolumes(this IList infos) - { - return infos.Select(p => p.Volumes).Distinct().ToList(); - } +namespace API.Extensions; - /// - /// Checks if a list of ParserInfos has a given chapter or not. Lookup occurs on Range property. If a chapter is - /// special, then the is matched, else the field is checked. - /// - /// - /// - /// - public static bool HasInfo(this IList infos, Chapter chapter) - { - return chapter.IsSpecial ? infos.Any(v => v.Filename == chapter.Range) - : infos.Any(v => v.Chapters == chapter.Range); - } +public static class ParserInfoListExtensions +{ + /// + /// Selects distinct volume numbers by the "Volumes" key on the ParserInfo + /// + /// + /// + public static IList DistinctVolumes(this IList infos) + { + return infos.Select(p => p.Volumes).Distinct().ToList(); + } + + /// + /// Checks if a list of ParserInfos has a given chapter or not. Lookup occurs on Range property. If a chapter is + /// special, then the is matched, else the field is checked. + /// + /// + /// + /// + public static bool HasInfo(this IList infos, Chapter chapter) + { + return chapter.IsSpecial ? 
infos.Any(v => v.Filename == chapter.Range) + : infos.Any(v => v.Chapters == chapter.Range); } } diff --git a/API/Extensions/QueryableExtensions.cs b/API/Extensions/QueryableExtensions.cs new file mode 100644 index 000000000..ec0b81257 --- /dev/null +++ b/API/Extensions/QueryableExtensions.cs @@ -0,0 +1,113 @@ +using System.Linq; +using System.Threading.Tasks; +using API.Data.Misc; +using API.Entities; +using API.Entities.Enums; +using Microsoft.EntityFrameworkCore; + +namespace API.Extensions; + +public static class QueryableExtensions +{ + public static IQueryable RestrictAgainstAgeRestriction(this IQueryable queryable, AgeRestriction restriction) + { + if (restriction.AgeRating == AgeRating.NotApplicable) return queryable; + var q = queryable.Where(s => s.Metadata.AgeRating <= restriction.AgeRating); + if (!restriction.IncludeUnknowns) + { + return q.Where(s => s.Metadata.AgeRating != AgeRating.Unknown); + } + + return q; + } + + public static IQueryable RestrictAgainstAgeRestriction(this IQueryable queryable, AgeRestriction restriction) + { + if (restriction.AgeRating == AgeRating.NotApplicable) return queryable; + + if (restriction.IncludeUnknowns) + { + return queryable.Where(c => c.SeriesMetadatas.All(sm => + sm.AgeRating <= restriction.AgeRating)); + } + + return queryable.Where(c => c.SeriesMetadatas.All(sm => + sm.AgeRating <= restriction.AgeRating && sm.AgeRating > AgeRating.Unknown)); + } + + public static IQueryable RestrictAgainstAgeRestriction(this IQueryable queryable, AgeRestriction restriction) + { + if (restriction.AgeRating == AgeRating.NotApplicable) return queryable; + + if (restriction.IncludeUnknowns) + { + return queryable.Where(c => c.SeriesMetadatas.All(sm => + sm.AgeRating <= restriction.AgeRating)); + } + + return queryable.Where(c => c.SeriesMetadatas.All(sm => + sm.AgeRating <= restriction.AgeRating && sm.AgeRating > AgeRating.Unknown)); + } + + public static IQueryable RestrictAgainstAgeRestriction(this IQueryable queryable, 
AgeRestriction restriction) + { + if (restriction.AgeRating == AgeRating.NotApplicable) return queryable; + + if (restriction.IncludeUnknowns) + { + return queryable.Where(c => c.SeriesMetadatas.All(sm => + sm.AgeRating <= restriction.AgeRating)); + } + + return queryable.Where(c => c.SeriesMetadatas.All(sm => + sm.AgeRating <= restriction.AgeRating && sm.AgeRating > AgeRating.Unknown)); + } + + public static IQueryable RestrictAgainstAgeRestriction(this IQueryable queryable, AgeRestriction restriction) + { + if (restriction.AgeRating == AgeRating.NotApplicable) return queryable; + + if (restriction.IncludeUnknowns) + { + return queryable.Where(c => c.SeriesMetadatas.All(sm => + sm.AgeRating <= restriction.AgeRating)); + } + + return queryable.Where(c => c.SeriesMetadatas.All(sm => + sm.AgeRating <= restriction.AgeRating && sm.AgeRating > AgeRating.Unknown)); + } + + public static IQueryable RestrictAgainstAgeRestriction(this IQueryable queryable, AgeRestriction restriction) + { + if (restriction.AgeRating == AgeRating.NotApplicable) return queryable; + var q = queryable.Where(rl => rl.AgeRating <= restriction.AgeRating); + + if (!restriction.IncludeUnknowns) + { + return q.Where(rl => rl.AgeRating != AgeRating.Unknown); + } + + return q; + } + + public static Task GetUserAgeRestriction(this DbSet queryable, int userId) + { + if (userId < 1) + { + return Task.FromResult(new AgeRestriction() + { + AgeRating = AgeRating.NotApplicable, + IncludeUnknowns = true + }); + } + return queryable + .AsNoTracking() + .Where(u => u.Id == userId) + .Select(u => + new AgeRestriction(){ + AgeRating = u.AgeRestriction, + IncludeUnknowns = u.AgeRestrictionIncludeUnknowns + }) + .SingleAsync(); + } +} diff --git a/API/Extensions/SeriesExtensions.cs b/API/Extensions/SeriesExtensions.cs index acd828480..ad5ec3130 100644 --- a/API/Extensions/SeriesExtensions.cs +++ b/API/Extensions/SeriesExtensions.cs @@ -1,49 +1,71 @@ using System.Collections.Generic; using System.Linq; +using 
API.Comparators; using API.Entities; using API.Parser; using API.Services.Tasks.Scanner; -namespace API.Extensions +namespace API.Extensions; + +public static class SeriesExtensions { - public static class SeriesExtensions + /// + /// Checks against all the name variables of the Series if it matches anything in the list. This does not check against format. + /// + /// + /// + /// + public static bool NameInList(this Series series, IEnumerable list) { - /// - /// Checks against all the name variables of the Series if it matches anything in the list. This does not check against format. - /// - /// - /// - /// - public static bool NameInList(this Series series, IEnumerable list) + return list.Any(name => Services.Tasks.Scanner.Parser.Parser.Normalize(name) == series.NormalizedName || Services.Tasks.Scanner.Parser.Parser.Normalize(name) == Services.Tasks.Scanner.Parser.Parser.Normalize(series.Name) + || name == series.Name || name == series.LocalizedName || name == series.OriginalName || Services.Tasks.Scanner.Parser.Parser.Normalize(name) == Services.Tasks.Scanner.Parser.Parser.Normalize(series.OriginalName)); + } + + /// + /// Checks against all the name variables of the Series if it matches anything in the list. 
Includes a check against the Format of the Series + /// + /// + /// + /// + public static bool NameInList(this Series series, IEnumerable list) + { + return list.Any(name => Services.Tasks.Scanner.Parser.Parser.Normalize(name.Name) == series.NormalizedName || Services.Tasks.Scanner.Parser.Parser.Normalize(name.Name) == Services.Tasks.Scanner.Parser.Parser.Normalize(series.Name) + || name.Name == series.Name || name.Name == series.LocalizedName || name.Name == series.OriginalName || Services.Tasks.Scanner.Parser.Parser.Normalize(name.Name) == Services.Tasks.Scanner.Parser.Parser.Normalize(series.OriginalName) && series.Format == name.Format); + } + + /// + /// Checks against all the name variables of the Series if it matches the + /// + /// + /// + /// + public static bool NameInParserInfo(this Series series, ParserInfo info) + { + if (info == null) return false; + return Services.Tasks.Scanner.Parser.Parser.Normalize(info.Series) == series.NormalizedName || Services.Tasks.Scanner.Parser.Parser.Normalize(info.Series) == Services.Tasks.Scanner.Parser.Parser.Normalize(series.Name) + || info.Series == series.Name || info.Series == series.LocalizedName || info.Series == series.OriginalName + || Services.Tasks.Scanner.Parser.Parser.Normalize(info.Series) == Services.Tasks.Scanner.Parser.Parser.Normalize(series.OriginalName); + } + + /// + /// Calculates the Cover Image for the Series + /// + /// + /// + /// This is under the assumption that the Volume already has a Cover Image calculated and set + public static string GetCoverImage(this Series series) + { + var volumes = series.Volumes ?? 
new List(); + var firstVolume = volumes.GetCoverImage(series.Format); + string coverImage = null; + + var chapters = firstVolume.Chapters.OrderBy(c => double.Parse(c.Number), ChapterSortComparerZeroFirst.Default).ToList(); + if (chapters.Count > 1 && chapters.Any(c => c.IsSpecial)) { - return list.Any(name => Services.Tasks.Scanner.Parser.Parser.Normalize(name) == series.NormalizedName || Services.Tasks.Scanner.Parser.Parser.Normalize(name) == Services.Tasks.Scanner.Parser.Parser.Normalize(series.Name) - || name == series.Name || name == series.LocalizedName || name == series.OriginalName || Services.Tasks.Scanner.Parser.Parser.Normalize(name) == Services.Tasks.Scanner.Parser.Parser.Normalize(series.OriginalName)); + coverImage = chapters.FirstOrDefault(c => !c.IsSpecial)?.CoverImage ?? chapters.First().CoverImage; + firstVolume = null; } - /// - /// Checks against all the name variables of the Series if it matches anything in the list. Includes a check against the Format of the Series - /// - /// - /// - /// - public static bool NameInList(this Series series, IEnumerable list) - { - return list.Any(name => Services.Tasks.Scanner.Parser.Parser.Normalize(name.Name) == series.NormalizedName || Services.Tasks.Scanner.Parser.Parser.Normalize(name.Name) == Services.Tasks.Scanner.Parser.Parser.Normalize(series.Name) - || name.Name == series.Name || name.Name == series.LocalizedName || name.Name == series.OriginalName || Services.Tasks.Scanner.Parser.Parser.Normalize(name.Name) == Services.Tasks.Scanner.Parser.Parser.Normalize(series.OriginalName) && series.Format == name.Format); - } - - /// - /// Checks against all the name variables of the Series if it matches the - /// - /// - /// - /// - public static bool NameInParserInfo(this Series series, ParserInfo info) - { - if (info == null) return false; - return Services.Tasks.Scanner.Parser.Parser.Normalize(info.Series) == series.NormalizedName || Services.Tasks.Scanner.Parser.Parser.Normalize(info.Series) == 
Services.Tasks.Scanner.Parser.Parser.Normalize(series.Name) - || info.Series == series.Name || info.Series == series.LocalizedName || info.Series == series.OriginalName - || Services.Tasks.Scanner.Parser.Parser.Normalize(info.Series) == Services.Tasks.Scanner.Parser.Parser.Normalize(series.OriginalName); - } + return firstVolume?.CoverImage ?? coverImage; } } diff --git a/API/Extensions/VolumeListExtensions.cs b/API/Extensions/VolumeListExtensions.cs index 8933e04a5..5c9084764 100644 --- a/API/Extensions/VolumeListExtensions.cs +++ b/API/Extensions/VolumeListExtensions.cs @@ -4,29 +4,30 @@ using API.Comparators; using API.Entities; using API.Entities.Enums; -namespace API.Extensions -{ - public static class VolumeListExtensions - { - /// - /// Selects the first Volume to get the cover image from. For a book with only a special, the special will be returned. - /// If there are both specials and non-specials, then the first non-special will be returned. - /// - /// - /// - /// - public static Volume GetCoverImage(this IList volumes, MangaFormat seriesFormat) - { - if (seriesFormat is MangaFormat.Epub or MangaFormat.Pdf) - { - return volumes.OrderBy(x => x.Number).FirstOrDefault(); - } +namespace API.Extensions; - if (volumes.Any(x => x.Number != 0)) - { - return volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0); - } - return volumes.OrderBy(x => x.Number).FirstOrDefault(); +public static class VolumeListExtensions +{ + /// + /// Selects the first Volume to get the cover image from. For a book with only a special, the special will be returned. + /// If there are both specials and non-specials, then the first non-special will be returned. 
+ /// + /// + /// + /// + public static Volume GetCoverImage(this IList volumes, MangaFormat seriesFormat) + { + if (seriesFormat == MangaFormat.Epub || seriesFormat == MangaFormat.Pdf) + { + return volumes.MinBy(x => x.Number); } + + if (volumes.Any(x => x.Number != 0)) + { + return volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0); + } + + // We only have 1 volume of chapters, we need to be cautious if there are specials, as we don't want to order them first + return volumes.MinBy(x => x.Number); } } diff --git a/API/Extensions/ZipArchiveExtensions.cs b/API/Extensions/ZipArchiveExtensions.cs index a871162e8..89a083490 100644 --- a/API/Extensions/ZipArchiveExtensions.cs +++ b/API/Extensions/ZipArchiveExtensions.cs @@ -2,18 +2,17 @@ using System.IO.Compression; using System.Linq; -namespace API.Extensions +namespace API.Extensions; + +public static class ZipArchiveExtensions { - public static class ZipArchiveExtensions + /// + /// Checks if archive has one or more files. Excludes directory entries. + /// + /// + /// + public static bool HasFiles(this ZipArchive archive) { - /// - /// Checks if archive has one or more files. Excludes directory entries. 
- /// - /// - /// - public static bool HasFiles(this ZipArchive archive) - { - return archive.Entries.Any(x => Path.HasExtension(x.FullName)); - } + return archive.Entries.Any(x => Path.HasExtension(x.FullName)); } -} \ No newline at end of file +} diff --git a/API/Helpers/AutoMapperProfiles.cs b/API/Helpers/AutoMapperProfiles.cs index a15913374..d89a3f9e0 100644 --- a/API/Helpers/AutoMapperProfiles.cs +++ b/API/Helpers/AutoMapperProfiles.cs @@ -1,12 +1,13 @@ using System.Collections.Generic; using System.Linq; using API.DTOs; +using API.DTOs.Account; using API.DTOs.CollectionTags; +using API.DTOs.Device; using API.DTOs.Metadata; using API.DTOs.Reader; using API.DTOs.ReadingLists; using API.DTOs.Search; -using API.DTOs.SeriesDetail; using API.DTOs.Settings; using API.DTOs.Theme; using API.Entities; @@ -15,135 +16,145 @@ using API.Entities.Metadata; using API.Helpers.Converters; using AutoMapper; -namespace API.Helpers +namespace API.Helpers; + +public class AutoMapperProfiles : Profile { - public class AutoMapperProfiles : Profile + public AutoMapperProfiles() { - public AutoMapperProfiles() - { - CreateMap(); - CreateMap(); - CreateMap(); - CreateMap(); - CreateMap(); - CreateMap(); - CreateMap(); - CreateMap(); - CreateMap(); - CreateMap(); - CreateMap(); + CreateMap(); + CreateMap(); + CreateMap(); + CreateMap(); + CreateMap(); + CreateMap(); + CreateMap(); + CreateMap(); + CreateMap(); + CreateMap(); + CreateMap(); - CreateMap() - .ForMember(dest => dest.Writers, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Writer))) - .ForMember(dest => dest.CoverArtists, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.CoverArtist))) - .ForMember(dest => dest.Characters, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Character))) - .ForMember(dest => dest.Publishers, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Publisher))) - .ForMember(dest => dest.Colorists, - opt => - 
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Colorist))) - .ForMember(dest => dest.Inkers, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Inker))) - .ForMember(dest => dest.Letterers, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Letterer))) - .ForMember(dest => dest.Pencillers, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Penciller))) - .ForMember(dest => dest.Translators, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Translator))) - .ForMember(dest => dest.Editors, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Editor))); + CreateMap() + .ForMember(dest => dest.Writers, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Writer))) + .ForMember(dest => dest.CoverArtists, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.CoverArtist))) + .ForMember(dest => dest.Characters, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Character))) + .ForMember(dest => dest.Publishers, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Publisher))) + .ForMember(dest => dest.Colorists, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Colorist))) + .ForMember(dest => dest.Inkers, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Inker))) + .ForMember(dest => dest.Letterers, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Letterer))) + .ForMember(dest => dest.Pencillers, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Penciller))) + .ForMember(dest => dest.Translators, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Translator))) + .ForMember(dest => dest.Editors, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Editor))); - CreateMap() - .ForMember(dest => dest.Writers, - opt => - opt.MapFrom(src => 
src.People.Where(p => p.Role == PersonRole.Writer))) - .ForMember(dest => dest.CoverArtists, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.CoverArtist))) - .ForMember(dest => dest.Colorists, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Colorist))) - .ForMember(dest => dest.Inkers, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Inker))) - .ForMember(dest => dest.Letterers, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Letterer))) - .ForMember(dest => dest.Pencillers, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Penciller))) - .ForMember(dest => dest.Publishers, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Publisher))) - .ForMember(dest => dest.Translators, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Translator))) - .ForMember(dest => dest.Characters, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Character))) - .ForMember(dest => dest.Editors, - opt => - opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Editor))); + CreateMap() + .ForMember(dest => dest.Writers, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Writer))) + .ForMember(dest => dest.CoverArtists, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.CoverArtist))) + .ForMember(dest => dest.Colorists, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Colorist))) + .ForMember(dest => dest.Inkers, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Inker))) + .ForMember(dest => dest.Letterers, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Letterer))) + .ForMember(dest => dest.Pencillers, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Penciller))) + .ForMember(dest => dest.Publishers, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role 
== PersonRole.Publisher))) + .ForMember(dest => dest.Translators, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Translator))) + .ForMember(dest => dest.Characters, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Character))) + .ForMember(dest => dest.Editors, + opt => + opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Editor))); - // CreateMap() - // .ForMember(dest => dest.Adaptations, - // opt => - // opt.MapFrom(src => src.Where(p => p.Role == PersonRole.Writer))) - - CreateMap(); - CreateMap(); - CreateMap() - .ForMember(dest => dest.Theme, - opt => - opt.MapFrom(src => src.Theme)) - .ForMember(dest => dest.BookReaderThemeName, - opt => - opt.MapFrom(src => src.BookThemeName)) - .ForMember(dest => dest.BookReaderLayoutMode, - opt => - opt.MapFrom(src => src.BookReaderLayoutMode)); + CreateMap() + .ForMember(dest => dest.AgeRestriction, + opt => + opt.MapFrom(src => new AgeRestrictionDto() + { + AgeRating = src.AgeRestriction, + IncludeUnknowns = src.AgeRestrictionIncludeUnknowns + })); + CreateMap(); + CreateMap() + .ForMember(dest => dest.Theme, + opt => + opt.MapFrom(src => src.Theme)) + .ForMember(dest => dest.BookReaderThemeName, + opt => + opt.MapFrom(src => src.BookThemeName)) + .ForMember(dest => dest.BookReaderLayoutMode, + opt => + opt.MapFrom(src => src.BookReaderLayoutMode)); - CreateMap(); + CreateMap(); - CreateMap(); - CreateMap(); + CreateMap(); + CreateMap(); - CreateMap() - .ForMember(dest => dest.SeriesId, - opt => opt.MapFrom(src => src.Id)) - .ForMember(dest => dest.LibraryName, - opt => opt.MapFrom(src => src.Library.Name)); + CreateMap() + .ForMember(dest => dest.SeriesId, + opt => opt.MapFrom(src => src.Id)) + .ForMember(dest => dest.LibraryName, + opt => opt.MapFrom(src => src.Library.Name)); - CreateMap() - .ForMember(dest => dest.Folders, - opt => - opt.MapFrom(src => src.Folders.Select(x => x.Path).ToList())); + CreateMap() + .ForMember(dest => dest.Folders, + opt => + 
opt.MapFrom(src => src.Folders.Select(x => x.Path).ToList())); - CreateMap() - .AfterMap((ps, pst, context) => context.Mapper.Map(ps.Libraries, pst.Libraries)); + CreateMap() + .ForMember(dest => dest.AgeRestriction, + opt => + opt.MapFrom(src => new AgeRestrictionDto() + { + AgeRating = src.AgeRestriction, + IncludeUnknowns = src.AgeRestrictionIncludeUnknowns + })) + .AfterMap((ps, pst, context) => context.Mapper.Map(ps.Libraries, pst.Libraries)); - CreateMap(); + CreateMap(); - CreateMap, ServerSettingDto>() - .ConvertUsing(); + CreateMap, ServerSettingDto>() + .ConvertUsing(); - CreateMap, ServerSettingDto>() - .ConvertUsing(); + CreateMap, ServerSettingDto>() + .ConvertUsing(); + + CreateMap(); - } } } diff --git a/API/Helpers/CacheHelper.cs b/API/Helpers/CacheHelper.cs index 3f4382d57..06a2ba764 100644 --- a/API/Helpers/CacheHelper.cs +++ b/API/Helpers/CacheHelper.cs @@ -13,7 +13,7 @@ public interface ICacheHelper bool CoverImageExists(string path); - bool HasFileNotChangedSinceCreationOrLastScan(IEntityDate chapter, bool forceUpdate, MangaFile firstFile); + bool IsFileUnmodifiedSinceCreationOrLastScan(IEntityDate chapter, bool forceUpdate, MangaFile firstFile); bool HasFileChangedSinceLastScan(DateTime lastScan, bool forceUpdate, MangaFile firstFile); } @@ -49,13 +49,13 @@ public class CacheHelper : ICacheHelper } /// - /// Has the file been modified since last scan or is user forcing an update + /// Has the file not been modified since last scan or is user forcing an update /// /// /// /// /// - public bool HasFileNotChangedSinceCreationOrLastScan(IEntityDate chapter, bool forceUpdate, MangaFile firstFile) + public bool IsFileUnmodifiedSinceCreationOrLastScan(IEntityDate chapter, bool forceUpdate, MangaFile firstFile) { return firstFile != null && (!forceUpdate && diff --git a/API/Helpers/Converters/CronConverter.cs b/API/Helpers/Converters/CronConverter.cs index 4f8305e01..4e9547c6c 100644 --- a/API/Helpers/Converters/CronConverter.cs +++ 
b/API/Helpers/Converters/CronConverter.cs @@ -1,29 +1,28 @@ using System.Collections.Generic; using Hangfire; -namespace API.Helpers.Converters -{ - public static class CronConverter - { - public static readonly IEnumerable Options = new [] - { - "disabled", - "daily", - "weekly", - }; - public static string ConvertToCronNotation(string source) - { - var destination = string.Empty; - destination = source.ToLower() switch - { - "daily" => Cron.Daily(), - "weekly" => Cron.Weekly(), - "disabled" => Cron.Never(), - "" => Cron.Never(), - _ => destination - }; +namespace API.Helpers.Converters; - return destination; - } +public static class CronConverter +{ + public static readonly IEnumerable Options = new [] + { + "disabled", + "daily", + "weekly", + }; + public static string ConvertToCronNotation(string source) + { + var destination = string.Empty; + destination = source.ToLower() switch + { + "daily" => Cron.Daily(), + "weekly" => Cron.Weekly(), + "disabled" => Cron.Never(), + "" => Cron.Never(), + _ => destination + }; + + return destination; } } diff --git a/API/Helpers/Converters/ServerSettingConverter.cs b/API/Helpers/Converters/ServerSettingConverter.cs index 6cc48e9eb..f23fddca7 100644 --- a/API/Helpers/Converters/ServerSettingConverter.cs +++ b/API/Helpers/Converters/ServerSettingConverter.cs @@ -4,69 +4,71 @@ using API.Entities; using API.Entities.Enums; using AutoMapper; -namespace API.Helpers.Converters -{ - public class ServerSettingConverter : ITypeConverter, ServerSettingDto> - { - public ServerSettingDto Convert(IEnumerable source, ServerSettingDto destination, ResolutionContext context) - { - destination ??= new ServerSettingDto(); - foreach (var row in source) - { - switch (row.Key) - { - case ServerSettingKey.CacheDirectory: - destination.CacheDirectory = row.Value; - break; - case ServerSettingKey.TaskScan: - destination.TaskScan = row.Value; - break; - case ServerSettingKey.LoggingLevel: - destination.LoggingLevel = row.Value; - break; - case 
ServerSettingKey.TaskBackup: - destination.TaskBackup = row.Value; - break; - case ServerSettingKey.Port: - destination.Port = int.Parse(row.Value); - break; - case ServerSettingKey.AllowStatCollection: - destination.AllowStatCollection = bool.Parse(row.Value); - break; - case ServerSettingKey.EnableOpds: - destination.EnableOpds = bool.Parse(row.Value); - break; - case ServerSettingKey.BaseUrl: - destination.BaseUrl = row.Value; - break; - case ServerSettingKey.BookmarkDirectory: - destination.BookmarksDirectory = row.Value; - break; - case ServerSettingKey.EmailServiceUrl: - destination.EmailServiceUrl = row.Value; - break; - case ServerSettingKey.InstallVersion: - destination.InstallVersion = row.Value; - break; - case ServerSettingKey.ConvertBookmarkToWebP: - destination.ConvertBookmarkToWebP = bool.Parse(row.Value); - break; - case ServerSettingKey.EnableSwaggerUi: - destination.EnableSwaggerUi = bool.Parse(row.Value); - break; - case ServerSettingKey.TotalBackups: - destination.TotalBackups = int.Parse(row.Value); - break; - case ServerSettingKey.InstallId: - destination.InstallId = row.Value; - break; - case ServerSettingKey.EnableFolderWatching: - destination.EnableFolderWatching = bool.Parse(row.Value); - break; - } - } +namespace API.Helpers.Converters; - return destination; +public class ServerSettingConverter : ITypeConverter, ServerSettingDto> +{ + public ServerSettingDto Convert(IEnumerable source, ServerSettingDto destination, ResolutionContext context) + { + destination ??= new ServerSettingDto(); + foreach (var row in source) + { + switch (row.Key) + { + case ServerSettingKey.CacheDirectory: + destination.CacheDirectory = row.Value; + break; + case ServerSettingKey.TaskScan: + destination.TaskScan = row.Value; + break; + case ServerSettingKey.LoggingLevel: + destination.LoggingLevel = row.Value; + break; + case ServerSettingKey.TaskBackup: + destination.TaskBackup = row.Value; + break; + case ServerSettingKey.Port: + destination.Port = 
int.Parse(row.Value); + break; + case ServerSettingKey.AllowStatCollection: + destination.AllowStatCollection = bool.Parse(row.Value); + break; + case ServerSettingKey.EnableOpds: + destination.EnableOpds = bool.Parse(row.Value); + break; + case ServerSettingKey.BaseUrl: + destination.BaseUrl = row.Value; + break; + case ServerSettingKey.BookmarkDirectory: + destination.BookmarksDirectory = row.Value; + break; + case ServerSettingKey.EmailServiceUrl: + destination.EmailServiceUrl = row.Value; + break; + case ServerSettingKey.InstallVersion: + destination.InstallVersion = row.Value; + break; + case ServerSettingKey.ConvertBookmarkToWebP: + destination.ConvertBookmarkToWebP = bool.Parse(row.Value); + break; + case ServerSettingKey.EnableSwaggerUi: + destination.EnableSwaggerUi = bool.Parse(row.Value); + break; + case ServerSettingKey.TotalBackups: + destination.TotalBackups = int.Parse(row.Value); + break; + case ServerSettingKey.InstallId: + destination.InstallId = row.Value; + break; + case ServerSettingKey.EnableFolderWatching: + destination.EnableFolderWatching = bool.Parse(row.Value); + break; + case ServerSettingKey.TotalLogs: + destination.TotalLogs = int.Parse(row.Value); + break; + } } + + return destination; } } diff --git a/API/Helpers/Filters/ETagFromFilename.cs b/API/Helpers/Filters/ETagFromFilename.cs deleted file mode 100644 index 30b798ea4..000000000 --- a/API/Helpers/Filters/ETagFromFilename.cs +++ /dev/null @@ -1,234 +0,0 @@ -using System; -using System.IO; -using System.Linq; -using System.Net; -using System.Net.Http; -using System.Security.Cryptography; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using Microsoft.AspNetCore.Mvc.Filters; -using Microsoft.Net.Http.Headers; -using Newtonsoft.Json; - -namespace API.Helpers.Filters; - -// NOTE: I'm leaving this in, but I don't think it's needed. Will validate in next release. 
- -//[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class, AllowMultiple = false)] -// public class ETagFromFilename : ActionFilterAttribute, IAsyncActionFilter -// { -// public override async Task OnActionExecutionAsync(ActionExecutingContext executingContext, -// ActionExecutionDelegate next) -// { -// var request = executingContext.HttpContext.Request; -// -// var executedContext = await next(); -// var response = executedContext.HttpContext.Response; -// -// // Computing ETags for Response Caching on GET requests -// if (request.Method == HttpMethod.Get.Method && response.StatusCode == (int) HttpStatusCode.OK) -// { -// ValidateETagForResponseCaching(executedContext); -// } -// } -// -// private void ValidateETagForResponseCaching(ActionExecutedContext executedContext) -// { -// if (executedContext.Result == null) -// { -// return; -// } -// -// var request = executedContext.HttpContext.Request; -// var response = executedContext.HttpContext.Response; -// -// var objectResult = executedContext.Result as ObjectResult; -// if (objectResult == null) return; -// var result = (PhysicalFileResult) objectResult.Value; -// -// // generate ETag from LastModified property -// //var etag = GenerateEtagFromFilename(result.); -// -// // generates ETag from the entire response Content -// //var etag = GenerateEtagFromResponseBodyWithHash(result); -// -// if (request.Headers.ContainsKey(HeaderNames.IfNoneMatch)) -// { -// // fetch etag from the incoming request header -// var incomingEtag = request.Headers[HeaderNames.IfNoneMatch].ToString(); -// -// // if both the etags are equal -// // raise a 304 Not Modified Response -// if (incomingEtag.Equals(etag)) -// { -// executedContext.Result = new StatusCodeResult((int) HttpStatusCode.NotModified); -// } -// } -// -// // add ETag response header -// response.Headers.Add(HeaderNames.ETag, new[] {etag}); -// } -// - // private static string GenerateEtagFromFilename(HttpResponse response, string filename, int maxAge = 
10) - // { - // if (filename is not {Length: > 0}) return string.Empty; - // var hashContent = filename + File.GetLastWriteTimeUtc(filename); - // using var sha1 = SHA256.Create(); - // return string.Concat(sha1.ComputeHash(Encoding.UTF8.GetBytes(hashContent)).Select(x => x.ToString("X2"))); - // } -// } - -[AttributeUsage(AttributeTargets.Method)] -public class ETagFilter : Attribute, IActionFilter -{ - private readonly int[] _statusCodes; - - public ETagFilter(params int[] statusCodes) - { - _statusCodes = statusCodes; - if (statusCodes.Length == 0) _statusCodes = new[] { 200 }; - } - - public void OnActionExecuting(ActionExecutingContext context) - { - } - - public void OnActionExecuted(ActionExecutedContext context) - { - if (context.HttpContext.Request.Method != "GET" || context.HttpContext.Request.Method != "HEAD") return; - if (!_statusCodes.Contains(context.HttpContext.Response.StatusCode)) return; - - var etag = string.Empty;; - //I just serialize the result to JSON, could do something less costly - if (context.Result is PhysicalFileResult) - { - // Do a cheap LastWriteTime etag gen - if (context.Result is PhysicalFileResult fileResult) - { - etag = ETagGenerator.GenerateEtagFromFilename(fileResult.FileName); - context.HttpContext.Response.Headers.LastModified = File.GetLastWriteTimeUtc(fileResult.FileName).ToLongDateString(); - } - } - - if (string.IsNullOrEmpty(etag)) - { - var content = JsonConvert.SerializeObject(context.Result); - etag = ETagGenerator.GetETag(context.HttpContext.Request.Path.ToString(), Encoding.UTF8.GetBytes(content)); - } - - - if (context.HttpContext.Request.Headers.IfNoneMatch.ToString() == etag) - { - context.Result = new StatusCodeResult(304); - } - - //context.HttpContext.Response.Headers.ETag = etag; - } - - -} - -// Helper class that generates the etag from a key (route) and content (response) -public static class ETagGenerator -{ - public static string GetETag(string key, byte[] contentBytes) - { - var keyBytes = 
Encoding.UTF8.GetBytes(key); - var combinedBytes = Combine(keyBytes, contentBytes); - - return GenerateETag(combinedBytes); - } - - private static string GenerateETag(byte[] data) - { - using var md5 = MD5.Create(); - var hash = md5.ComputeHash(data); - var hex = BitConverter.ToString(hash); - return hex.Replace("-", ""); - } - - private static byte[] Combine(byte[] a, byte[] b) - { - var c = new byte[a.Length + b.Length]; - Buffer.BlockCopy(a, 0, c, 0, a.Length); - Buffer.BlockCopy(b, 0, c, a.Length, b.Length); - return c; - } - - public static string GenerateEtagFromFilename(string filename) - { - if (filename is not {Length: > 0}) return string.Empty; - var hashContent = filename + File.GetLastWriteTimeUtc(filename); - using var md5 = MD5.Create(); - return string.Concat(md5.ComputeHash(Encoding.UTF8.GetBytes(hashContent)).Select(x => x.ToString("X2"))); - } -} - -// /// -// /// Enables HTTP Response CacheControl management with ETag values. -// /// -// public class ClientCacheWithEtagAttribute : ActionFilterAttribute -// { -// private readonly TimeSpan _clientCache; -// -// private readonly HttpMethod[] _supportedRequestMethods = { -// HttpMethod.Get, -// HttpMethod.Head -// }; -// -// /// -// /// Default constructor -// /// -// /// Indicates for how long the client should cache the response. 
The value is in seconds -// public ClientCacheWithEtagAttribute(int clientCacheInSeconds) -// { -// _clientCache = TimeSpan.FromSeconds(clientCacheInSeconds); -// } -// -// public override async Task OnActionExecutionAsync(ActionExecutingContext executingContext, ActionExecutionDelegate next) -// { -// -// if (executingContext.Response?.Content == null) -// { -// return; -// } -// -// var body = await executingContext.Response.Content.ReadAsStringAsync(); -// if (body == null) -// { -// return; -// } -// -// var computedEntityTag = GetETag(Encoding.UTF8.GetBytes(body)); -// -// if (actionExecutedContext.Request.Headers.IfNoneMatch.Any() -// && actionExecutedContext.Request.Headers.IfNoneMatch.First().Tag.Trim('"').Equals(computedEntityTag, StringComparison.InvariantCultureIgnoreCase)) -// { -// actionExecutedContext.Response.StatusCode = HttpStatusCode.NotModified; -// actionExecutedContext.Response.Content = null; -// } -// -// var cacheControlHeader = new CacheControlHeaderValue -// { -// Private = true, -// MaxAge = _clientCache -// }; -// -// actionExecutedContext.Response.Headers.ETag = new EntityTagHeaderValue($"\"{computedEntityTag}\"", false); -// actionExecutedContext.Response.Headers.CacheControl = cacheControlHeader; -// } -// -// private static string GetETag(byte[] contentBytes) -// { -// using (var md5 = MD5.Create()) -// { -// var hash = md5.ComputeHash(contentBytes); -// string hex = BitConverter.ToString(hash); -// return hex.Replace("-", ""); -// } -// } -// } - diff --git a/API/Helpers/GenreHelper.cs b/API/Helpers/GenreHelper.cs index 5eadea8fa..631baf85c 100644 --- a/API/Helpers/GenreHelper.cs +++ b/API/Helpers/GenreHelper.cs @@ -63,14 +63,4 @@ public static class GenreHelper metadataGenres.Add(genre); } } - - public static void AddGenreIfNotExists(BlockingCollection metadataGenres, Genre genre) - { - var existingGenre = metadataGenres.FirstOrDefault(p => - p.NormalizedTitle == Services.Tasks.Scanner.Parser.Parser.Normalize(genre.Title)); - if 
(existingGenre == null) - { - metadataGenres.Add(genre); - } - } } diff --git a/API/Helpers/PagedList.cs b/API/Helpers/PagedList.cs index b87687a6e..0c666612d 100644 --- a/API/Helpers/PagedList.cs +++ b/API/Helpers/PagedList.cs @@ -4,30 +4,29 @@ using System.Linq; using System.Threading.Tasks; using Microsoft.EntityFrameworkCore; -namespace API.Helpers +namespace API.Helpers; + +public class PagedList : List { - public class PagedList : List + public PagedList(IEnumerable items, int count, int pageNumber, int pageSize) { - public PagedList(IEnumerable items, int count, int pageNumber, int pageSize) - { - CurrentPage = pageNumber; - TotalPages = (int) Math.Ceiling(count / (double) pageSize); - PageSize = pageSize; - TotalCount = count; - AddRange(items); - } - - public int CurrentPage { get; set; } - public int TotalPages { get; set; } - public int PageSize { get; set; } - public int TotalCount { get; set; } - - public static async Task> CreateAsync(IQueryable source, int pageNumber, int pageSize) - { - // NOTE: OrderBy warning being thrown here even if query has the orderby statement - var count = await source.CountAsync(); - var items = await source.Skip((pageNumber - 1) * pageSize).Take(pageSize).ToListAsync(); - return new PagedList(items, count, pageNumber, pageSize); - } + CurrentPage = pageNumber; + TotalPages = (int) Math.Ceiling(count / (double) pageSize); + PageSize = pageSize; + TotalCount = count; + AddRange(items); } -} \ No newline at end of file + + public int CurrentPage { get; set; } + public int TotalPages { get; set; } + public int PageSize { get; set; } + public int TotalCount { get; set; } + + public static async Task> CreateAsync(IQueryable source, int pageNumber, int pageSize) + { + // NOTE: OrderBy warning being thrown here even if query has the orderby statement + var count = await source.CountAsync(); + var items = await source.Skip((pageNumber - 1) * pageSize).Take(pageSize).ToListAsync(); + return new PagedList(items, count, pageNumber, 
pageSize); + } +} diff --git a/API/Helpers/PaginationHeader.cs b/API/Helpers/PaginationHeader.cs index 8d24eeca0..d3c582798 100644 --- a/API/Helpers/PaginationHeader.cs +++ b/API/Helpers/PaginationHeader.cs @@ -1,18 +1,17 @@ -namespace API.Helpers -{ - public class PaginationHeader - { - public PaginationHeader(int currentPage, int itemsPerPage, int totalItems, int totalPages) - { - CurrentPage = currentPage; - ItemsPerPage = itemsPerPage; - TotalItems = totalItems; - TotalPages = totalPages; - } +namespace API.Helpers; - public int CurrentPage { get; set; } - public int ItemsPerPage { get; set; } - public int TotalItems { get; set; } - public int TotalPages { get; set; } +public class PaginationHeader +{ + public PaginationHeader(int currentPage, int itemsPerPage, int totalItems, int totalPages) + { + CurrentPage = currentPage; + ItemsPerPage = itemsPerPage; + TotalItems = totalItems; + TotalPages = totalPages; } -} \ No newline at end of file + + public int CurrentPage { get; set; } + public int ItemsPerPage { get; set; } + public int TotalItems { get; set; } + public int TotalPages { get; set; } +} diff --git a/API/Helpers/SQLHelper.cs b/API/Helpers/SQLHelper.cs index d06d246ef..dd56a288b 100644 --- a/API/Helpers/SQLHelper.cs +++ b/API/Helpers/SQLHelper.cs @@ -5,27 +5,26 @@ using System.Data.Common; using API.DTOs; using Microsoft.EntityFrameworkCore; -namespace API.Helpers +namespace API.Helpers; + +public static class SqlHelper { - public static class SqlHelper + public static List RawSqlQuery(DbContext context, string query, Func map) { - public static List RawSqlQuery(DbContext context, string query, Func map) + using var command = context.Database.GetDbConnection().CreateCommand(); + command.CommandText = query; + command.CommandType = CommandType.Text; + + context.Database.OpenConnection(); + + using var result = command.ExecuteReader(); + var entities = new List(); + + while (result.Read()) { - using var command = 
context.Database.GetDbConnection().CreateCommand(); - command.CommandText = query; - command.CommandType = CommandType.Text; - - context.Database.OpenConnection(); - - using var result = command.ExecuteReader(); - var entities = new List(); - - while (result.Read()) - { - entities.Add(map(result)); - } - - return entities; + entities.Add(map(result)); } + + return entities; } } diff --git a/API/Helpers/UserParams.cs b/API/Helpers/UserParams.cs index 87cc28471..2ad679263 100644 --- a/API/Helpers/UserParams.cs +++ b/API/Helpers/UserParams.cs @@ -1,18 +1,17 @@ -namespace API.Helpers -{ - public class UserParams - { - private const int MaxPageSize = int.MaxValue; - public int PageNumber { get; init; } = 1; - private readonly int _pageSize = MaxPageSize; +namespace API.Helpers; - /// - /// If set to 0, will set as MaxInt - /// - public int PageSize - { - get => _pageSize; - init => _pageSize = (value == 0) ? MaxPageSize : value; - } +public class UserParams +{ + private const int MaxPageSize = int.MaxValue; + public int PageNumber { get; init; } = 1; + private readonly int _pageSize = MaxPageSize; + + /// + /// If set to 0, will set as MaxInt + /// + public int PageSize + { + get => _pageSize; + init => _pageSize = (value == 0) ? 
MaxPageSize : value; } } diff --git a/API/Logging/LogEnricher.cs b/API/Logging/LogEnricher.cs new file mode 100644 index 000000000..8cc7a6b29 --- /dev/null +++ b/API/Logging/LogEnricher.cs @@ -0,0 +1,19 @@ +using System.Linq; +using Microsoft.AspNetCore.Http; +using Serilog; + +namespace API.Logging; + +public static class LogEnricher +{ + /// + /// Enriches the HTTP request log with additional data via the Diagnostic Context + /// + /// The Serilog diagnostic context + /// The current HTTP Context + public static void EnrichFromRequest(IDiagnosticContext diagnosticContext, HttpContext httpContext) + { + diagnosticContext.Set("ClientIP", httpContext.Connection.RemoteIpAddress?.ToString()); + diagnosticContext.Set("UserAgent", httpContext.Request.Headers["User-Agent"].FirstOrDefault()); + } +} diff --git a/API/Logging/LogLevelOptions.cs b/API/Logging/LogLevelOptions.cs new file mode 100644 index 000000000..34d7d353f --- /dev/null +++ b/API/Logging/LogLevelOptions.cs @@ -0,0 +1,100 @@ +using System.IO; +using API.Services; +using Microsoft.AspNetCore.Hosting; +using Microsoft.Extensions.Configuration; +using Serilog; +using Serilog.Core; +using Serilog.Events; +using Serilog.Formatting.Display; + +namespace API.Logging; + +/// +/// This class represents information for configuring Logging in the Application. 
Only a high log level is exposed and Kavita +/// controls the underlying log levels for different loggers in ASP.NET +/// +public static class LogLevelOptions +{ + public const string LogFile = "config/logs/kavita.log"; + public const bool LogRollingEnabled = true; + /// + /// Controls the Logging Level of the Application + /// + private static readonly LoggingLevelSwitch LogLevelSwitch = new (); + /// + /// Controls Microsoft's Logging Level + /// + private static readonly LoggingLevelSwitch MicrosoftLogLevelSwitch = new (LogEventLevel.Error); + /// + /// Controls Microsoft.Hosting.Lifetime's Logging Level + /// + private static readonly LoggingLevelSwitch MicrosoftHostingLifetimeLogLevelSwitch = new (LogEventLevel.Error); + /// + /// Controls Hangfire's Logging Level + /// + private static readonly LoggingLevelSwitch HangfireLogLevelSwitch = new (LogEventLevel.Error); + /// + /// Controls Microsoft.AspNetCore.Hosting.Internal.WebHost's Logging Level + /// + private static readonly LoggingLevelSwitch AspNetCoreLogLevelSwitch = new (LogEventLevel.Error); + + public static LoggerConfiguration CreateConfig(LoggerConfiguration configuration) + { + const string outputTemplate = "[Kavita] [{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} {CorrelationId} {ThreadId}] [{Level}] {SourceContext} {Message:lj}{NewLine}{Exception}"; + return configuration + .MinimumLevel + .ControlledBy(LogLevelSwitch) + .MinimumLevel.Override("Microsoft", MicrosoftLogLevelSwitch) + .MinimumLevel.Override("Microsoft.Hosting.Lifetime", MicrosoftHostingLifetimeLogLevelSwitch) + .MinimumLevel.Override("Hangfire", HangfireLogLevelSwitch) + .MinimumLevel.Override("Microsoft.AspNetCore.Hosting.Internal.WebHost", AspNetCoreLogLevelSwitch) + // Suppress noisy loggers that add no value + .MinimumLevel.Override("Microsoft.AspNetCore.ResponseCaching.ResponseCachingMiddleware", LogEventLevel.Error) + .MinimumLevel.Override("Microsoft.AspNetCore", LogEventLevel.Error) + .Enrich.FromLogContext() + 
.Enrich.WithThreadId() + .WriteTo.Console(new MessageTemplateTextFormatter(outputTemplate)) + .WriteTo.File(LogFile, + shared: true, + rollingInterval: RollingInterval.Day, + outputTemplate: outputTemplate); + } + + public static void SwitchLogLevel(string level) + { + switch (level) + { + case "Debug": + LogLevelSwitch.MinimumLevel = LogEventLevel.Debug; + MicrosoftLogLevelSwitch.MinimumLevel = LogEventLevel.Warning; // This is DB output information, Inf shows the SQL + MicrosoftHostingLifetimeLogLevelSwitch.MinimumLevel = LogEventLevel.Information; + AspNetCoreLogLevelSwitch.MinimumLevel = LogEventLevel.Warning; + break; + case "Information": + LogLevelSwitch.MinimumLevel = LogEventLevel.Error; + MicrosoftLogLevelSwitch.MinimumLevel = LogEventLevel.Error; + MicrosoftHostingLifetimeLogLevelSwitch.MinimumLevel = LogEventLevel.Error; + AspNetCoreLogLevelSwitch.MinimumLevel = LogEventLevel.Error; + break; + case "Trace": + LogLevelSwitch.MinimumLevel = LogEventLevel.Verbose; + MicrosoftLogLevelSwitch.MinimumLevel = LogEventLevel.Information; + MicrosoftHostingLifetimeLogLevelSwitch.MinimumLevel = LogEventLevel.Debug; + AspNetCoreLogLevelSwitch.MinimumLevel = LogEventLevel.Information; + break; + case "Warning": + LogLevelSwitch.MinimumLevel = LogEventLevel.Warning; + MicrosoftLogLevelSwitch.MinimumLevel = LogEventLevel.Error; + MicrosoftHostingLifetimeLogLevelSwitch.MinimumLevel = LogEventLevel.Error; + AspNetCoreLogLevelSwitch.MinimumLevel = LogEventLevel.Error; + break; + case "Critical": + LogLevelSwitch.MinimumLevel = LogEventLevel.Fatal; + MicrosoftLogLevelSwitch.MinimumLevel = LogEventLevel.Error; + MicrosoftHostingLifetimeLogLevelSwitch.MinimumLevel = LogEventLevel.Error; + AspNetCoreLogLevelSwitch.MinimumLevel = LogEventLevel.Error; + break; + } + } + +} diff --git a/API/Middleware/ExceptionMiddleware.cs b/API/Middleware/ExceptionMiddleware.cs index f844d32f9..81238d7a3 100644 --- a/API/Middleware/ExceptionMiddleware.cs +++ 
b/API/Middleware/ExceptionMiddleware.cs @@ -7,49 +7,48 @@ using Microsoft.AspNetCore.Http; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; -namespace API.Middleware +namespace API.Middleware; + +public class ExceptionMiddleware { - public class ExceptionMiddleware + private readonly RequestDelegate _next; + private readonly ILogger _logger; + private readonly IHostEnvironment _env; + + + public ExceptionMiddleware(RequestDelegate next, ILogger logger, IHostEnvironment env) { - private readonly RequestDelegate _next; - private readonly ILogger _logger; - private readonly IHostEnvironment _env; + _next = next; + _logger = logger; + _env = env; + } - - public ExceptionMiddleware(RequestDelegate next, ILogger logger, IHostEnvironment env) + public async Task InvokeAsync(HttpContext context) + { + try { - _next = next; - _logger = logger; - _env = env; + await _next(context); // downstream middlewares or http call } - - public async Task InvokeAsync(HttpContext context) + catch (Exception ex) { - try + _logger.LogError(ex, "There was an exception"); + context.Response.ContentType = "application/json"; + context.Response.StatusCode = (int) HttpStatusCode.InternalServerError; + + var errorMessage = string.IsNullOrEmpty(ex.Message) ? "Internal Server Error" : ex.Message; + + var response = new ApiException(context.Response.StatusCode, errorMessage, ex.StackTrace); + + var options = new JsonSerializerOptions { - await _next(context); // downstream middlewares or http call - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an exception"); - context.Response.ContentType = "application/json"; - context.Response.StatusCode = (int) HttpStatusCode.InternalServerError; + PropertyNamingPolicy = + JsonNamingPolicy.CamelCase + }; - var errorMessage = string.IsNullOrEmpty(ex.Message) ? 
"Internal Server Error" : ex.Message; + var json = JsonSerializer.Serialize(response, options); - var response = new ApiException(context.Response.StatusCode, errorMessage, ex.StackTrace); + await context.Response.WriteAsync(json); - var options = new JsonSerializerOptions - { - PropertyNamingPolicy = - JsonNamingPolicy.CamelCase - }; - - var json = JsonSerializer.Serialize(response, options); - - await context.Response.WriteAsync(json); - - } } } } diff --git a/API/Program.cs b/API/Program.cs index 1c3568d5e..6e1d3f365 100644 --- a/API/Program.cs +++ b/API/Program.cs @@ -7,7 +7,9 @@ using System.Threading.Tasks; using API.Data; using API.Entities; using API.Entities.Enums; +using API.Logging; using API.Services; +using API.SignalR; using Kavita.Common; using Kavita.Common.EnvironmentInfo; using Microsoft.AspNetCore.Hosting; @@ -18,35 +20,41 @@ using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; +using Serilog; +using Serilog.Events; +using Serilog.Sinks.AspNetCore.SignalR.Extensions; -namespace API +namespace API; + +public class Program { - public class Program - { - private static readonly int HttpPort = Configuration.Port; + private static readonly int HttpPort = Configuration.Port; - protected Program() + protected Program() + { + } + + public static async Task Main(string[] args) + { + Console.OutputEncoding = System.Text.Encoding.UTF8; + Log.Logger = new LoggerConfiguration() + .WriteTo.Console() + .CreateBootstrapLogger(); + + var directoryService = new DirectoryService(null, new FileSystem()); + + // Before anything, check if JWT has been generated properly or if user still has default + if (!Configuration.CheckIfJwtTokenSet() && + Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") != Environments.Development) { + Console.WriteLine("Generating JWT TokenKey for encrypting user sessions..."); + var rBytes = new byte[128]; + 
RandomNumberGenerator.Create().GetBytes(rBytes); + Configuration.JwtToken = Convert.ToBase64String(rBytes).Replace("/", string.Empty); } - public static async Task Main(string[] args) + try { - Console.OutputEncoding = System.Text.Encoding.UTF8; - var isDocker = new OsInfo(Array.Empty()).IsDocker; - - - var directoryService = new DirectoryService(null, new FileSystem()); - - // Before anything, check if JWT has been generated properly or if user still has default - if (!Configuration.CheckIfJwtTokenSet() && - Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") != Environments.Development) - { - Console.WriteLine("Generating JWT TokenKey for encrypting user sessions..."); - var rBytes = new byte[128]; - RandomNumberGenerator.Create().GetBytes(rBytes); - Configuration.JwtToken = Convert.ToBase64String(rBytes).Replace("/", string.Empty); - } - var host = CreateHostBuilder(args).Build(); using var scope = host.Services.CreateScope(); @@ -78,7 +86,8 @@ namespace API await Seed.SeedThemes(context); await Seed.SeedUserApiKeys(context); - + // NOTE: This check is from v0.4.8 (Nov 04, 2021). We can likely remove this + var isDocker = new OsInfo(Array.Empty()).IsDocker; if (isDocker && new FileInfo("data/appsettings.json").Exists) { logger.LogCritical("WARNING! Mount point is incorrect, nothing here will persist. 
Please change your container mount from /kavita/data to /kavita/config"); @@ -97,58 +106,76 @@ namespace API return; } + // Update the logger with the log level + var unitOfWork = services.GetRequiredService(); + var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync(); + LogLevelOptions.SwitchLogLevel(settings.LoggingLevel); + await host.RunAsync(); - } - - private static async Task GetMigrationDirectory(DataContext context, IDirectoryService directoryService) + } catch (Exception ex) { - string currentVersion = null; - try - { - if (!await context.ServerSetting.AnyAsync()) return "vUnknown"; - currentVersion = - (await context.ServerSetting.SingleOrDefaultAsync(s => - s.Key == ServerSettingKey.InstallVersion))?.Value; - } - catch (Exception) - { - // ignored - } + Log.Fatal(ex, "Host terminated unexpectedly"); + } finally + { + await Log.CloseAndFlushAsync(); + } + } - if (string.IsNullOrEmpty(currentVersion)) - { - currentVersion = "vUnknown"; - } - - var migrationDirectory = directoryService.FileSystem.Path.Join(directoryService.TempDirectory, - "migration", currentVersion); - return migrationDirectory; + private static async Task GetMigrationDirectory(DataContext context, IDirectoryService directoryService) + { + string currentVersion = null; + try + { + if (!await context.ServerSetting.AnyAsync()) return "vUnknown"; + currentVersion = + (await context.ServerSetting.SingleOrDefaultAsync(s => + s.Key == ServerSettingKey.InstallVersion))?.Value; + } + catch (Exception) + { + // ignored } - private static IHostBuilder CreateHostBuilder(string[] args) => - Host.CreateDefaultBuilder(args) - .ConfigureAppConfiguration((hostingContext, config) => + if (string.IsNullOrEmpty(currentVersion)) + { + currentVersion = "vUnknown"; + } + + var migrationDirectory = directoryService.FileSystem.Path.Join(directoryService.TempDirectory, + "migration", currentVersion); + return migrationDirectory; + } + + private static IHostBuilder CreateHostBuilder(string[] 
args) => + Host.CreateDefaultBuilder(args) + .UseSerilog((_, services, configuration) => + { + LogLevelOptions.CreateConfig(configuration) + .WriteTo.SignalRSink( + LogEventLevel.Information, + services); + }) + .ConfigureAppConfiguration((hostingContext, config) => + { + config.Sources.Clear(); + + var env = hostingContext.HostingEnvironment; + + config.AddJsonFile("config/appsettings.json", optional: true, reloadOnChange: false) + .AddJsonFile($"config/appsettings.{env.EnvironmentName}.json", + optional: true, reloadOnChange: false); + }) + .ConfigureWebHostDefaults(webBuilder => + { + webBuilder.UseKestrel((opts) => { - config.Sources.Clear(); - - var env = hostingContext.HostingEnvironment; - - config.AddJsonFile("config/appsettings.json", optional: true, reloadOnChange: false) - .AddJsonFile($"config/appsettings.{env.EnvironmentName}.json", - optional: true, reloadOnChange: false); - }) - .ConfigureWebHostDefaults(webBuilder => - { - webBuilder.UseKestrel((opts) => - { - opts.ListenAnyIP(HttpPort, options => { options.Protocols = HttpProtocols.Http1AndHttp2; }); - }); - - webBuilder.UseStartup(); + opts.ListenAnyIP(HttpPort, options => { options.Protocols = HttpProtocols.Http1AndHttp2; }); }); + webBuilder.UseStartup(); + }); + - } } diff --git a/API/Services/AccountService.cs b/API/Services/AccountService.cs index 62f5386fb..0d8bed66c 100644 --- a/API/Services/AccountService.cs +++ b/API/Services/AccountService.cs @@ -2,6 +2,7 @@ using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; +using API.Constants; using API.Data; using API.Entities; using API.Errors; @@ -9,88 +10,123 @@ using Microsoft.AspNetCore.Identity; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.Logging; -namespace API.Services +namespace API.Services; + +public interface IAccountService { - public interface IAccountService + Task> ChangeUserPassword(AppUser user, string newPassword); + Task> ValidatePassword(AppUser user, string password); + Task> 
ValidateUsername(string username); + Task> ValidateEmail(string email); + Task HasBookmarkPermission(AppUser user); + Task HasDownloadPermission(AppUser user); +} + +public class AccountService : IAccountService +{ + private readonly UserManager _userManager; + private readonly ILogger _logger; + private readonly IUnitOfWork _unitOfWork; + public const string DefaultPassword = "[k.2@RZ!mxCQkJzE"; + + public AccountService(UserManager userManager, ILogger logger, IUnitOfWork unitOfWork) { - Task> ChangeUserPassword(AppUser user, string newPassword); - Task> ValidatePassword(AppUser user, string password); - Task> ValidateUsername(string username); - Task> ValidateEmail(string email); + _userManager = userManager; + _logger = logger; + _unitOfWork = unitOfWork; } - public class AccountService : IAccountService + public async Task> ChangeUserPassword(AppUser user, string newPassword) { - private readonly UserManager _userManager; - private readonly ILogger _logger; - private readonly IUnitOfWork _unitOfWork; - public const string DefaultPassword = "[k.2@RZ!mxCQkJzE"; + var passwordValidationIssues = (await ValidatePassword(user, newPassword)).ToList(); + if (passwordValidationIssues.Any()) return passwordValidationIssues; - public AccountService(UserManager userManager, ILogger logger, IUnitOfWork unitOfWork) + var result = await _userManager.RemovePasswordAsync(user); + if (!result.Succeeded) { - _userManager = userManager; - _logger = logger; - _unitOfWork = unitOfWork; + _logger.LogError("Could not update password"); + return result.Errors.Select(e => new ApiException(400, e.Code, e.Description)); } - public async Task> ChangeUserPassword(AppUser user, string newPassword) + + result = await _userManager.AddPasswordAsync(user, newPassword); + if (!result.Succeeded) { - var passwordValidationIssues = (await ValidatePassword(user, newPassword)).ToList(); - if (passwordValidationIssues.Any()) return passwordValidationIssues; - - var result = await 
_userManager.RemovePasswordAsync(user); - if (!result.Succeeded) - { - _logger.LogError("Could not update password"); - return result.Errors.Select(e => new ApiException(400, e.Code, e.Description)); - } - - - result = await _userManager.AddPasswordAsync(user, newPassword); - if (!result.Succeeded) - { - _logger.LogError("Could not update password"); - return result.Errors.Select(e => new ApiException(400, e.Code, e.Description)); - } - - return new List(); + _logger.LogError("Could not update password"); + return result.Errors.Select(e => new ApiException(400, e.Code, e.Description)); } - public async Task> ValidatePassword(AppUser user, string password) - { - foreach (var validator in _userManager.PasswordValidators) - { - var validationResult = await validator.ValidateAsync(_userManager, user, password); - if (!validationResult.Succeeded) - { - return validationResult.Errors.Select(e => new ApiException(400, e.Code, e.Description)); - } - } + return new List(); + } - return Array.Empty(); - } - public async Task> ValidateUsername(string username) + public async Task> ValidatePassword(AppUser user, string password) + { + foreach (var validator in _userManager.PasswordValidators) { - if (await _userManager.Users.AnyAsync(x => x.NormalizedUserName == username.ToUpper())) + var validationResult = await validator.ValidateAsync(_userManager, user, password); + if (!validationResult.Succeeded) { - return new List() - { - new ApiException(400, "Username is already taken") - }; + return validationResult.Errors.Select(e => new ApiException(400, e.Code, e.Description)); } - - return Array.Empty(); } - public async Task> ValidateEmail(string email) + return Array.Empty(); + } + public async Task> ValidateUsername(string username) + { + if (await _userManager.Users.AnyAsync(x => x.NormalizedUserName == username.ToUpper())) { - var user = await _unitOfWork.UserRepository.GetUserByEmailAsync(email); - if (user == null) return Array.Empty(); - return new List() { - new 
ApiException(400, "Email is already registered") + new ApiException(400, "Username is already taken") }; } + + return Array.Empty(); } + + public async Task> ValidateEmail(string email) + { + var user = await _unitOfWork.UserRepository.GetUserByEmailAsync(email); + if (user == null) return Array.Empty(); + + return new List() + { + new ApiException(400, "Email is already registered") + }; + } + + /// + /// Does the user have the Bookmark permission or admin rights + /// + /// + /// + public async Task HasBookmarkPermission(AppUser user) + { + var roles = await _userManager.GetRolesAsync(user); + return roles.Contains(PolicyConstants.BookmarkRole) || roles.Contains(PolicyConstants.AdminRole); + } + + /// + /// Does the user have the Download permission or admin rights + /// + /// + /// + public async Task HasDownloadPermission(AppUser user) + { + var roles = await _userManager.GetRolesAsync(user); + return roles.Contains(PolicyConstants.DownloadRole) || roles.Contains(PolicyConstants.AdminRole); + } + + /// + /// Does the user have Change Restriction permission or admin rights + /// + /// + /// + public async Task HasChangeRestrictionRole(AppUser user) + { + var roles = await _userManager.GetRolesAsync(user); + return roles.Contains(PolicyConstants.ChangePasswordRole) || roles.Contains(PolicyConstants.AdminRole); + } + } diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs index 58a2b0aae..b370f178d 100644 --- a/API/Services/ArchiveService.cs +++ b/API/Services/ArchiveService.cs @@ -14,479 +14,478 @@ using Microsoft.Extensions.Logging; using SharpCompress.Archives; using SharpCompress.Common; -namespace API.Services +namespace API.Services; + +public interface IArchiveService { - public interface IArchiveService + void ExtractArchive(string archivePath, string extractPath); + int GetNumberOfPagesFromArchive(string archivePath); + string GetCoverImage(string archivePath, string fileName, string outputDirectory); + bool IsValidArchive(string 
archivePath); + ComicInfo GetComicInfo(string archivePath); + ArchiveLibrary CanOpen(string archivePath); + bool ArchiveNeedsFlattening(ZipArchive archive); + /// + /// Creates a zip file form the listed files and outputs to the temp folder. + /// + /// List of files to be zipped up. Should be full file paths. + /// Temp folder name to use for preparing the files. Will be created and deleted + /// Path to the temp zip + /// + string CreateZipForDownload(IEnumerable files, string tempFolder); +} + +/// +/// Responsible for manipulating Archive files. Used by and +/// +// ReSharper disable once ClassWithVirtualMembersNeverInherited.Global +public class ArchiveService : IArchiveService +{ + private readonly ILogger _logger; + private readonly IDirectoryService _directoryService; + private readonly IImageService _imageService; + private const string ComicInfoFilename = "ComicInfo.xml"; + + public ArchiveService(ILogger logger, IDirectoryService directoryService, IImageService imageService) { - void ExtractArchive(string archivePath, string extractPath); - int GetNumberOfPagesFromArchive(string archivePath); - string GetCoverImage(string archivePath, string fileName, string outputDirectory); - bool IsValidArchive(string archivePath); - ComicInfo GetComicInfo(string archivePath); - ArchiveLibrary CanOpen(string archivePath); - bool ArchiveNeedsFlattening(ZipArchive archive); - /// - /// Creates a zip file form the listed files and outputs to the temp folder. - /// - /// List of files to be zipped up. Should be full file paths. - /// Temp folder name to use for preparing the files. Will be created and deleted - /// Path to the temp zip - /// - string CreateZipForDownload(IEnumerable files, string tempFolder); + _logger = logger; + _directoryService = directoryService; + _imageService = imageService; } /// - /// Responsible for manipulating Archive files. Used by and + /// Checks if a File can be opened. Requires up to 2 opens of the filestream. 
/// - // ReSharper disable once ClassWithVirtualMembersNeverInherited.Global - public class ArchiveService : IArchiveService + /// + /// + public virtual ArchiveLibrary CanOpen(string archivePath) { - private readonly ILogger _logger; - private readonly IDirectoryService _directoryService; - private readonly IImageService _imageService; - private const string ComicInfoFilename = "comicinfo"; + if (string.IsNullOrEmpty(archivePath) || !(File.Exists(archivePath) && Tasks.Scanner.Parser.Parser.IsArchive(archivePath) || Tasks.Scanner.Parser.Parser.IsEpub(archivePath))) return ArchiveLibrary.NotSupported; - public ArchiveService(ILogger logger, IDirectoryService directoryService, IImageService imageService) + var ext = _directoryService.FileSystem.Path.GetExtension(archivePath).ToUpper(); + if (ext.Equals(".CBR") || ext.Equals(".RAR")) return ArchiveLibrary.SharpCompress; + + try { - _logger = logger; - _directoryService = directoryService; - _imageService = imageService; + using var a2 = ZipFile.OpenRead(archivePath); + return ArchiveLibrary.Default; } - - /// - /// Checks if a File can be opened. Requires up to 2 opens of the filestream. 
- /// - /// - /// - public virtual ArchiveLibrary CanOpen(string archivePath) + catch (Exception) { - if (string.IsNullOrEmpty(archivePath) || !(File.Exists(archivePath) && Tasks.Scanner.Parser.Parser.IsArchive(archivePath) || Tasks.Scanner.Parser.Parser.IsEpub(archivePath))) return ArchiveLibrary.NotSupported; - - var ext = _directoryService.FileSystem.Path.GetExtension(archivePath).ToUpper(); - if (ext.Equals(".CBR") || ext.Equals(".RAR")) return ArchiveLibrary.SharpCompress; - try { - using var a2 = ZipFile.OpenRead(archivePath); - return ArchiveLibrary.Default; + using var a1 = ArchiveFactory.Open(archivePath); + return ArchiveLibrary.SharpCompress; } catch (Exception) { - try + return ArchiveLibrary.NotSupported; + } + } + } + + public int GetNumberOfPagesFromArchive(string archivePath) + { + if (!IsValidArchive(archivePath)) + { + _logger.LogError("Archive {ArchivePath} could not be found", archivePath); + return 0; + } + + try + { + var libraryHandler = CanOpen(archivePath); + switch (libraryHandler) + { + case ArchiveLibrary.Default: { - using var a1 = ArchiveFactory.Open(archivePath); - return ArchiveLibrary.SharpCompress; + using var archive = ZipFile.OpenRead(archivePath); + return archive.Entries.Count(e => !Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(e.FullName) && Tasks.Scanner.Parser.Parser.IsImage(e.FullName)); } - catch (Exception) + case ArchiveLibrary.SharpCompress: { - return ArchiveLibrary.NotSupported; + using var archive = ArchiveFactory.Open(archivePath); + return archive.Entries.Count(entry => !entry.IsDirectory && + !Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty) + && Tasks.Scanner.Parser.Parser.IsImage(entry.Key)); } + case ArchiveLibrary.NotSupported: + _logger.LogWarning("[GetNumberOfPagesFromArchive] This archive cannot be read: {ArchivePath}. 
Defaulting to 0 pages", archivePath); + return 0; + default: + _logger.LogWarning("[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath); + return 0; + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath); + return 0; + } + } + + /// + /// Finds the first instance of a folder entry and returns it + /// + /// + /// Entry name of match, null if no match + public static string FindFolderEntry(IEnumerable entryFullNames) + { + var result = entryFullNames + .Where(path => !(Path.EndsInDirectorySeparator(path) || Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(path) || path.StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith))) + .OrderByNatural(Path.GetFileNameWithoutExtension) + .FirstOrDefault(Tasks.Scanner.Parser.Parser.IsCoverImage); + + return string.IsNullOrEmpty(result) ? null : result; + } + + /// + /// Returns first entry that is an image and is not in a blacklisted folder path. Uses for ordering files + /// + /// + /// + /// Entry name of match, null if no match + public static string? FirstFileEntry(IEnumerable entryFullNames, string archiveName) + { + // First check if there are any files that are not in a nested folder before just comparing by filename. This is needed + // because NaturalSortComparer does not work with paths and doesn't seem 001.jpg as before chapter 1/001.jpg. 
+ var fullNames = entryFullNames + .Where(path => !(Path.EndsInDirectorySeparator(path) || Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(path) || path.StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith)) && Tasks.Scanner.Parser.Parser.IsImage(path)) + .OrderByNatural(c => c.GetFullPathWithoutExtension()) + .ToList(); + if (fullNames.Count == 0) return null; + + var nonNestedFile = fullNames.Where(entry => (Path.GetDirectoryName(entry) ?? string.Empty).Equals(archiveName)) + .OrderByNatural(c => c.GetFullPathWithoutExtension()) + .FirstOrDefault(); + + if (!string.IsNullOrEmpty(nonNestedFile)) return nonNestedFile; + + // Check the first folder and sort within that to see if we can find a file, else fallback to first file with basic sort. + // Get first folder, then sort within that + var firstDirectoryFile = fullNames.OrderByNatural(Path.GetDirectoryName).FirstOrDefault(); + if (!string.IsNullOrEmpty(firstDirectoryFile)) + { + var firstDirectory = Path.GetDirectoryName(firstDirectoryFile); + if (!string.IsNullOrEmpty(firstDirectory)) + { + var firstDirectoryResult = fullNames.Where(f => firstDirectory.Equals(Path.GetDirectoryName(f))) + .OrderByNatural(Path.GetFileNameWithoutExtension) + .FirstOrDefault(); + + if (!string.IsNullOrEmpty(firstDirectoryResult)) return firstDirectoryResult; } } - public int GetNumberOfPagesFromArchive(string archivePath) - { - if (!IsValidArchive(archivePath)) - { - _logger.LogError("Archive {ArchivePath} could not be found", archivePath); - return 0; - } + var result = fullNames + .OrderByNatural(Path.GetFileNameWithoutExtension) + .FirstOrDefault(); - try + return string.IsNullOrEmpty(result) ? null : result; + } + + + /// + /// Generates byte array of cover image. 
+ /// Given a path to a compressed file , will ensure the first image (respects directory structure) is returned unless + /// a folder/cover.(image extension) exists in the the compressed file (if duplicate, the first is chosen) + /// + /// This skips over any __MACOSX folder/file iteration. + /// + /// This always creates a thumbnail + /// + /// File name to use based on context of entity. + /// Where to output the file, defaults to covers directory + /// + public string GetCoverImage(string archivePath, string fileName, string outputDirectory) + { + if (archivePath == null || !IsValidArchive(archivePath)) return string.Empty; + try + { + var libraryHandler = CanOpen(archivePath); + switch (libraryHandler) { - var libraryHandler = CanOpen(archivePath); - switch (libraryHandler) + case ArchiveLibrary.Default: { - case ArchiveLibrary.Default: - { - using var archive = ZipFile.OpenRead(archivePath); - return archive.Entries.Count(e => !Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(e.FullName) && Tasks.Scanner.Parser.Parser.IsImage(e.FullName)); - } - case ArchiveLibrary.SharpCompress: - { - using var archive = ArchiveFactory.Open(archivePath); - return archive.Entries.Count(entry => !entry.IsDirectory && - !Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty) - && Tasks.Scanner.Parser.Parser.IsImage(entry.Key)); - } - case ArchiveLibrary.NotSupported: - _logger.LogWarning("[GetNumberOfPagesFromArchive] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath); - return 0; - default: - _logger.LogWarning("[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. 
Defaulting to 0 pages", archivePath); - return 0; + using var archive = ZipFile.OpenRead(archivePath); + + var entryName = FindCoverImageFilename(archivePath, archive.Entries.Select(e => e.FullName)); + var entry = archive.Entries.Single(e => e.FullName == entryName); + + using var stream = entry.Open(); + return _imageService.WriteCoverThumbnail(stream, fileName, outputDirectory); } - } - catch (Exception ex) - { - _logger.LogWarning(ex, "[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath); - return 0; - } - } - - /// - /// Finds the first instance of a folder entry and returns it - /// - /// - /// Entry name of match, null if no match - public static string FindFolderEntry(IEnumerable entryFullNames) - { - var result = entryFullNames - .Where(path => !(Path.EndsInDirectorySeparator(path) || Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(path) || path.StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith))) - .OrderByNatural(Path.GetFileNameWithoutExtension) - .FirstOrDefault(Tasks.Scanner.Parser.Parser.IsCoverImage); - - return string.IsNullOrEmpty(result) ? null : result; - } - - /// - /// Returns first entry that is an image and is not in a blacklisted folder path. Uses for ordering files - /// - /// - /// - /// Entry name of match, null if no match - public static string? FirstFileEntry(IEnumerable entryFullNames, string archiveName) - { - // First check if there are any files that are not in a nested folder before just comparing by filename. This is needed - // because NaturalSortComparer does not work with paths and doesn't seem 001.jpg as before chapter 1/001.jpg. 
- var fullNames = entryFullNames - .Where(path => !(Path.EndsInDirectorySeparator(path) || Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(path) || path.StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith)) && Tasks.Scanner.Parser.Parser.IsImage(path)) - .OrderByNatural(c => c.GetFullPathWithoutExtension()) - .ToList(); - if (fullNames.Count == 0) return null; - - var nonNestedFile = fullNames.Where(entry => (Path.GetDirectoryName(entry) ?? string.Empty).Equals(archiveName)) - .OrderByNatural(c => c.GetFullPathWithoutExtension()) - .FirstOrDefault(); - - if (!string.IsNullOrEmpty(nonNestedFile)) return nonNestedFile; - - // Check the first folder and sort within that to see if we can find a file, else fallback to first file with basic sort. - // Get first folder, then sort within that - var firstDirectoryFile = fullNames.OrderByNatural(Path.GetDirectoryName).FirstOrDefault(); - if (!string.IsNullOrEmpty(firstDirectoryFile)) - { - var firstDirectory = Path.GetDirectoryName(firstDirectoryFile); - if (!string.IsNullOrEmpty(firstDirectory)) + case ArchiveLibrary.SharpCompress: { - var firstDirectoryResult = fullNames.Where(f => firstDirectory.Equals(Path.GetDirectoryName(f))) - .OrderByNatural(Path.GetFileNameWithoutExtension) - .FirstOrDefault(); + using var archive = ArchiveFactory.Open(archivePath); + var entryNames = archive.Entries.Where(archiveEntry => !archiveEntry.IsDirectory).Select(e => e.Key).ToList(); - if (!string.IsNullOrEmpty(firstDirectoryResult)) return firstDirectoryResult; + var entryName = FindCoverImageFilename(archivePath, entryNames); + var entry = archive.Entries.Single(e => e.Key == entryName); + + using var stream = entry.OpenEntryStream(); + return _imageService.WriteCoverThumbnail(stream, fileName, outputDirectory); } + case ArchiveLibrary.NotSupported: + _logger.LogWarning("[GetCoverImage] This archive cannot be read: {ArchivePath}. 
Defaulting to no cover image", archivePath); + return string.Empty; + default: + _logger.LogWarning("[GetCoverImage] There was an exception when reading archive stream: {ArchivePath}. Defaulting to no cover image", archivePath); + return string.Empty; } - - var result = fullNames - .OrderByNatural(Path.GetFileNameWithoutExtension) - .FirstOrDefault(); - - return string.IsNullOrEmpty(result) ? null : result; + } + catch (Exception ex) + { + _logger.LogWarning(ex, "[GetCoverImage] There was an exception when reading archive stream: {ArchivePath}. Defaulting to no cover image", archivePath); } + return string.Empty; + } - /// - /// Generates byte array of cover image. - /// Given a path to a compressed file , will ensure the first image (respects directory structure) is returned unless - /// a folder/cover.(image extension) exists in the the compressed file (if duplicate, the first is chosen) - /// - /// This skips over any __MACOSX folder/file iteration. - /// - /// This always creates a thumbnail - /// - /// File name to use based on context of entity. - /// Where to output the file, defaults to covers directory - /// - public string GetCoverImage(string archivePath, string fileName, string outputDirectory) + /// + /// Given a list of image paths (assume within an archive), find the filename that corresponds to the cover + /// + /// + /// + /// + public static string FindCoverImageFilename(string archivePath, IEnumerable entryNames) + { + var entryName = FindFolderEntry(entryNames) ?? FirstFileEntry(entryNames, Path.GetFileName(archivePath)); + return entryName; + } + + /// + /// Given an archive stream, will assess whether directory needs to be flattened so that the extracted archive files are directly + /// under extract path and not nested in subfolders. See Flatten method. 
+ /// + /// An opened archive stream + /// + public bool ArchiveNeedsFlattening(ZipArchive archive) + { + // Sometimes ZipArchive will list the directory and others it will just keep it in the FullName + return archive.Entries.Count > 0 && + !Path.HasExtension(archive.Entries.ElementAt(0).FullName) || + archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(e.FullName)); + } + + /// + /// Creates a zip file form the listed files and outputs to the temp folder. + /// + /// List of files to be zipped up. Should be full file paths. + /// Temp folder name to use for preparing the files. Will be created and deleted + /// Path to the temp zip + /// + public string CreateZipForDownload(IEnumerable files, string tempFolder) + { + var dateString = DateTime.Now.ToShortDateString().Replace("/", "_"); + + var tempLocation = Path.Join(_directoryService.TempDirectory, $"{tempFolder}_{dateString}"); + var potentialExistingFile = _directoryService.FileSystem.FileInfo.FromFileName(Path.Join(_directoryService.TempDirectory, $"kavita_{tempFolder}_{dateString}.zip")); + if (potentialExistingFile.Exists) { - if (archivePath == null || !IsValidArchive(archivePath)) return string.Empty; - try - { - var libraryHandler = CanOpen(archivePath); - switch (libraryHandler) - { - case ArchiveLibrary.Default: - { - using var archive = ZipFile.OpenRead(archivePath); - - var entryName = FindCoverImageFilename(archivePath, archive.Entries.Select(e => e.FullName)); - var entry = archive.Entries.Single(e => e.FullName == entryName); - - using var stream = entry.Open(); - return _imageService.WriteCoverThumbnail(stream, fileName, outputDirectory); - } - case ArchiveLibrary.SharpCompress: - { - using var archive = ArchiveFactory.Open(archivePath); - var entryNames = archive.Entries.Where(archiveEntry => !archiveEntry.IsDirectory).Select(e => e.Key).ToList(); - - var entryName = FindCoverImageFilename(archivePath, 
entryNames); - var entry = archive.Entries.Single(e => e.Key == entryName); - - using var stream = entry.OpenEntryStream(); - return _imageService.WriteCoverThumbnail(stream, fileName, outputDirectory); - } - case ArchiveLibrary.NotSupported: - _logger.LogWarning("[GetCoverImage] This archive cannot be read: {ArchivePath}. Defaulting to no cover image", archivePath); - return string.Empty; - default: - _logger.LogWarning("[GetCoverImage] There was an exception when reading archive stream: {ArchivePath}. Defaulting to no cover image", archivePath); - return string.Empty; - } - } - catch (Exception ex) - { - _logger.LogWarning(ex, "[GetCoverImage] There was an exception when reading archive stream: {ArchivePath}. Defaulting to no cover image", archivePath); - } - - return string.Empty; + // A previous download exists, just return it immediately + return potentialExistingFile.FullName; } - /// - /// Given a list of image paths (assume within an archive), find the filename that corresponds to the cover - /// - /// - /// - /// - public static string FindCoverImageFilename(string archivePath, IEnumerable entryNames) + _directoryService.ExistOrCreate(tempLocation); + + if (!_directoryService.CopyFilesToDirectory(files, tempLocation)) { - var entryName = FindFolderEntry(entryNames) ?? FirstFileEntry(entryNames, Path.GetFileName(archivePath)); - return entryName; + throw new KavitaException("Unable to copy files to temp directory archive download."); } - /// - /// Given an archive stream, will assess whether directory needs to be flattened so that the extracted archive files are directly - /// under extract path and not nested in subfolders. See Flatten method. 
- /// - /// An opened archive stream - /// - public bool ArchiveNeedsFlattening(ZipArchive archive) + var zipPath = Path.Join(_directoryService.TempDirectory, $"kavita_{tempFolder}_{dateString}.zip"); + try { - // Sometimes ZipArchive will list the directory and others it will just keep it in the FullName - return archive.Entries.Count > 0 && - !Path.HasExtension(archive.Entries.ElementAt(0).FullName) || - archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(e.FullName)); + ZipFile.CreateFromDirectory(tempLocation, zipPath); + } + catch (AggregateException ex) + { + _logger.LogError(ex, "There was an issue creating temp archive"); + throw new KavitaException("There was an issue creating temp archive"); } - /// - /// Creates a zip file form the listed files and outputs to the temp folder. - /// - /// List of files to be zipped up. Should be full file paths. - /// Temp folder name to use for preparing the files. 
Will be created and deleted - /// Path to the temp zip - /// - public string CreateZipForDownload(IEnumerable files, string tempFolder) + return zipPath; + } + + + /// + /// Test if the archive path exists and an archive + /// + /// + /// + public bool IsValidArchive(string archivePath) + { + if (!File.Exists(archivePath)) { - var dateString = DateTime.Now.ToShortDateString().Replace("/", "_"); - - var tempLocation = Path.Join(_directoryService.TempDirectory, $"{tempFolder}_{dateString}"); - var potentialExistingFile = _directoryService.FileSystem.FileInfo.FromFileName(Path.Join(_directoryService.TempDirectory, $"kavita_{tempFolder}_{dateString}.zip")); - if (potentialExistingFile.Exists) - { - // A previous download exists, just return it immediately - return potentialExistingFile.FullName; - } - - _directoryService.ExistOrCreate(tempLocation); - - if (!_directoryService.CopyFilesToDirectory(files, tempLocation)) - { - throw new KavitaException("Unable to copy files to temp directory archive download."); - } - - var zipPath = Path.Join(_directoryService.TempDirectory, $"kavita_{tempFolder}_{dateString}.zip"); - try - { - ZipFile.CreateFromDirectory(tempLocation, zipPath); - } - catch (AggregateException ex) - { - _logger.LogError(ex, "There was an issue creating temp archive"); - throw new KavitaException("There was an issue creating temp archive"); - } - - return zipPath; - } - - - /// - /// Test if the archive path exists and an archive - /// - /// - /// - public bool IsValidArchive(string archivePath) - { - if (!File.Exists(archivePath)) - { - _logger.LogWarning("Archive {ArchivePath} could not be found", archivePath); - return false; - } - - if (Tasks.Scanner.Parser.Parser.IsArchive(archivePath) || Tasks.Scanner.Parser.Parser.IsEpub(archivePath)) return true; - - _logger.LogWarning("Archive {ArchivePath} is not a valid archive", archivePath); + _logger.LogWarning("Archive {ArchivePath} could not be found", archivePath); return false; } - private static bool 
ValidComicInfoArchiveEntry(string fullName, string name) - { - var filenameWithoutExtension = Path.GetFileNameWithoutExtension(name).ToLower(); - return !Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(fullName) - && filenameWithoutExtension.Equals(ComicInfoFilename, StringComparison.InvariantCultureIgnoreCase) - && !filenameWithoutExtension.StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith) - && Tasks.Scanner.Parser.Parser.IsXml(name); - } + if (Tasks.Scanner.Parser.Parser.IsArchive(archivePath) || Tasks.Scanner.Parser.Parser.IsEpub(archivePath)) return true; - /// - /// This can be null if nothing is found or any errors occur during access - /// - /// - /// - public ComicInfo? GetComicInfo(string archivePath) - { - if (!IsValidArchive(archivePath)) return null; + _logger.LogWarning("Archive {ArchivePath} is not a valid archive", archivePath); + return false; + } - try + private static bool IsComicInfoArchiveEntry(string fullName, string name) + { + return !Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(fullName) + && name.Equals(ComicInfoFilename, StringComparison.OrdinalIgnoreCase) + && !name.StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith); + } + + /// + /// This can be null if nothing is found or any errors occur during access + /// + /// + /// + public ComicInfo? GetComicInfo(string archivePath) + { + if (!IsValidArchive(archivePath)) return null; + + try + { + if (!File.Exists(archivePath)) return null; + + var libraryHandler = CanOpen(archivePath); + switch (libraryHandler) { - if (!File.Exists(archivePath)) return null; - - var libraryHandler = CanOpen(archivePath); - switch (libraryHandler) + case ArchiveLibrary.Default: { - case ArchiveLibrary.Default: + using var archive = ZipFile.OpenRead(archivePath); + + var entry = archive.Entries.FirstOrDefault(x => (x.FullName ?? x.Name) == ComicInfoFilename) ?? 
+ archive.Entries.FirstOrDefault(x => IsComicInfoArchiveEntry(x.FullName, x.Name)); + if (entry != null) { - using var archive = ZipFile.OpenRead(archivePath); - - var entry = archive.Entries.FirstOrDefault(x => ValidComicInfoArchiveEntry(x.FullName, x.Name)); - if (entry != null) - { - using var stream = entry.Open(); - var serializer = new XmlSerializer(typeof(ComicInfo)); - var info = (ComicInfo) serializer.Deserialize(stream); - ComicInfo.CleanComicInfo(info); - return info; - } - - break; + using var stream = entry.Open(); + var serializer = new XmlSerializer(typeof(ComicInfo)); + var info = (ComicInfo) serializer.Deserialize(stream); + ComicInfo.CleanComicInfo(info); + return info; } - case ArchiveLibrary.SharpCompress: - { - using var archive = ArchiveFactory.Open(archivePath); - var entry = archive.Entries.FirstOrDefault(entry => - ValidComicInfoArchiveEntry(Path.GetDirectoryName(entry.Key), entry.Key)); - if (entry != null) - { - using var stream = entry.OpenEntryStream(); - var serializer = new XmlSerializer(typeof(ComicInfo)); - var info = (ComicInfo) serializer.Deserialize(stream); - ComicInfo.CleanComicInfo(info); - return info; - } - - break; - } - case ArchiveLibrary.NotSupported: - _logger.LogWarning("[GetComicInfo] This archive cannot be read: {ArchivePath}", archivePath); - return null; - default: - _logger.LogWarning( - "[GetComicInfo] There was an exception when reading archive stream: {ArchivePath}", - archivePath); - return null; + break; } - } - catch (Exception ex) - { - _logger.LogWarning(ex, "[GetComicInfo] There was an exception when reading archive stream: {Filepath}", archivePath); - } - - return null; - } - - - private void ExtractArchiveEntities(IEnumerable entries, string extractPath) - { - _directoryService.ExistOrCreate(extractPath); - foreach (var entry in entries) - { - entry.WriteToDirectory(extractPath, new ExtractionOptions() + case ArchiveLibrary.SharpCompress: { - ExtractFullPath = true, // Don't flatten, let the flatterner 
ensure correct order of nested folders - Overwrite = false - }); - } - } + using var archive = ArchiveFactory.Open(archivePath); + var entry = archive.Entries.FirstOrDefault(entry => entry.Key == ComicInfoFilename) ?? + archive.Entries.FirstOrDefault(entry => + IsComicInfoArchiveEntry(Path.GetDirectoryName(entry.Key), entry.Key)); - private void ExtractArchiveEntries(ZipArchive archive, string extractPath) - { - var needsFlattening = ArchiveNeedsFlattening(archive); - if (!archive.HasFiles() && !needsFlattening) return; - - archive.ExtractToDirectory(extractPath, true); - if (!needsFlattening) return; - - _logger.LogDebug("Extracted archive is nested in root folder, flattening..."); - _directoryService.Flatten(extractPath); - } - - /// - /// Extracts an archive to a temp cache directory. Returns path to new directory. If temp cache directory already exists, - /// will return that without performing an extraction. Returns empty string if there are any invalidations which would - /// prevent operations to perform correctly (missing archivePath file, empty archive, etc). - /// - /// A valid file to an archive file. 
- /// Path to extract to - /// - public void ExtractArchive(string archivePath, string extractPath) - { - if (!IsValidArchive(archivePath)) return; - - if (Directory.Exists(extractPath)) return; - - if (!_directoryService.FileSystem.File.Exists(archivePath)) - { - _logger.LogError("{Archive} does not exist on disk", archivePath); - throw new KavitaException($"{archivePath} does not exist on disk"); - } - - var sw = Stopwatch.StartNew(); - - try - { - var libraryHandler = CanOpen(archivePath); - switch (libraryHandler) - { - case ArchiveLibrary.Default: + if (entry != null) { - using var archive = ZipFile.OpenRead(archivePath); - ExtractArchiveEntries(archive, extractPath); - break; + using var stream = entry.OpenEntryStream(); + var serializer = new XmlSerializer(typeof(ComicInfo)); + var info = (ComicInfo) serializer.Deserialize(stream); + ComicInfo.CleanComicInfo(info); + return info; } - case ArchiveLibrary.SharpCompress: - { - using var archive = ArchiveFactory.Open(archivePath); - ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory - && !Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? 
string.Empty) - && Tasks.Scanner.Parser.Parser.IsImage(entry.Key)), extractPath); - break; - } - case ArchiveLibrary.NotSupported: - _logger.LogWarning("[ExtractArchive] This archive cannot be read: {ArchivePath}", archivePath); - return; - default: - _logger.LogWarning("[ExtractArchive] There was an exception when reading archive stream: {ArchivePath}", archivePath); - return; + + break; } + case ArchiveLibrary.NotSupported: + _logger.LogWarning("[GetComicInfo] This archive cannot be read: {ArchivePath}", archivePath); + return null; + default: + _logger.LogWarning( + "[GetComicInfo] There was an exception when reading archive stream: {ArchivePath}", + archivePath); + return null; + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "[GetComicInfo] There was an exception when reading archive stream: {Filepath}", archivePath); + } - } - catch (Exception e) + return null; + } + + + private void ExtractArchiveEntities(IEnumerable entries, string extractPath) + { + _directoryService.ExistOrCreate(extractPath); + foreach (var entry in entries) + { + entry.WriteToDirectory(extractPath, new ExtractionOptions() { - _logger.LogWarning(e, "[ExtractArchive] There was a problem extracting {ArchivePath} to {ExtractPath}",archivePath, extractPath); - throw new KavitaException( - $"There was an error when extracting {archivePath}. 
Check the file exists, has read permissions or the server OS can support all path characters."); - } - _logger.LogDebug("Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds); + ExtractFullPath = true, // Don't flatten, let the flatterner ensure correct order of nested folders + Overwrite = false + }); } } + + private void ExtractArchiveEntries(ZipArchive archive, string extractPath) + { + var needsFlattening = ArchiveNeedsFlattening(archive); + if (!archive.HasFiles() && !needsFlattening) return; + + archive.ExtractToDirectory(extractPath, true); + if (!needsFlattening) return; + + _logger.LogDebug("Extracted archive is nested in root folder, flattening..."); + _directoryService.Flatten(extractPath); + } + + /// + /// Extracts an archive to a temp cache directory. Returns path to new directory. If temp cache directory already exists, + /// will return that without performing an extraction. Returns empty string if there are any invalidations which would + /// prevent operations to perform correctly (missing archivePath file, empty archive, etc). + /// + /// A valid file to an archive file. 
+ /// Path to extract to + /// + public void ExtractArchive(string archivePath, string extractPath) + { + if (!IsValidArchive(archivePath)) return; + + if (Directory.Exists(extractPath)) return; + + if (!_directoryService.FileSystem.File.Exists(archivePath)) + { + _logger.LogError("{Archive} does not exist on disk", archivePath); + throw new KavitaException($"{archivePath} does not exist on disk"); + } + + var sw = Stopwatch.StartNew(); + + try + { + var libraryHandler = CanOpen(archivePath); + switch (libraryHandler) + { + case ArchiveLibrary.Default: + { + using var archive = ZipFile.OpenRead(archivePath); + ExtractArchiveEntries(archive, extractPath); + break; + } + case ArchiveLibrary.SharpCompress: + { + using var archive = ArchiveFactory.Open(archivePath); + ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory + && !Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty) + && Tasks.Scanner.Parser.Parser.IsImage(entry.Key)), extractPath); + break; + } + case ArchiveLibrary.NotSupported: + _logger.LogWarning("[ExtractArchive] This archive cannot be read: {ArchivePath}", archivePath); + return; + default: + _logger.LogWarning("[ExtractArchive] There was an exception when reading archive stream: {ArchivePath}", archivePath); + return; + } + + } + catch (Exception e) + { + _logger.LogWarning(e, "[ExtractArchive] There was a problem extracting {ArchivePath} to {ExtractPath}",archivePath, extractPath); + throw new KavitaException( + $"There was an error when extracting {archivePath}. 
Check the file exists, has read permissions or the server OS can support all path characters."); + } + _logger.LogDebug("Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds); + } } diff --git a/API/Services/BookService.cs b/API/Services/BookService.cs index d28183f9e..728b6f8ff 100644 --- a/API/Services/BookService.cs +++ b/API/Services/BookService.cs @@ -26,962 +26,957 @@ using VersOne.Epub; using VersOne.Epub.Options; using Image = SixLabors.ImageSharp.Image; -namespace API.Services +namespace API.Services; + +public interface IBookService { - public interface IBookService + int GetNumberOfPages(string filePath); + string GetCoverImage(string fileFilePath, string fileName, string outputDirectory); + Task> CreateKeyToPageMappingAsync(EpubBookRef book); + + /// + /// Scopes styles to .reading-section and replaces img src to the passed apiBase + /// + /// + /// + /// If the stylesheetHtml contains Import statements, when scoping the filename, scope needs to be wrt filepath. + /// Book Reference, needed for if you expect Import statements + /// + Task ScopeStyles(string stylesheetHtml, string apiBase, string filename, EpubBookRef book); + ComicInfo GetComicInfo(string filePath); + ParserInfo ParseInfo(string filePath); + /// + /// Extracts a PDF file's pages as images to an target directory + /// + /// + /// Where the files will be extracted to. If doesn't exist, will be created. + [Obsolete("This method of reading is no longer supported. 
Please use native pdf reader")] + void ExtractPdfImages(string fileFilePath, string targetDirectory); + + Task ScopePage(HtmlDocument doc, EpubBookRef book, string apiBase, HtmlNode body, Dictionary mappings, int page); + Task> GenerateTableOfContents(Chapter chapter); + + Task GetBookPage(int page, int chapterId, string cachedEpubPath, string baseUrl); +} + +public class BookService : IBookService +{ + private readonly ILogger _logger; + private readonly IDirectoryService _directoryService; + private readonly IImageService _imageService; + private readonly StylesheetParser _cssParser = new (); + private static readonly RecyclableMemoryStreamManager StreamManager = new (); + private const string CssScopeClass = ".book-content"; + private const string BookApiUrl = "book-resources?file="; + public static readonly EpubReaderOptions BookReaderOptions = new() { - int GetNumberOfPages(string filePath); - string GetCoverImage(string fileFilePath, string fileName, string outputDirectory); - Task> CreateKeyToPageMappingAsync(EpubBookRef book); + PackageReaderOptions = new PackageReaderOptions() + { + IgnoreMissingToc = true + } + }; - /// - /// Scopes styles to .reading-section and replaces img src to the passed apiBase - /// - /// - /// - /// If the stylesheetHtml contains Import statements, when scoping the filename, scope needs to be wrt filepath. - /// Book Reference, needed for if you expect Import statements - /// - Task ScopeStyles(string stylesheetHtml, string apiBase, string filename, EpubBookRef book); - ComicInfo GetComicInfo(string filePath); - ParserInfo ParseInfo(string filePath); - /// - /// Extracts a PDF file's pages as images to an target directory - /// - /// - /// Where the files will be extracted to. If doesn't exist, will be created. - [Obsolete("This method of reading is no longer supported. 
Please use native pdf reader")] - void ExtractPdfImages(string fileFilePath, string targetDirectory); - - Task ScopePage(HtmlDocument doc, EpubBookRef book, string apiBase, HtmlNode body, Dictionary mappings, int page); - Task> GenerateTableOfContents(Chapter chapter); - - Task GetBookPage(int page, int chapterId, string cachedEpubPath, string baseUrl); + public BookService(ILogger logger, IDirectoryService directoryService, IImageService imageService) + { + _logger = logger; + _directoryService = directoryService; + _imageService = imageService; } - public class BookService : IBookService + private static bool HasClickableHrefPart(HtmlNode anchor) { - private readonly ILogger _logger; - private readonly IDirectoryService _directoryService; - private readonly IImageService _imageService; - private readonly StylesheetParser _cssParser = new (); - private static readonly RecyclableMemoryStreamManager StreamManager = new (); - private const string CssScopeClass = ".book-content"; - private const string BookApiUrl = "book-resources?file="; - public static readonly EpubReaderOptions BookReaderOptions = new() - { - PackageReaderOptions = new PackageReaderOptions() - { - IgnoreMissingToc = true - } - }; + return anchor.GetAttributeValue("href", string.Empty).Contains("#") + && anchor.GetAttributeValue("tabindex", string.Empty) != "-1" + && anchor.GetAttributeValue("role", string.Empty) != "presentation"; + } - public BookService(ILogger logger, IDirectoryService directoryService, IImageService imageService) + public static string GetContentType(EpubContentType type) + { + string contentType; + switch (type) { - _logger = logger; - _directoryService = directoryService; - _imageService = imageService; + case EpubContentType.IMAGE_GIF: + contentType = "image/gif"; + break; + case EpubContentType.IMAGE_PNG: + contentType = "image/png"; + break; + case EpubContentType.IMAGE_JPEG: + contentType = "image/jpeg"; + break; + case EpubContentType.FONT_OPENTYPE: + contentType = 
"font/otf"; + break; + case EpubContentType.FONT_TRUETYPE: + contentType = "font/ttf"; + break; + case EpubContentType.IMAGE_SVG: + contentType = "image/svg+xml"; + break; + default: + contentType = "application/octet-stream"; + break; } - private static bool HasClickableHrefPart(HtmlNode anchor) + return contentType; + } + + private static void UpdateLinks(HtmlNode anchor, Dictionary mappings, int currentPage) + { + if (anchor.Name != "a") return; + var hrefParts = CleanContentKeys(anchor.GetAttributeValue("href", string.Empty)) + .Split("#"); + // Some keys get uri encoded when parsed, so replace any of those characters with original + var mappingKey = HttpUtility.UrlDecode(hrefParts[0]); + + if (!mappings.ContainsKey(mappingKey)) { - return anchor.GetAttributeValue("href", string.Empty).Contains("#") - && anchor.GetAttributeValue("tabindex", string.Empty) != "-1" - && anchor.GetAttributeValue("role", string.Empty) != "presentation"; + if (HasClickableHrefPart(anchor)) + { + var part = hrefParts.Length > 1 + ? 
hrefParts[1] + : anchor.GetAttributeValue("href", string.Empty); + anchor.Attributes.Add("kavita-page", $"{currentPage}"); + anchor.Attributes.Add("kavita-part", part); + anchor.Attributes.Remove("href"); + anchor.Attributes.Add("href", "javascript:void(0)"); + } + else + { + anchor.Attributes.Add("target", "_blank"); + anchor.Attributes.Add("rel", "noreferrer noopener"); + } + + return; } - public static string GetContentType(EpubContentType type) + var mappedPage = mappings[mappingKey]; + anchor.Attributes.Add("kavita-page", $"{mappedPage}"); + if (hrefParts.Length > 1) { - string contentType; - switch (type) - { - case EpubContentType.IMAGE_GIF: - contentType = "image/gif"; - break; - case EpubContentType.IMAGE_PNG: - contentType = "image/png"; - break; - case EpubContentType.IMAGE_JPEG: - contentType = "image/jpeg"; - break; - case EpubContentType.FONT_OPENTYPE: - contentType = "font/otf"; - break; - case EpubContentType.FONT_TRUETYPE: - contentType = "font/ttf"; - break; - case EpubContentType.IMAGE_SVG: - contentType = "image/svg+xml"; - break; - default: - contentType = "application/octet-stream"; - break; - } - - return contentType; + anchor.Attributes.Add("kavita-part", + hrefParts[1]); } - private static void UpdateLinks(HtmlNode anchor, Dictionary mappings, int currentPage) + anchor.Attributes.Remove("href"); + anchor.Attributes.Add("href", "javascript:void(0)"); + } + + public async Task ScopeStyles(string stylesheetHtml, string apiBase, string filename, EpubBookRef book) + { + // @Import statements will be handled by browser, so we must inline the css into the original file that request it, so they can be Scoped + var prepend = filename.Length > 0 ? 
filename.Replace(Path.GetFileName(filename), string.Empty) : string.Empty; + var importBuilder = new StringBuilder(); + foreach (Match match in Tasks.Scanner.Parser.Parser.CssImportUrlRegex.Matches(stylesheetHtml)) { - if (anchor.Name != "a") return; - var hrefParts = CleanContentKeys(anchor.GetAttributeValue("href", string.Empty)) - .Split("#"); - // Some keys get uri encoded when parsed, so replace any of those characters with original - var mappingKey = HttpUtility.UrlDecode(hrefParts[0]); + if (!match.Success) continue; - if (!mappings.ContainsKey(mappingKey)) + var importFile = match.Groups["Filename"].Value; + var key = CleanContentKeys(importFile); + if (!key.Contains(prepend)) { - if (HasClickableHrefPart(anchor)) - { - var part = hrefParts.Length > 1 - ? hrefParts[1] - : anchor.GetAttributeValue("href", string.Empty); - anchor.Attributes.Add("kavita-page", $"{currentPage}"); - anchor.Attributes.Add("kavita-part", part); - anchor.Attributes.Remove("href"); - anchor.Attributes.Add("href", "javascript:void(0)"); - } - else - { - anchor.Attributes.Add("target", "_blank"); - anchor.Attributes.Add("rel", "noreferrer noopener"); - } - - return; + key = prepend + key; } + if (!book.Content.AllFiles.ContainsKey(key)) continue; - var mappedPage = mappings[mappingKey]; - anchor.Attributes.Add("kavita-page", $"{mappedPage}"); - if (hrefParts.Length > 1) - { - anchor.Attributes.Add("kavita-part", - hrefParts[1]); - } - - anchor.Attributes.Remove("href"); - anchor.Attributes.Add("href", "javascript:void(0)"); + var bookFile = book.Content.AllFiles[key]; + var content = await bookFile.ReadContentAsBytesAsync(); + importBuilder.Append(Encoding.UTF8.GetString(content)); } - public async Task ScopeStyles(string stylesheetHtml, string apiBase, string filename, EpubBookRef book) + stylesheetHtml = stylesheetHtml.Insert(0, importBuilder.ToString()); + + EscapeCssImportReferences(ref stylesheetHtml, apiBase, prepend); + + EscapeFontFamilyReferences(ref stylesheetHtml, apiBase, 
prepend); + + + // Check if there are any background images and rewrite those urls + EscapeCssImageReferences(ref stylesheetHtml, apiBase, book); + + var styleContent = RemoveWhiteSpaceFromStylesheets(stylesheetHtml); + + styleContent = styleContent.Replace("body", CssScopeClass); + + if (string.IsNullOrEmpty(styleContent)) return string.Empty; + + var stylesheet = await _cssParser.ParseAsync(styleContent); + foreach (var styleRule in stylesheet.StyleRules) { - // @Import statements will be handled by browser, so we must inline the css into the original file that request it, so they can be Scoped - var prepend = filename.Length > 0 ? filename.Replace(Path.GetFileName(filename), string.Empty) : string.Empty; - var importBuilder = new StringBuilder(); - foreach (Match match in Tasks.Scanner.Parser.Parser.CssImportUrlRegex.Matches(stylesheetHtml)) + if (styleRule.Selector.Text == CssScopeClass) continue; + if (styleRule.Selector.Text.Contains(",")) { - if (!match.Success) continue; + styleRule.Text = styleRule.Text.Replace(styleRule.SelectorText, + string.Join(", ", + styleRule.Selector.Text.Split(",").Select(s => $"{CssScopeClass} " + s))); + continue; + } + styleRule.Text = $"{CssScopeClass} " + styleRule.Text; + } + return RemoveWhiteSpaceFromStylesheets(stylesheet.ToCss()); + } - var importFile = match.Groups["Filename"].Value; - var key = CleanContentKeys(importFile); - if (!key.Contains(prepend)) - { - key = prepend + key; - } - if (!book.Content.AllFiles.ContainsKey(key)) continue; + private static void EscapeCssImportReferences(ref string stylesheetHtml, string apiBase, string prepend) + { + foreach (Match match in Tasks.Scanner.Parser.Parser.CssImportUrlRegex.Matches(stylesheetHtml)) + { + if (!match.Success) continue; + var importFile = match.Groups["Filename"].Value; + stylesheetHtml = stylesheetHtml.Replace(importFile, apiBase + prepend + importFile); + } + } - var bookFile = book.Content.AllFiles[key]; - var content = await 
bookFile.ReadContentAsBytesAsync(); - importBuilder.Append(Encoding.UTF8.GetString(content)); + private static void EscapeFontFamilyReferences(ref string stylesheetHtml, string apiBase, string prepend) + { + foreach (Match match in Tasks.Scanner.Parser.Parser.FontSrcUrlRegex.Matches(stylesheetHtml)) + { + if (!match.Success) continue; + var importFile = match.Groups["Filename"].Value; + stylesheetHtml = stylesheetHtml.Replace(importFile, apiBase + prepend + importFile); + } + } + + private static void EscapeCssImageReferences(ref string stylesheetHtml, string apiBase, EpubBookRef book) + { + var matches = Tasks.Scanner.Parser.Parser.CssImageUrlRegex.Matches(stylesheetHtml); + foreach (Match match in matches) + { + if (!match.Success) continue; + + var importFile = match.Groups["Filename"].Value; + var key = CleanContentKeys(importFile); + if (!book.Content.AllFiles.ContainsKey(key)) continue; + + stylesheetHtml = stylesheetHtml.Replace(importFile, apiBase + key); + } + } + + private static void ScopeImages(HtmlDocument doc, EpubBookRef book, string apiBase) + { + var images = doc.DocumentNode.SelectNodes("//img") + ?? doc.DocumentNode.SelectNodes("//image") ?? 
doc.DocumentNode.SelectNodes("//svg"); + + if (images == null) return; + + + var parent = images.First().ParentNode; + + foreach (var image in images) + { + + string key = null; + if (image.Attributes["src"] != null) + { + key = "src"; + } + else if (image.Attributes["xlink:href"] != null) + { + key = "xlink:href"; } - stylesheetHtml = stylesheetHtml.Insert(0, importBuilder.ToString()); + if (string.IsNullOrEmpty(key)) continue; - EscapeCssImportReferences(ref stylesheetHtml, apiBase, prepend); + var imageFile = GetKeyForImage(book, image.Attributes[key].Value); + image.Attributes.Remove(key); + // UrlEncode here to transform ../ into an escaped version, which avoids blocking on nginx + image.Attributes.Add(key, $"{apiBase}" + HttpUtility.UrlEncode(imageFile)); - EscapeFontFamilyReferences(ref stylesheetHtml, apiBase, prepend); - - - // Check if there are any background images and rewrite those urls - EscapeCssImageReferences(ref stylesheetHtml, apiBase, book); - - var styleContent = RemoveWhiteSpaceFromStylesheets(stylesheetHtml); - - styleContent = styleContent.Replace("body", CssScopeClass); - - if (string.IsNullOrEmpty(styleContent)) return string.Empty; - - var stylesheet = await _cssParser.ParseAsync(styleContent); - foreach (var styleRule in stylesheet.StyleRules) - { - if (styleRule.Selector.Text == CssScopeClass) continue; - if (styleRule.Selector.Text.Contains(",")) - { - styleRule.Text = styleRule.Text.Replace(styleRule.SelectorText, - string.Join(", ", - styleRule.Selector.Text.Split(",").Select(s => $"{CssScopeClass} " + s))); - continue; - } - styleRule.Text = $"{CssScopeClass} " + styleRule.Text; - } - return RemoveWhiteSpaceFromStylesheets(stylesheet.ToCss()); + // Add a custom class that the reader uses to ensure images stay within reader + parent.AddClass("kavita-scale-width-container"); + image.AddClass("kavita-scale-width"); } - private static void EscapeCssImportReferences(ref string stylesheetHtml, string apiBase, string prepend) + } + + /// + 
/// Returns the image key associated with the file. Contains some basic fallback logic. + /// + /// + /// + /// + private static string GetKeyForImage(EpubBookRef book, string imageFile) + { + if (book.Content.Images.ContainsKey(imageFile)) return imageFile; + + var correctedKey = book.Content.Images.Keys.SingleOrDefault(s => s.EndsWith(imageFile)); + if (correctedKey != null) { - foreach (Match match in Tasks.Scanner.Parser.Parser.CssImportUrlRegex.Matches(stylesheetHtml)) - { - if (!match.Success) continue; - var importFile = match.Groups["Filename"].Value; - stylesheetHtml = stylesheetHtml.Replace(importFile, apiBase + prepend + importFile); - } + imageFile = correctedKey; } - - private static void EscapeFontFamilyReferences(ref string stylesheetHtml, string apiBase, string prepend) + else if (imageFile.StartsWith("..")) { - foreach (Match match in Tasks.Scanner.Parser.Parser.FontSrcUrlRegex.Matches(stylesheetHtml)) - { - if (!match.Success) continue; - var importFile = match.Groups["Filename"].Value; - stylesheetHtml = stylesheetHtml.Replace(importFile, apiBase + prepend + importFile); - } - } - - private static void EscapeCssImageReferences(ref string stylesheetHtml, string apiBase, EpubBookRef book) - { - var matches = Tasks.Scanner.Parser.Parser.CssImageUrlRegex.Matches(stylesheetHtml); - foreach (Match match in matches) - { - if (!match.Success) continue; - - var importFile = match.Groups["Filename"].Value; - var key = CleanContentKeys(importFile); - if (!book.Content.AllFiles.ContainsKey(key)) continue; - - stylesheetHtml = stylesheetHtml.Replace(importFile, apiBase + key); - } - } - - private static void ScopeImages(HtmlDocument doc, EpubBookRef book, string apiBase) - { - var images = doc.DocumentNode.SelectNodes("//img") - ?? doc.DocumentNode.SelectNodes("//image") ?? 
doc.DocumentNode.SelectNodes("//svg"); - - if (images == null) return; - - - var parent = images.First().ParentNode; - - foreach (var image in images) - { - - string key = null; - if (image.Attributes["src"] != null) - { - key = "src"; - } - else if (image.Attributes["xlink:href"] != null) - { - key = "xlink:href"; - } - - if (string.IsNullOrEmpty(key)) continue; - - var imageFile = GetKeyForImage(book, image.Attributes[key].Value); - image.Attributes.Remove(key); - // UrlEncode here to transform ../ into an escaped version, which avoids blocking on nginx - image.Attributes.Add(key, $"{apiBase}" + HttpUtility.UrlEncode(imageFile)); - - // Add a custom class that the reader uses to ensure images stay within reader - parent.AddClass("kavita-scale-width-container"); - image.AddClass("kavita-scale-width"); - } - - } - - /// - /// Returns the image key associated with the file. Contains some basic fallback logic. - /// - /// - /// - /// - private static string GetKeyForImage(EpubBookRef book, string imageFile) - { - if (book.Content.Images.ContainsKey(imageFile)) return imageFile; - - var correctedKey = book.Content.Images.Keys.SingleOrDefault(s => s.EndsWith(imageFile)); + // There are cases where the key is defined static like OEBPS/Images/1-4.jpg but reference is ../Images/1-4.jpg + correctedKey = + book.Content.Images.Keys.SingleOrDefault(s => s.EndsWith(imageFile.Replace("..", string.Empty))); if (correctedKey != null) { imageFile = correctedKey; } - else if (imageFile.StartsWith("..")) - { - // There are cases where the key is defined static like OEBPS/Images/1-4.jpg but reference is ../Images/1-4.jpg - correctedKey = - book.Content.Images.Keys.SingleOrDefault(s => s.EndsWith(imageFile.Replace("..", string.Empty))); - if (correctedKey != null) - { - imageFile = correctedKey; - } - } - - return imageFile; } - private static string PrepareFinalHtml(HtmlDocument doc, HtmlNode body) - { - // Check if any classes on the html node (some r2l books do this) and move them 
to body tag for scoping - var htmlNode = doc.DocumentNode.SelectSingleNode("//html"); - if (htmlNode == null || !htmlNode.Attributes.Contains("class")) return body.InnerHtml; + return imageFile; + } - var bodyClasses = body.Attributes.Contains("class") ? body.Attributes["class"].Value : string.Empty; - var classes = htmlNode.Attributes["class"].Value + " " + bodyClasses; - body.Attributes.Add("class", $"{classes}"); - // I actually need the body tag itself for the classes, so i will create a div and put the body stuff there. - return $"
{body.InnerHtml}
"; + private static string PrepareFinalHtml(HtmlDocument doc, HtmlNode body) + { + // Check if any classes on the html node (some r2l books do this) and move them to body tag for scoping + var htmlNode = doc.DocumentNode.SelectSingleNode("//html"); + if (htmlNode == null || !htmlNode.Attributes.Contains("class")) return body.InnerHtml; + + var bodyClasses = body.Attributes.Contains("class") ? body.Attributes["class"].Value : string.Empty; + var classes = htmlNode.Attributes["class"].Value + " " + bodyClasses; + body.Attributes.Add("class", $"{classes}"); + // I actually need the body tag itself for the classes, so i will create a div and put the body stuff there. + return $"
{body.InnerHtml}
"; + } + + private static void RewriteAnchors(int page, HtmlDocument doc, Dictionary mappings) + { + var anchors = doc.DocumentNode.SelectNodes("//a"); + if (anchors == null) return; + + foreach (var anchor in anchors) + { + UpdateLinks(anchor, mappings, page); } + } - private static void RewriteAnchors(int page, HtmlDocument doc, Dictionary mappings) + private async Task InlineStyles(HtmlDocument doc, EpubBookRef book, string apiBase, HtmlNode body) + { + var inlineStyles = doc.DocumentNode.SelectNodes("//style"); + if (inlineStyles != null) { - var anchors = doc.DocumentNode.SelectNodes("//a"); - if (anchors == null) return; - - foreach (var anchor in anchors) + foreach (var inlineStyle in inlineStyles) { - UpdateLinks(anchor, mappings, page); + var styleContent = await ScopeStyles(inlineStyle.InnerHtml, apiBase, "", book); + body.PrependChild(HtmlNode.CreateNode($"")); } } - private async Task InlineStyles(HtmlDocument doc, EpubBookRef book, string apiBase, HtmlNode body) + var styleNodes = doc.DocumentNode.SelectNodes("/html/head/link"); + if (styleNodes != null) { - var inlineStyles = doc.DocumentNode.SelectNodes("//style"); - if (inlineStyles != null) + foreach (var styleLinks in styleNodes) { - foreach (var inlineStyle in inlineStyles) + var key = CleanContentKeys(styleLinks.Attributes["href"].Value); + // Some epubs are malformed the key in content.opf might be: content/resources/filelist_0_0.xml but the actual html links to resources/filelist_0_0.xml + // In this case, we will do a search for the key that ends with + if (!book.Content.Css.ContainsKey(key)) { - var styleContent = await ScopeStyles(inlineStyle.InnerHtml, apiBase, "", book); - body.PrependChild(HtmlNode.CreateNode($"")); - } - } - - var styleNodes = doc.DocumentNode.SelectNodes("/html/head/link"); - if (styleNodes != null) - { - foreach (var styleLinks in styleNodes) - { - var key = CleanContentKeys(styleLinks.Attributes["href"].Value); - // Some epubs are malformed the key in content.opf 
might be: content/resources/filelist_0_0.xml but the actual html links to resources/filelist_0_0.xml - // In this case, we will do a search for the key that ends with - if (!book.Content.Css.ContainsKey(key)) + var correctedKey = book.Content.Css.Keys.SingleOrDefault(s => s.EndsWith(key)); + if (correctedKey == null) { - var correctedKey = book.Content.Css.Keys.SingleOrDefault(s => s.EndsWith(key)); - if (correctedKey == null) - { - _logger.LogError("Epub is Malformed, key: {Key} is not matching OPF file", key); - continue; - } - - key = correctedKey; + _logger.LogError("Epub is Malformed, key: {Key} is not matching OPF file", key); + continue; } - try - { - var cssFile = book.Content.Css[key]; - - var styleContent = await ScopeStyles(await cssFile.ReadContentAsync(), apiBase, - cssFile.FileName, book); - if (styleContent != null) - { - body.PrependChild(HtmlNode.CreateNode($"")); - } - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an error reading css file for inlining likely due to a key mismatch in metadata"); - } - } - } - } - - public ComicInfo GetComicInfo(string filePath) - { - if (!IsValidFile(filePath) || Tasks.Scanner.Parser.Parser.IsPdf(filePath)) return null; - - try - { - using var epubBook = EpubReader.OpenBook(filePath, BookReaderOptions); - var publicationDate = - epubBook.Schema.Package.Metadata.Dates.FirstOrDefault(date => date.Event == "publication")?.Date; - - if (string.IsNullOrEmpty(publicationDate)) - { - publicationDate = epubBook.Schema.Package.Metadata.Dates.FirstOrDefault()?.Date; - } - var dateParsed = DateTime.TryParse(publicationDate, out var date); - var year = 0; - var month = 0; - var day = 0; - switch (dateParsed) - { - case true: - year = date.Year; - month = date.Month; - day = date.Day; - break; - case false when !string.IsNullOrEmpty(publicationDate) && publicationDate.Length == 4: - int.TryParse(publicationDate, out year); - break; + key = correctedKey; } - var info = new ComicInfo() - { - Summary = 
epubBook.Schema.Package.Metadata.Description, - Writer = string.Join(",", epubBook.Schema.Package.Metadata.Creators.Select(c => Tasks.Scanner.Parser.Parser.CleanAuthor(c.Creator))), - Publisher = string.Join(",", epubBook.Schema.Package.Metadata.Publishers), - Month = month, - Day = day, - Year = year, - Title = epubBook.Title, - Genre = string.Join(",", epubBook.Schema.Package.Metadata.Subjects.Select(s => s.ToLower().Trim())), - LanguageISO = epubBook.Schema.Package.Metadata.Languages.FirstOrDefault() ?? string.Empty - }; - ComicInfo.CleanComicInfo(info); - - // Parse tags not exposed via Library - foreach (var metadataItem in epubBook.Schema.Package.Metadata.MetaItems) - { - switch (metadataItem.Name) - { - case "calibre:rating": - info.UserRating = float.Parse(metadataItem.Content); - break; - case "calibre:title_sort": - info.TitleSort = metadataItem.Content; - break; - } - } - - return info; - } - catch (Exception ex) - { - _logger.LogWarning(ex, "[GetComicInfo] There was an exception getting metadata"); - } - - return null; - } - - private bool IsValidFile(string filePath) - { - if (!File.Exists(filePath)) - { - _logger.LogWarning("[BookService] Book {EpubFile} could not be found", filePath); - return false; - } - - if (Tasks.Scanner.Parser.Parser.IsBook(filePath)) return true; - - _logger.LogWarning("[BookService] Book {EpubFile} is not a valid EPUB/PDF", filePath); - return false; - } - - public int GetNumberOfPages(string filePath) - { - if (!IsValidFile(filePath)) return 0; - - try - { - if (Tasks.Scanner.Parser.Parser.IsPdf(filePath)) - { - using var docReader = DocLib.Instance.GetDocReader(filePath, new PageDimensions(1080, 1920)); - return docReader.GetPageCount(); - } - - using var epubBook = EpubReader.OpenBook(filePath, BookReaderOptions); - return epubBook.Content.Html.Count; - } - catch (Exception ex) - { - _logger.LogWarning(ex, "[BookService] There was an exception getting number of pages, defaulting to 0"); - } - - return 0; - } - - public 
static string EscapeTags(string content) - { - content = Regex.Replace(content, @")", ""); - content = Regex.Replace(content, @")", ""); - return content; - } - - /// - /// Removes all leading ../ - /// - /// - /// - public static string CleanContentKeys(string key) - { - return key.Replace("../", string.Empty); - } - - public async Task> CreateKeyToPageMappingAsync(EpubBookRef book) - { - var dict = new Dictionary(); - var pageCount = 0; - foreach (var contentFileRef in await book.GetReadingOrderAsync()) - { - if (contentFileRef.ContentType != EpubContentType.XHTML_1_1) continue; - dict.Add(contentFileRef.FileName, pageCount); - pageCount += 1; - } - - return dict; - } - - /// - /// Parses out Title from book. Chapters and Volumes will always be "0". If there is any exception reading book (malformed books) - /// then null is returned. This expects only an epub file - /// - /// - /// - public ParserInfo ParseInfo(string filePath) - { - if (!Tasks.Scanner.Parser.Parser.IsEpub(filePath)) return null; - - try - { - using var epubBook = EpubReader.OpenBook(filePath, BookReaderOptions); - - // - // - // If all three are present, we can take that over dc:title and format as: - // Series = The Dark Tower, Volume = 5, Filename as "Wolves of the Calla" - // In addition, the following can exist and should parse as a series (EPUB 3.2 spec) - // - // The Lord of the Rings - // - // set - // 2 try { - var seriesIndex = string.Empty; - var series = string.Empty; - var specialName = string.Empty; - var groupPosition = string.Empty; - var titleSort = string.Empty; + var cssFile = book.Content.Css[key]; - - foreach (var metadataItem in epubBook.Schema.Package.Metadata.MetaItems) + var styleContent = await ScopeStyles(await cssFile.ReadContentAsync(), apiBase, + cssFile.FileName, book); + if (styleContent != null) { - // EPUB 2 and 3 - switch (metadataItem.Name) - { - case "calibre:series_index": - seriesIndex = metadataItem.Content; - break; - case "calibre:series": - series = 
metadataItem.Content; - break; - case "calibre:title_sort": - specialName = metadataItem.Content; - titleSort = metadataItem.Content; - break; - } - - // EPUB 3.2+ only - switch (metadataItem.Property) - { - case "group-position": - seriesIndex = metadataItem.Content; - break; - case "belongs-to-collection": - series = metadataItem.Content; - break; - case "collection-type": - groupPosition = metadataItem.Content; - break; - } - } - - if (!string.IsNullOrEmpty(series) && !string.IsNullOrEmpty(seriesIndex)) - { - if (string.IsNullOrEmpty(specialName)) - { - specialName = epubBook.Title; - } - var info = new ParserInfo() - { - Chapters = Tasks.Scanner.Parser.Parser.DefaultChapter, - Edition = string.Empty, - Format = MangaFormat.Epub, - Filename = Path.GetFileName(filePath), - Title = specialName?.Trim(), - FullFilePath = filePath, - IsSpecial = false, - Series = series.Trim(), - Volumes = seriesIndex - }; - - // Don't set titleSort if the book belongs to a group - if (!string.IsNullOrEmpty(titleSort) && string.IsNullOrEmpty(seriesIndex) && (groupPosition.Equals("series") || groupPosition.Equals("set"))) - { - info.SeriesSort = titleSort; - } - - return info; + body.PrependChild(HtmlNode.CreateNode($"")); } } - catch (Exception) + catch (Exception ex) { - // Swallow exception + _logger.LogError(ex, "There was an error reading css file for inlining likely due to a key mismatch in metadata"); } - - return new ParserInfo() - { - Chapters = Tasks.Scanner.Parser.Parser.DefaultChapter, - Edition = string.Empty, - Format = MangaFormat.Epub, - Filename = Path.GetFileName(filePath), - Title = epubBook.Title.Trim(), - FullFilePath = filePath, - IsSpecial = false, - Series = epubBook.Title.Trim(), - Volumes = Tasks.Scanner.Parser.Parser.DefaultVolume, - }; - } - catch (Exception ex) - { - _logger.LogWarning(ex, "[BookService] There was an exception when opening epub book: {FileName}", filePath); - } - - return null; - } - - /// - /// Extracts a pdf into images to a target 
directory. Uses multi-threaded implementation since docnet is slow normally. - /// - /// - /// - public void ExtractPdfImages(string fileFilePath, string targetDirectory) - { - _directoryService.ExistOrCreate(targetDirectory); - - using var docReader = DocLib.Instance.GetDocReader(fileFilePath, new PageDimensions(1080, 1920)); - var pages = docReader.GetPageCount(); - Parallel.For(0, pages, pageNumber => - { - using var stream = StreamManager.GetStream("BookService.GetPdfPage"); - GetPdfPage(docReader, pageNumber, stream); - using var fileStream = File.Create(Path.Combine(targetDirectory, "Page-" + pageNumber + ".png")); - stream.Seek(0, SeekOrigin.Begin); - stream.CopyTo(fileStream); - }); - } - - /// - /// Responsible to scope all the css, links, tags, etc to prepare a self contained html file for the page - /// - /// Html Doc that will be appended to - /// Underlying epub - /// API Url for file loading to pass through - /// Body element from the epub - /// Epub mappings - /// Page number we are loading - /// - public async Task ScopePage(HtmlDocument doc, EpubBookRef book, string apiBase, HtmlNode body, Dictionary mappings, int page) - { - await InlineStyles(doc, book, apiBase, body); - - RewriteAnchors(page, doc, mappings); - - ScopeImages(doc, book, apiBase); - - return PrepareFinalHtml(doc, body); - } - - /// - /// This will return a list of mappings from ID -> page num. ID will be the xhtml key and page num will be the reading order - /// this is used to rewrite anchors in the book text so that we always load properly in our reader. 
- /// - /// Chapter with at least one file - /// - public async Task> GenerateTableOfContents(Chapter chapter) - { - using var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath, BookReaderOptions); - var mappings = await CreateKeyToPageMappingAsync(book); - - var navItems = await book.GetNavigationAsync(); - var chaptersList = new List(); - - foreach (var navigationItem in navItems) - { - if (navigationItem.NestedItems.Count == 0) - { - CreateToCChapter(navigationItem, Array.Empty(), chaptersList, mappings); - continue; - } - - var nestedChapters = new List(); - - foreach (var nestedChapter in navigationItem.NestedItems.Where(n => n.Link != null)) - { - var key = BookService.CleanContentKeys(nestedChapter.Link.ContentFileName); - if (mappings.ContainsKey(key)) - { - nestedChapters.Add(new BookChapterItem() - { - Title = nestedChapter.Title, - Page = mappings[key], - Part = nestedChapter.Link.Anchor ?? string.Empty, - Children = new List() - }); - } - } - - CreateToCChapter(navigationItem, nestedChapters, chaptersList, mappings); - } - - if (chaptersList.Count != 0) return chaptersList; - // Generate from TOC - var tocPage = book.Content.Html.Keys.FirstOrDefault(k => k.ToUpper().Contains("TOC")); - if (tocPage == null) return chaptersList; - - // Find all anchor tags, for each anchor we get inner text, to lower then title case on UI. 
Get href and generate page content - var doc = new HtmlDocument(); - var content = await book.Content.Html[tocPage].ReadContentAsync(); - doc.LoadHtml(content); - var anchors = doc.DocumentNode.SelectNodes("//a"); - if (anchors == null) return chaptersList; - - foreach (var anchor in anchors) - { - if (!anchor.Attributes.Contains("href")) continue; - - var key = BookService.CleanContentKeys(anchor.Attributes["href"].Value).Split("#")[0]; - if (!mappings.ContainsKey(key)) - { - // Fallback to searching for key (bad epub metadata) - var correctedKey = book.Content.Html.Keys.SingleOrDefault(s => s.EndsWith(key)); - if (!string.IsNullOrEmpty(correctedKey)) - { - key = correctedKey; - } - } - - if (string.IsNullOrEmpty(key) || !mappings.ContainsKey(key)) continue; - var part = string.Empty; - if (anchor.Attributes["href"].Value.Contains('#')) - { - part = anchor.Attributes["href"].Value.Split("#")[1]; - } - chaptersList.Add(new BookChapterItem() - { - Title = anchor.InnerText, - Page = mappings[key], - Part = part, - Children = new List() - }); - } - - return chaptersList; - } - - /// - /// This returns a single page within the epub book. All html will be rewritten to be scoped within our reader, - /// all css is scoped, etc. 
- /// - /// The requested page - /// The chapterId - /// The path to the cached epub file - /// The API base for Kavita, to rewrite urls to so we load though our endpoint - /// Full epub HTML Page, scoped to Kavita's reader - /// All exceptions throw this - public async Task GetBookPage(int page, int chapterId, string cachedEpubPath, string baseUrl) - { - using var book = await EpubReader.OpenBookAsync(cachedEpubPath, BookReaderOptions); - var mappings = await CreateKeyToPageMappingAsync(book); - var apiBase = baseUrl + "book/" + chapterId + "/" + BookApiUrl; - - var counter = 0; - var doc = new HtmlDocument {OptionFixNestedTags = true}; - - - var bookPages = await book.GetReadingOrderAsync(); - foreach (var contentFileRef in bookPages) - { - if (page != counter) - { - counter++; - continue; - } - - var content = await contentFileRef.ReadContentAsync(); - if (contentFileRef.ContentType != EpubContentType.XHTML_1_1) return content; - - // In more cases than not, due to this being XML not HTML, we need to escape the script tags. - content = BookService.EscapeTags(content); - - doc.LoadHtml(content); - var body = doc.DocumentNode.SelectSingleNode("//body"); - - if (body == null) - { - if (doc.ParseErrors.Any()) - { - LogBookErrors(book, contentFileRef, doc); - throw new KavitaException("The file is malformed! Cannot read."); - } - _logger.LogError("{FilePath} has no body tag! Generating one for support. 
Book may be skewed", book.FilePath); - doc.DocumentNode.SelectSingleNode("/html").AppendChild(HtmlNode.CreateNode("")); - body = doc.DocumentNode.SelectSingleNode("/html/body"); - } - - return await ScopePage(doc, book, apiBase, body, mappings, page); - } - - throw new KavitaException("Could not find the appropriate html for that page"); - } - - private static void CreateToCChapter(EpubNavigationItemRef navigationItem, IList nestedChapters, IList chaptersList, - IReadOnlyDictionary mappings) - { - if (navigationItem.Link == null) - { - var item = new BookChapterItem() - { - Title = navigationItem.Title, - Children = nestedChapters - }; - if (nestedChapters.Count > 0) - { - item.Page = nestedChapters[0].Page; - } - - chaptersList.Add(item); - } - else - { - var groupKey = CleanContentKeys(navigationItem.Link.ContentFileName); - if (mappings.ContainsKey(groupKey)) - { - chaptersList.Add(new BookChapterItem() - { - Title = navigationItem.Title, - Page = mappings[groupKey], - Children = nestedChapters - }); - } - } - } - - - /// - /// Extracts the cover image to covers directory and returns file path back - /// - /// - /// Name of the new file. - /// Where to output the file, defaults to covers directory - /// - public string GetCoverImage(string fileFilePath, string fileName, string outputDirectory) - { - if (!IsValidFile(fileFilePath)) return string.Empty; - - if (Tasks.Scanner.Parser.Parser.IsPdf(fileFilePath)) - { - return GetPdfCoverImage(fileFilePath, fileName, outputDirectory); - } - - using var epubBook = EpubReader.OpenBook(fileFilePath, BookReaderOptions); - - try - { - // Try to get the cover image from OPF file, if not set, try to parse it from all the files, then result to the first one. - var coverImageContent = epubBook.Content.Cover - ?? epubBook.Content.Images.Values.FirstOrDefault(file => Tasks.Scanner.Parser.Parser.IsCoverImage(file.FileName)) - ?? 
epubBook.Content.Images.Values.FirstOrDefault(); - - if (coverImageContent == null) return string.Empty; - using var stream = coverImageContent.GetContentStream(); - - return _imageService.WriteCoverThumbnail(stream, fileName, outputDirectory); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "[BookService] There was a critical error and prevented thumbnail generation on {BookFile}. Defaulting to no cover image", fileFilePath); - } - - return string.Empty; - } - - - private string GetPdfCoverImage(string fileFilePath, string fileName, string outputDirectory) - { - try - { - using var docReader = DocLib.Instance.GetDocReader(fileFilePath, new PageDimensions(1080, 1920)); - if (docReader.GetPageCount() == 0) return string.Empty; - - using var stream = StreamManager.GetStream("BookService.GetPdfPage"); - GetPdfPage(docReader, 0, stream); - - return _imageService.WriteCoverThumbnail(stream, fileName, outputDirectory); - - } - catch (Exception ex) - { - _logger.LogWarning(ex, - "[BookService] There was a critical error and prevented thumbnail generation on {BookFile}. 
Defaulting to no cover image", - fileFilePath); - } - - return string.Empty; - } - - /// - /// Returns an image raster of a page within a PDF - /// - /// - /// - /// - private static void GetPdfPage(IDocReader docReader, int pageNumber, Stream stream) - { - using var pageReader = docReader.GetPageReader(pageNumber); - var rawBytes = pageReader.GetImage(new NaiveTransparencyRemover()); - var width = pageReader.GetPageWidth(); - var height = pageReader.GetPageHeight(); - var image = Image.LoadPixelData(rawBytes, width, height); - - stream.Seek(0, SeekOrigin.Begin); - image.SaveAsPng(stream); - stream.Seek(0, SeekOrigin.Begin); - } - - private static string RemoveWhiteSpaceFromStylesheets(string body) - { - if (string.IsNullOrEmpty(body)) - { - return string.Empty; - } - - // Remove comments from CSS - body = Regex.Replace(body, @"/\*[\d\D]*?\*/", string.Empty); - - body = Regex.Replace(body, @"[a-zA-Z]+#", "#"); - body = Regex.Replace(body, @"[\n\r]+\s*", string.Empty); - body = Regex.Replace(body, @"\s+", " "); - body = Regex.Replace(body, @"\s?([:,;{}])\s?", "$1"); - try - { - body = body.Replace(";}", "}"); - } - catch (Exception) - { - /* Swallow exception. 
Some css doesn't have style rules ending in ; */ - } - - body = Regex.Replace(body, @"([\s:]0)(px|pt|%|em)", "$1"); - - - return body; - } - - private void LogBookErrors(EpubBookRef book, EpubContentFileRef contentFileRef, HtmlDocument doc) - { - _logger.LogError("{FilePath} has an invalid html file (Page {PageName})", book.FilePath, contentFileRef.FileName); - foreach (var error in doc.ParseErrors) - { - _logger.LogError("Line {LineNumber}, Reason: {Reason}", error.Line, error.Reason); } } } + + public ComicInfo GetComicInfo(string filePath) + { + if (!IsValidFile(filePath) || Tasks.Scanner.Parser.Parser.IsPdf(filePath)) return null; + + try + { + using var epubBook = EpubReader.OpenBook(filePath, BookReaderOptions); + var publicationDate = + epubBook.Schema.Package.Metadata.Dates.FirstOrDefault(date => date.Event == "publication")?.Date; + + if (string.IsNullOrEmpty(publicationDate)) + { + publicationDate = epubBook.Schema.Package.Metadata.Dates.FirstOrDefault()?.Date; + } + var dateParsed = DateTime.TryParse(publicationDate, out var date); + var year = 0; + var month = 0; + var day = 0; + switch (dateParsed) + { + case true: + year = date.Year; + month = date.Month; + day = date.Day; + break; + case false when !string.IsNullOrEmpty(publicationDate) && publicationDate.Length == 4: + int.TryParse(publicationDate, out year); + break; + } + + var info = new ComicInfo() + { + Summary = epubBook.Schema.Package.Metadata.Description, + Writer = string.Join(",", epubBook.Schema.Package.Metadata.Creators.Select(c => Tasks.Scanner.Parser.Parser.CleanAuthor(c.Creator))), + Publisher = string.Join(",", epubBook.Schema.Package.Metadata.Publishers), + Month = month, + Day = day, + Year = year, + Title = epubBook.Title, + Genre = string.Join(",", epubBook.Schema.Package.Metadata.Subjects.Select(s => s.ToLower().Trim())), + LanguageISO = epubBook.Schema.Package.Metadata.Languages.FirstOrDefault() ?? 
string.Empty + }; + ComicInfo.CleanComicInfo(info); + + // Parse tags not exposed via Library + foreach (var metadataItem in epubBook.Schema.Package.Metadata.MetaItems) + { + switch (metadataItem.Name) + { + case "calibre:rating": + info.UserRating = float.Parse(metadataItem.Content); + break; + case "calibre:title_sort": + info.TitleSort = metadataItem.Content; + break; + case "calibre:series": + info.Series = metadataItem.Content; + info.SeriesSort = metadataItem.Content; + break; + } + } + + return info; + } + catch (Exception ex) + { + _logger.LogWarning(ex, "[GetComicInfo] There was an exception getting metadata"); + } + + return null; + } + + private bool IsValidFile(string filePath) + { + if (!File.Exists(filePath)) + { + _logger.LogWarning("[BookService] Book {EpubFile} could not be found", filePath); + return false; + } + + if (Tasks.Scanner.Parser.Parser.IsBook(filePath)) return true; + + _logger.LogWarning("[BookService] Book {EpubFile} is not a valid EPUB/PDF", filePath); + return false; + } + + public int GetNumberOfPages(string filePath) + { + if (!IsValidFile(filePath)) return 0; + + try + { + if (Tasks.Scanner.Parser.Parser.IsPdf(filePath)) + { + using var docReader = DocLib.Instance.GetDocReader(filePath, new PageDimensions(1080, 1920)); + return docReader.GetPageCount(); + } + + using var epubBook = EpubReader.OpenBook(filePath, BookReaderOptions); + return epubBook.Content.Html.Count; + } + catch (Exception ex) + { + _logger.LogWarning(ex, "[BookService] There was an exception getting number of pages, defaulting to 0"); + } + + return 0; + } + + public static string EscapeTags(string content) + { + content = Regex.Replace(content, @")", ""); + content = Regex.Replace(content, @")", ""); + return content; + } + + /// + /// Removes all leading ../ + /// + /// + /// + public static string CleanContentKeys(string key) + { + return key.Replace("../", string.Empty); + } + + public async Task> CreateKeyToPageMappingAsync(EpubBookRef book) + { + var dict 
= new Dictionary(); + var pageCount = 0; + foreach (var contentFileRef in await book.GetReadingOrderAsync()) + { + if (contentFileRef.ContentType != EpubContentType.XHTML_1_1) continue; + dict.Add(contentFileRef.FileName, pageCount); + pageCount += 1; + } + + return dict; + } + + /// + /// Parses out Title from book. Chapters and Volumes will always be "0". If there is any exception reading book (malformed books) + /// then null is returned. This expects only an epub file + /// + /// + /// + public ParserInfo ParseInfo(string filePath) + { + if (!Tasks.Scanner.Parser.Parser.IsEpub(filePath)) return null; + + try + { + using var epubBook = EpubReader.OpenBook(filePath, BookReaderOptions); + + // + // + // If all three are present, we can take that over dc:title and format as: + // Series = The Dark Tower, Volume = 5, Filename as "Wolves of the Calla" + // In addition, the following can exist and should parse as a series (EPUB 3.2 spec) + // + // The Lord of the Rings + // + // set + // 2 + try + { + var seriesIndex = string.Empty; + var series = string.Empty; + var specialName = string.Empty; + + + foreach (var metadataItem in epubBook.Schema.Package.Metadata.MetaItems) + { + // EPUB 2 and 3 + switch (metadataItem.Name) + { + case "calibre:series_index": + seriesIndex = metadataItem.Content; + break; + case "calibre:series": + series = metadataItem.Content; + break; + case "calibre:title_sort": + specialName = metadataItem.Content; + break; + } + + // EPUB 3.2+ only + switch (metadataItem.Property) + { + case "group-position": + seriesIndex = metadataItem.Content; + break; + case "belongs-to-collection": + series = metadataItem.Content; + break; + case "collection-type": + // These look to be genres from https://manual.calibre-ebook.com/sub_groups.html + break; + } + } + + if (!string.IsNullOrEmpty(series) && !string.IsNullOrEmpty(seriesIndex)) + { + if (string.IsNullOrEmpty(specialName)) + { + specialName = epubBook.Title; + } + var info = new ParserInfo() + { + 
Chapters = Tasks.Scanner.Parser.Parser.DefaultChapter, + Edition = string.Empty, + Format = MangaFormat.Epub, + Filename = Path.GetFileName(filePath), + Title = specialName?.Trim(), + FullFilePath = filePath, + IsSpecial = false, + Series = series.Trim(), + SeriesSort = series.Trim(), + Volumes = seriesIndex + }; + + return info; + } + } + catch (Exception) + { + // Swallow exception + } + + return new ParserInfo() + { + Chapters = Tasks.Scanner.Parser.Parser.DefaultChapter, + Edition = string.Empty, + Format = MangaFormat.Epub, + Filename = Path.GetFileName(filePath), + Title = epubBook.Title.Trim(), + FullFilePath = filePath, + IsSpecial = false, + Series = epubBook.Title.Trim(), + Volumes = Tasks.Scanner.Parser.Parser.DefaultVolume, + }; + } + catch (Exception ex) + { + _logger.LogWarning(ex, "[BookService] There was an exception when opening epub book: {FileName}", filePath); + } + + return null; + } + + /// + /// Extracts a pdf into images to a target directory. Uses multi-threaded implementation since docnet is slow normally. 
+ /// + /// + /// + public void ExtractPdfImages(string fileFilePath, string targetDirectory) + { + _directoryService.ExistOrCreate(targetDirectory); + + using var docReader = DocLib.Instance.GetDocReader(fileFilePath, new PageDimensions(1080, 1920)); + var pages = docReader.GetPageCount(); + Parallel.For(0, pages, pageNumber => + { + using var stream = StreamManager.GetStream("BookService.GetPdfPage"); + GetPdfPage(docReader, pageNumber, stream); + using var fileStream = File.Create(Path.Combine(targetDirectory, "Page-" + pageNumber + ".png")); + stream.Seek(0, SeekOrigin.Begin); + stream.CopyTo(fileStream); + }); + } + + /// + /// Responsible to scope all the css, links, tags, etc to prepare a self contained html file for the page + /// + /// Html Doc that will be appended to + /// Underlying epub + /// API Url for file loading to pass through + /// Body element from the epub + /// Epub mappings + /// Page number we are loading + /// + public async Task ScopePage(HtmlDocument doc, EpubBookRef book, string apiBase, HtmlNode body, Dictionary mappings, int page) + { + await InlineStyles(doc, book, apiBase, body); + + RewriteAnchors(page, doc, mappings); + + ScopeImages(doc, book, apiBase); + + return PrepareFinalHtml(doc, body); + } + + /// + /// This will return a list of mappings from ID -> page num. ID will be the xhtml key and page num will be the reading order + /// this is used to rewrite anchors in the book text so that we always load properly in our reader. 
+ /// + /// Chapter with at least one file + /// + public async Task> GenerateTableOfContents(Chapter chapter) + { + using var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath, BookReaderOptions); + var mappings = await CreateKeyToPageMappingAsync(book); + + var navItems = await book.GetNavigationAsync(); + var chaptersList = new List(); + + foreach (var navigationItem in navItems) + { + if (navigationItem.NestedItems.Count == 0) + { + CreateToCChapter(navigationItem, Array.Empty(), chaptersList, mappings); + continue; + } + + var nestedChapters = new List(); + + foreach (var nestedChapter in navigationItem.NestedItems.Where(n => n.Link != null)) + { + var key = BookService.CleanContentKeys(nestedChapter.Link.ContentFileName); + if (mappings.ContainsKey(key)) + { + nestedChapters.Add(new BookChapterItem() + { + Title = nestedChapter.Title, + Page = mappings[key], + Part = nestedChapter.Link.Anchor ?? string.Empty, + Children = new List() + }); + } + } + + CreateToCChapter(navigationItem, nestedChapters, chaptersList, mappings); + } + + if (chaptersList.Count != 0) return chaptersList; + // Generate from TOC + var tocPage = book.Content.Html.Keys.FirstOrDefault(k => k.ToUpper().Contains("TOC")); + if (tocPage == null) return chaptersList; + + // Find all anchor tags, for each anchor we get inner text, to lower then title case on UI. 
Get href and generate page content + var doc = new HtmlDocument(); + var content = await book.Content.Html[tocPage].ReadContentAsync(); + doc.LoadHtml(content); + var anchors = doc.DocumentNode.SelectNodes("//a"); + if (anchors == null) return chaptersList; + + foreach (var anchor in anchors) + { + if (!anchor.Attributes.Contains("href")) continue; + + var key = BookService.CleanContentKeys(anchor.Attributes["href"].Value).Split("#")[0]; + if (!mappings.ContainsKey(key)) + { + // Fallback to searching for key (bad epub metadata) + var correctedKey = book.Content.Html.Keys.SingleOrDefault(s => s.EndsWith(key)); + if (!string.IsNullOrEmpty(correctedKey)) + { + key = correctedKey; + } + } + + if (string.IsNullOrEmpty(key) || !mappings.ContainsKey(key)) continue; + var part = string.Empty; + if (anchor.Attributes["href"].Value.Contains('#')) + { + part = anchor.Attributes["href"].Value.Split("#")[1]; + } + chaptersList.Add(new BookChapterItem() + { + Title = anchor.InnerText, + Page = mappings[key], + Part = part, + Children = new List() + }); + } + + return chaptersList; + } + + /// + /// This returns a single page within the epub book. All html will be rewritten to be scoped within our reader, + /// all css is scoped, etc. 
+ /// + /// The requested page + /// The chapterId + /// The path to the cached epub file + /// The API base for Kavita, to rewrite urls to so we load though our endpoint + /// Full epub HTML Page, scoped to Kavita's reader + /// All exceptions throw this + public async Task GetBookPage(int page, int chapterId, string cachedEpubPath, string baseUrl) + { + using var book = await EpubReader.OpenBookAsync(cachedEpubPath, BookReaderOptions); + var mappings = await CreateKeyToPageMappingAsync(book); + var apiBase = baseUrl + "book/" + chapterId + "/" + BookApiUrl; + + var counter = 0; + var doc = new HtmlDocument {OptionFixNestedTags = true}; + + + var bookPages = await book.GetReadingOrderAsync(); + foreach (var contentFileRef in bookPages) + { + if (page != counter) + { + counter++; + continue; + } + + var content = await contentFileRef.ReadContentAsync(); + if (contentFileRef.ContentType != EpubContentType.XHTML_1_1) return content; + + // In more cases than not, due to this being XML not HTML, we need to escape the script tags. + content = BookService.EscapeTags(content); + + doc.LoadHtml(content); + var body = doc.DocumentNode.SelectSingleNode("//body"); + + if (body == null) + { + if (doc.ParseErrors.Any()) + { + LogBookErrors(book, contentFileRef, doc); + throw new KavitaException("The file is malformed! Cannot read."); + } + _logger.LogError("{FilePath} has no body tag! Generating one for support. 
Book may be skewed", book.FilePath); + doc.DocumentNode.SelectSingleNode("/html").AppendChild(HtmlNode.CreateNode("")); + body = doc.DocumentNode.SelectSingleNode("/html/body"); + } + + return await ScopePage(doc, book, apiBase, body, mappings, page); + } + + throw new KavitaException("Could not find the appropriate html for that page"); + } + + private static void CreateToCChapter(EpubNavigationItemRef navigationItem, IList nestedChapters, IList chaptersList, + IReadOnlyDictionary mappings) + { + if (navigationItem.Link == null) + { + var item = new BookChapterItem() + { + Title = navigationItem.Title, + Children = nestedChapters + }; + if (nestedChapters.Count > 0) + { + item.Page = nestedChapters[0].Page; + } + + chaptersList.Add(item); + } + else + { + var groupKey = CleanContentKeys(navigationItem.Link.ContentFileName); + if (mappings.ContainsKey(groupKey)) + { + chaptersList.Add(new BookChapterItem() + { + Title = navigationItem.Title, + Page = mappings[groupKey], + Children = nestedChapters + }); + } + } + } + + + /// + /// Extracts the cover image to covers directory and returns file path back + /// + /// + /// Name of the new file. + /// Where to output the file, defaults to covers directory + /// + public string GetCoverImage(string fileFilePath, string fileName, string outputDirectory) + { + if (!IsValidFile(fileFilePath)) return string.Empty; + + if (Tasks.Scanner.Parser.Parser.IsPdf(fileFilePath)) + { + return GetPdfCoverImage(fileFilePath, fileName, outputDirectory); + } + + using var epubBook = EpubReader.OpenBook(fileFilePath, BookReaderOptions); + + try + { + // Try to get the cover image from OPF file, if not set, try to parse it from all the files, then result to the first one. + var coverImageContent = epubBook.Content.Cover + ?? epubBook.Content.Images.Values.FirstOrDefault(file => Tasks.Scanner.Parser.Parser.IsCoverImage(file.FileName)) + ?? 
epubBook.Content.Images.Values.FirstOrDefault(); + + if (coverImageContent == null) return string.Empty; + using var stream = coverImageContent.GetContentStream(); + + return _imageService.WriteCoverThumbnail(stream, fileName, outputDirectory); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "[BookService] There was a critical error and prevented thumbnail generation on {BookFile}. Defaulting to no cover image", fileFilePath); + } + + return string.Empty; + } + + + private string GetPdfCoverImage(string fileFilePath, string fileName, string outputDirectory) + { + try + { + using var docReader = DocLib.Instance.GetDocReader(fileFilePath, new PageDimensions(1080, 1920)); + if (docReader.GetPageCount() == 0) return string.Empty; + + using var stream = StreamManager.GetStream("BookService.GetPdfPage"); + GetPdfPage(docReader, 0, stream); + + return _imageService.WriteCoverThumbnail(stream, fileName, outputDirectory); + + } + catch (Exception ex) + { + _logger.LogWarning(ex, + "[BookService] There was a critical error and prevented thumbnail generation on {BookFile}. 
Defaulting to no cover image", + fileFilePath); + } + + return string.Empty; + } + + /// + /// Returns an image raster of a page within a PDF + /// + /// + /// + /// + private static void GetPdfPage(IDocReader docReader, int pageNumber, Stream stream) + { + using var pageReader = docReader.GetPageReader(pageNumber); + var rawBytes = pageReader.GetImage(new NaiveTransparencyRemover()); + var width = pageReader.GetPageWidth(); + var height = pageReader.GetPageHeight(); + var image = Image.LoadPixelData(rawBytes, width, height); + + stream.Seek(0, SeekOrigin.Begin); + image.SaveAsPng(stream); + stream.Seek(0, SeekOrigin.Begin); + } + + private static string RemoveWhiteSpaceFromStylesheets(string body) + { + if (string.IsNullOrEmpty(body)) + { + return string.Empty; + } + + // Remove comments from CSS + body = Regex.Replace(body, @"/\*[\d\D]*?\*/", string.Empty); + + body = Regex.Replace(body, @"[a-zA-Z]+#", "#"); + body = Regex.Replace(body, @"[\n\r]+\s*", string.Empty); + body = Regex.Replace(body, @"\s+", " "); + body = Regex.Replace(body, @"\s?([:,;{}])\s?", "$1"); + try + { + body = body.Replace(";}", "}"); + } + catch (Exception) + { + //Swallow exception. 
Some css don't have style rules ending in ';' + } + + body = Regex.Replace(body, @"([\s:]0)(px|pt|%|em)", "$1"); + + + return body; + } + + private void LogBookErrors(EpubBookRef book, EpubContentFileRef contentFileRef, HtmlDocument doc) + { + _logger.LogError("{FilePath} has an invalid html file (Page {PageName})", book.FilePath, contentFileRef.FileName); + foreach (var error in doc.ParseErrors) + { + _logger.LogError("Line {LineNumber}, Reason: {Reason}", error.Line, error.Reason); + } + } } diff --git a/API/Services/BookmarkService.cs b/API/Services/BookmarkService.cs index c798c47d1..4d9b88ff4 100644 --- a/API/Services/BookmarkService.cs +++ b/API/Services/BookmarkService.cs @@ -3,6 +3,7 @@ using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading.Tasks; +using API.Constants; using API.Data; using API.DTOs.Reader; using API.Entities; @@ -78,15 +79,14 @@ public class BookmarkService : IBookmarkService /// If the save to DB and copy was successful public async Task BookmarkPage(AppUser userWithBookmarks, BookmarkDto bookmarkDto, string imageToBookmark) { + if (userWithBookmarks == null || userWithBookmarks.Bookmarks == null) return false; try { - var userBookmark = - await _unitOfWork.UserRepository.GetBookmarkForPage(bookmarkDto.Page, bookmarkDto.ChapterId, userWithBookmarks.Id); - + var userBookmark = userWithBookmarks.Bookmarks.SingleOrDefault(b => b.Page == bookmarkDto.Page && b.ChapterId == bookmarkDto.ChapterId); if (userBookmark != null) { _logger.LogError("Bookmark already exists for Series {SeriesId}, Volume {VolumeId}, Chapter {ChapterId}, Page {PageNum}", bookmarkDto.SeriesId, bookmarkDto.VolumeId, bookmarkDto.ChapterId, bookmarkDto.Page); - return false; + return true; } var fileInfo = _directoryService.FileSystem.FileInfo.FromFileName(imageToBookmark); @@ -100,14 +100,13 @@ public class BookmarkService : IBookmarkService VolumeId = bookmarkDto.VolumeId, SeriesId = bookmarkDto.SeriesId, ChapterId = 
bookmarkDto.ChapterId, - FileName = Path.Join(targetFolderStem, fileInfo.Name) + FileName = Path.Join(targetFolderStem, fileInfo.Name), + AppUserId = userWithBookmarks.Id }; _directoryService.CopyFileToDirectory(imageToBookmark, targetFilepath); - userWithBookmarks.Bookmarks ??= new List(); - userWithBookmarks.Bookmarks.Add(bookmark); - _unitOfWork.UserRepository.Update(userWithBookmarks); + _unitOfWork.UserRepository.Add(bookmark); await _unitOfWork.CommitAsync(); if (settings.ConvertBookmarkToWebP) @@ -135,15 +134,12 @@ public class BookmarkService : IBookmarkService public async Task RemoveBookmarkPage(AppUser userWithBookmarks, BookmarkDto bookmarkDto) { if (userWithBookmarks.Bookmarks == null) return true; + var bookmarkToDelete = userWithBookmarks.Bookmarks.SingleOrDefault(x => + x.ChapterId == bookmarkDto.ChapterId && x.Page == bookmarkDto.Page); try { - var bookmarkToDelete = userWithBookmarks.Bookmarks.SingleOrDefault(x => - x.ChapterId == bookmarkDto.ChapterId && x.AppUserId == userWithBookmarks.Id && x.Page == bookmarkDto.Page && - x.SeriesId == bookmarkDto.SeriesId); - if (bookmarkToDelete != null) { - await DeleteBookmarkFiles(new[] {bookmarkToDelete}); _unitOfWork.UserRepository.Delete(bookmarkToDelete); } @@ -151,10 +147,10 @@ public class BookmarkService : IBookmarkService } catch (Exception) { - await _unitOfWork.RollbackAsync(); return false; } + await DeleteBookmarkFiles(new[] {bookmarkToDelete}); return true; } diff --git a/API/Services/CacheService.cs b/API/Services/CacheService.cs index b81b87d91..a150bde22 100644 --- a/API/Services/CacheService.cs +++ b/API/Services/CacheService.cs @@ -10,245 +10,244 @@ using API.Extensions; using Kavita.Common; using Microsoft.Extensions.Logging; -namespace API.Services +namespace API.Services; + +public interface ICacheService { - public interface ICacheService + /// + /// Ensures the cache is created for the given chapter and if not, will create it. 
Should be called before any other + /// cache operations (except cleanup). + /// + /// + /// Chapter for the passed chapterId. Side-effect from ensuring cache. + Task Ensure(int chapterId); + /// + /// Clears cache directory of all volumes. This can be invoked from deleting a library or a series. + /// + /// Volumes that belong to that library. Assume the library might have been deleted before this invocation. + void CleanupChapters(IEnumerable chapterIds); + void CleanupBookmarks(IEnumerable seriesIds); + string GetCachedPagePath(Chapter chapter, int page); + string GetCachedBookmarkPagePath(int seriesId, int page); + string GetCachedFile(Chapter chapter); + public void ExtractChapterFiles(string extractPath, IReadOnlyList files); + Task CacheBookmarkForSeries(int userId, int seriesId); + void CleanupBookmarkCache(int seriesId); +} +public class CacheService : ICacheService +{ + private readonly ILogger _logger; + private readonly IUnitOfWork _unitOfWork; + private readonly IDirectoryService _directoryService; + private readonly IReadingItemService _readingItemService; + private readonly IBookmarkService _bookmarkService; + + public CacheService(ILogger logger, IUnitOfWork unitOfWork, + IDirectoryService directoryService, IReadingItemService readingItemService, + IBookmarkService bookmarkService) { - /// - /// Ensures the cache is created for the given chapter and if not, will create it. Should be called before any other - /// cache operations (except cleanup). - /// - /// - /// Chapter for the passed chapterId. Side-effect from ensuring cache. - Task Ensure(int chapterId); - /// - /// Clears cache directory of all volumes. This can be invoked from deleting a library or a series. - /// - /// Volumes that belong to that library. Assume the library might have been deleted before this invocation. 
- void CleanupChapters(IEnumerable chapterIds); - void CleanupBookmarks(IEnumerable seriesIds); - string GetCachedPagePath(Chapter chapter, int page); - string GetCachedBookmarkPagePath(int seriesId, int page); - string GetCachedFile(Chapter chapter); - public void ExtractChapterFiles(string extractPath, IReadOnlyList files); - Task CacheBookmarkForSeries(int userId, int seriesId); - void CleanupBookmarkCache(int seriesId); + _logger = logger; + _unitOfWork = unitOfWork; + _directoryService = directoryService; + _readingItemService = readingItemService; + _bookmarkService = bookmarkService; } - public class CacheService : ICacheService + + public string GetCachedBookmarkPagePath(int seriesId, int page) { - private readonly ILogger _logger; - private readonly IUnitOfWork _unitOfWork; - private readonly IDirectoryService _directoryService; - private readonly IReadingItemService _readingItemService; - private readonly IBookmarkService _bookmarkService; + // Calculate what chapter the page belongs to + var path = GetBookmarkCachePath(seriesId); + var files = _directoryService.GetFilesWithExtension(path, Tasks.Scanner.Parser.Parser.ImageFileExtensions); + files = files + .AsEnumerable() + .OrderByNatural(Path.GetFileNameWithoutExtension) + .ToArray(); - public CacheService(ILogger logger, IUnitOfWork unitOfWork, - IDirectoryService directoryService, IReadingItemService readingItemService, - IBookmarkService bookmarkService) + if (files.Length == 0) { - _logger = logger; - _unitOfWork = unitOfWork; - _directoryService = directoryService; - _readingItemService = readingItemService; - _bookmarkService = bookmarkService; + return string.Empty; } - public string GetCachedBookmarkPagePath(int seriesId, int page) + // Since array is 0 based, we need to keep that in account (only affects last image) + return page == files.Length ? files.ElementAt(page - 1) : files.ElementAt(page); + } + + /// + /// Returns the full path to the cached file. 
If the file does not exist, will fallback to the original. + /// + /// + /// + public string GetCachedFile(Chapter chapter) + { + var extractPath = GetCachePath(chapter.Id); + var path = Path.Join(extractPath, _directoryService.FileSystem.Path.GetFileName(chapter.Files.First().FilePath)); + if (!(_directoryService.FileSystem.FileInfo.FromFileName(path).Exists)) { - // Calculate what chapter the page belongs to - var path = GetBookmarkCachePath(seriesId); - var files = _directoryService.GetFilesWithExtension(path, Tasks.Scanner.Parser.Parser.ImageFileExtensions); - files = files - .AsEnumerable() - .OrderByNatural(Path.GetFileNameWithoutExtension) - .ToArray(); - - if (files.Length == 0) - { - return string.Empty; - } - - // Since array is 0 based, we need to keep that in account (only affects last image) - return page == files.Length ? files.ElementAt(page - 1) : files.ElementAt(page); + path = chapter.Files.First().FilePath; } + return path; + } - /// - /// Returns the full path to the cached file. If the file does not exist, will fallback to the original. - /// - /// - /// - public string GetCachedFile(Chapter chapter) + + /// + /// Caches the files for the given chapter to CacheDirectory + /// + /// + /// This will always return the Chapter for the chapterId + public async Task Ensure(int chapterId) + { + _directoryService.ExistOrCreate(_directoryService.CacheDirectory); + var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId); + var extractPath = GetCachePath(chapterId); + + if (_directoryService.Exists(extractPath)) return chapter; + var files = chapter.Files.ToList(); + ExtractChapterFiles(extractPath, files); + + return chapter; + } + + /// + /// This is an internal method for cache service for extracting chapter files to disk. 
The code is structured + /// for cache service, but can be re-used (download bookmarks) + /// + /// + /// + /// + public void ExtractChapterFiles(string extractPath, IReadOnlyList files) + { + var removeNonImages = true; + var fileCount = files.Count; + var extraPath = ""; + var extractDi = _directoryService.FileSystem.DirectoryInfo.FromDirectoryName(extractPath); + + if (files.Count > 0 && files[0].Format == MangaFormat.Image) { - var extractPath = GetCachePath(chapter.Id); - var path = Path.Join(extractPath, _directoryService.FileSystem.Path.GetFileName(chapter.Files.First().FilePath)); - if (!(_directoryService.FileSystem.FileInfo.FromFileName(path).Exists)) - { - path = chapter.Files.First().FilePath; - } - return path; - } - - - /// - /// Caches the files for the given chapter to CacheDirectory - /// - /// - /// This will always return the Chapter for the chapterId - public async Task Ensure(int chapterId) - { - _directoryService.ExistOrCreate(_directoryService.CacheDirectory); - var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId); - var extractPath = GetCachePath(chapterId); - - if (_directoryService.Exists(extractPath)) return chapter; - var files = chapter.Files.ToList(); - ExtractChapterFiles(extractPath, files); - - return chapter; - } - - /// - /// This is an internal method for cache service for extracting chapter files to disk. 
The code is structured - /// for cache service, but can be re-used (download bookmarks) - /// - /// - /// - /// - public void ExtractChapterFiles(string extractPath, IReadOnlyList files) - { - var removeNonImages = true; - var fileCount = files.Count; - var extraPath = ""; - var extractDi = _directoryService.FileSystem.DirectoryInfo.FromDirectoryName(extractPath); - - if (files.Count > 0 && files[0].Format == MangaFormat.Image) - { - _readingItemService.Extract(files[0].FilePath, extractPath, MangaFormat.Image, files.Count); - _directoryService.Flatten(extractDi.FullName); - } - - foreach (var file in files) - { - if (fileCount > 1) - { - extraPath = file.Id + string.Empty; - } - - switch (file.Format) - { - case MangaFormat.Archive: - _readingItemService.Extract(file.FilePath, Path.Join(extractPath, extraPath), file.Format); - break; - case MangaFormat.Epub: - case MangaFormat.Pdf: - { - removeNonImages = false; - if (!_directoryService.FileSystem.File.Exists(files[0].FilePath)) - { - _logger.LogError("{File} does not exist on disk", files[0].FilePath); - throw new KavitaException($"{files[0].FilePath} does not exist on disk"); - } - - _directoryService.ExistOrCreate(extractPath); - _directoryService.CopyFileToDirectory(files[0].FilePath, extractPath); - break; - } - } - } - + _readingItemService.Extract(files[0].FilePath, extractPath, MangaFormat.Image, files.Count); _directoryService.Flatten(extractDi.FullName); - if (removeNonImages) + } + + foreach (var file in files) + { + if (fileCount > 1) { - _directoryService.RemoveNonImages(extractDi.FullName); + extraPath = file.Id + string.Empty; + } + + switch (file.Format) + { + case MangaFormat.Archive: + _readingItemService.Extract(file.FilePath, Path.Join(extractPath, extraPath), file.Format); + break; + case MangaFormat.Epub: + case MangaFormat.Pdf: + { + removeNonImages = false; + if (!_directoryService.FileSystem.File.Exists(files[0].FilePath)) + { + _logger.LogError("{File} does not exist on disk", 
files[0].FilePath); + throw new KavitaException($"{files[0].FilePath} does not exist on disk"); + } + + _directoryService.ExistOrCreate(extractPath); + _directoryService.CopyFileToDirectory(files[0].FilePath, extractPath); + break; + } } } - /// - /// Removes the cached files and folders for a set of chapterIds - /// - /// - public void CleanupChapters(IEnumerable chapterIds) + _directoryService.Flatten(extractDi.FullName); + if (removeNonImages) { - foreach (var chapter in chapterIds) - { - _directoryService.ClearAndDeleteDirectory(GetCachePath(chapter)); - } + _directoryService.RemoveNonImages(extractDi.FullName); + } + } + + /// + /// Removes the cached files and folders for a set of chapterIds + /// + /// + public void CleanupChapters(IEnumerable chapterIds) + { + foreach (var chapter in chapterIds) + { + _directoryService.ClearAndDeleteDirectory(GetCachePath(chapter)); + } + } + + /// + /// Removes the cached files and folders for a set of chapterIds + /// + /// + public void CleanupBookmarks(IEnumerable seriesIds) + { + foreach (var series in seriesIds) + { + _directoryService.ClearAndDeleteDirectory(GetBookmarkCachePath(series)); + } + } + + + /// + /// Returns the cache path for a given Chapter. Should be cacheDirectory/{chapterId}/ + /// + /// + /// + private string GetCachePath(int chapterId) + { + return _directoryService.FileSystem.Path.GetFullPath(_directoryService.FileSystem.Path.Join(_directoryService.CacheDirectory, $"{chapterId}/")); + } + + private string GetBookmarkCachePath(int seriesId) + { + return _directoryService.FileSystem.Path.GetFullPath(_directoryService.FileSystem.Path.Join(_directoryService.CacheDirectory, $"{seriesId}_bookmarks/")); + } + + /// + /// Returns the absolute path of a cached page. + /// + /// Chapter entity with Files populated. + /// Page number to look for + /// Page filepath or empty if no files found. 
+ public string GetCachedPagePath(Chapter chapter, int page) + { + // Calculate what chapter the page belongs to + var path = GetCachePath(chapter.Id); + // TODO: We can optimize this by extracting and renaming, so we don't need to scan for the files and can do a direct access + var files = _directoryService.GetFilesWithExtension(path, Tasks.Scanner.Parser.Parser.ImageFileExtensions) + .OrderByNatural(Path.GetFileNameWithoutExtension) + .ToArray(); + + if (files.Length == 0) + { + return string.Empty; } - /// - /// Removes the cached files and folders for a set of chapterIds - /// - /// - public void CleanupBookmarks(IEnumerable seriesIds) - { - foreach (var series in seriesIds) - { - _directoryService.ClearAndDeleteDirectory(GetBookmarkCachePath(series)); - } - } + // Since array is 0 based, we need to keep that in account (only affects last image) + return page == files.Length ? files.ElementAt(page - 1) : files.ElementAt(page); + } + public async Task CacheBookmarkForSeries(int userId, int seriesId) + { + var destDirectory = _directoryService.FileSystem.Path.Join(_directoryService.CacheDirectory, seriesId + "_bookmarks"); + if (_directoryService.Exists(destDirectory)) return _directoryService.GetFiles(destDirectory).Count(); - /// - /// Returns the cache path for a given Chapter. 
Should be cacheDirectory/{chapterId}/ - /// - /// - /// - private string GetCachePath(int chapterId) - { - return _directoryService.FileSystem.Path.GetFullPath(_directoryService.FileSystem.Path.Join(_directoryService.CacheDirectory, $"{chapterId}/")); - } + var bookmarkDtos = await _unitOfWork.UserRepository.GetBookmarkDtosForSeries(userId, seriesId); + var files = (await _bookmarkService.GetBookmarkFilesById(bookmarkDtos.Select(b => b.Id))).ToList(); + _directoryService.CopyFilesToDirectory(files, destDirectory); + _directoryService.Flatten(destDirectory); + return files.Count; + } - private string GetBookmarkCachePath(int seriesId) - { - return _directoryService.FileSystem.Path.GetFullPath(_directoryService.FileSystem.Path.Join(_directoryService.CacheDirectory, $"{seriesId}_bookmarks/")); - } + /// + /// Clears a cached bookmarks for a series id folder + /// + /// + public void CleanupBookmarkCache(int seriesId) + { + var destDirectory = _directoryService.FileSystem.Path.Join(_directoryService.CacheDirectory, seriesId + "_bookmarks"); + if (!_directoryService.Exists(destDirectory)) return; - /// - /// Returns the absolute path of a cached page. - /// - /// Chapter entity with Files populated. - /// Page number to look for - /// Page filepath or empty if no files found. - public string GetCachedPagePath(Chapter chapter, int page) - { - // Calculate what chapter the page belongs to - var path = GetCachePath(chapter.Id); - // TODO: We can optimize this by extracting and renaming, so we don't need to scan for the files and can do a direct access - var files = _directoryService.GetFilesWithExtension(path, Tasks.Scanner.Parser.Parser.ImageFileExtensions) - .OrderByNatural(Path.GetFileNameWithoutExtension) - .ToArray(); - - if (files.Length == 0) - { - return string.Empty; - } - - // Since array is 0 based, we need to keep that in account (only affects last image) - return page == files.Length ? 
files.ElementAt(page - 1) : files.ElementAt(page); - } - - public async Task CacheBookmarkForSeries(int userId, int seriesId) - { - var destDirectory = _directoryService.FileSystem.Path.Join(_directoryService.CacheDirectory, seriesId + "_bookmarks"); - if (_directoryService.Exists(destDirectory)) return _directoryService.GetFiles(destDirectory).Count(); - - var bookmarkDtos = await _unitOfWork.UserRepository.GetBookmarkDtosForSeries(userId, seriesId); - var files = (await _bookmarkService.GetBookmarkFilesById(bookmarkDtos.Select(b => b.Id))).ToList(); - _directoryService.CopyFilesToDirectory(files, destDirectory); - _directoryService.Flatten(destDirectory); - return files.Count; - } - - /// - /// Clears a cached bookmarks for a series id folder - /// - /// - public void CleanupBookmarkCache(int seriesId) - { - var destDirectory = _directoryService.FileSystem.Path.Join(_directoryService.CacheDirectory, seriesId + "_bookmarks"); - if (!_directoryService.Exists(destDirectory)) return; - - _directoryService.ClearAndDeleteDirectory(destDirectory); - } + _directoryService.ClearAndDeleteDirectory(destDirectory); } } diff --git a/API/Services/DeviceService.cs b/API/Services/DeviceService.cs new file mode 100644 index 000000000..ca846381a --- /dev/null +++ b/API/Services/DeviceService.cs @@ -0,0 +1,125 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using API.Data; +using API.DTOs.Device; +using API.DTOs.Email; +using API.Entities; +using API.Entities.Enums; +using API.SignalR; +using Kavita.Common; +using Microsoft.Extensions.Logging; + +namespace API.Services; + +public interface IDeviceService +{ + Task Create(CreateDeviceDto dto, AppUser userWithDevices); + Task Update(UpdateDeviceDto dto, AppUser userWithDevices); + Task Delete(AppUser userWithDevices, int deviceId); + Task SendTo(IReadOnlyList chapterIds, int deviceId); +} + +public class DeviceService : IDeviceService +{ + private readonly IUnitOfWork 
_unitOfWork; + private readonly ILogger _logger; + private readonly IEmailService _emailService; + + public DeviceService(IUnitOfWork unitOfWork, ILogger logger, IEmailService emailService) + { + _unitOfWork = unitOfWork; + _logger = logger; + _emailService = emailService; + } + #nullable enable + public async Task Create(CreateDeviceDto dto, AppUser userWithDevices) + { + try + { + userWithDevices.Devices ??= new List(); + var existingDevice = userWithDevices.Devices.SingleOrDefault(d => d.Name.Equals(dto.Name)); + if (existingDevice != null) throw new KavitaException("A device with this name already exists"); + + existingDevice = DbFactory.Device(dto.Name); + existingDevice.Platform = dto.Platform; + existingDevice.EmailAddress = dto.EmailAddress; + + + userWithDevices.Devices.Add(existingDevice); + _unitOfWork.UserRepository.Update(userWithDevices); + + if (!_unitOfWork.HasChanges()) return existingDevice; + if (await _unitOfWork.CommitAsync()) return existingDevice; + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an error when creating your device"); + await _unitOfWork.RollbackAsync(); + } + + return null; + } + + public async Task Update(UpdateDeviceDto dto, AppUser userWithDevices) + { + try + { + var existingDevice = userWithDevices.Devices.SingleOrDefault(d => d.Id == dto.Id); + if (existingDevice == null) throw new KavitaException("This device doesn't exist yet. 
Please create first"); + + existingDevice.Name = dto.Name; + existingDevice.Platform = dto.Platform; + existingDevice.EmailAddress = dto.EmailAddress; + + if (!_unitOfWork.HasChanges()) return existingDevice; + if (await _unitOfWork.CommitAsync()) return existingDevice; + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an error when updating your device"); + await _unitOfWork.RollbackAsync(); + } + + return null; + } + #nullable disable + + public async Task Delete(AppUser userWithDevices, int deviceId) + { + try + { + userWithDevices.Devices = userWithDevices.Devices.Where(d => d.Id != deviceId).ToList(); + _unitOfWork.UserRepository.Update(userWithDevices); + if (!_unitOfWork.HasChanges()) return true; + if (await _unitOfWork.CommitAsync()) return true; + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an issue with deleting the device, {DeviceId} for user {UserName}", deviceId, userWithDevices.UserName); + } + + return false; + } + + public async Task SendTo(IReadOnlyList chapterIds, int deviceId) + { + var files = await _unitOfWork.ChapterRepository.GetFilesForChaptersAsync(chapterIds); + if (files.Any(f => f.Format is not (MangaFormat.Epub or MangaFormat.Pdf))) + throw new KavitaException("Cannot Send non Epub or Pdf to devices as not supported"); + + var device = await _unitOfWork.DeviceRepository.GetDeviceById(deviceId); + if (device == null) throw new KavitaException("Device doesn't exist"); + device.LastUsed = DateTime.Now; + _unitOfWork.DeviceRepository.Update(device); + await _unitOfWork.CommitAsync(); + var success = await _emailService.SendFilesToEmail(new SendToDto() + { + DestinationEmail = device.EmailAddress, + FilePaths = files.Select(m => m.FilePath) + }); + + return success; + } +} diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs index 54757f651..dbf7214cb 100644 --- a/API/Services/DirectoryService.cs +++ b/API/Services/DirectoryService.cs @@ -14,928 +14,937 @@ using 
Kavita.Common.Helpers; using Microsoft.Extensions.FileSystemGlobbing; using Microsoft.Extensions.Logging; -namespace API.Services +namespace API.Services; + +public interface IDirectoryService { - public interface IDirectoryService - { - IFileSystem FileSystem { get; } - string CacheDirectory { get; } - string CoverImageDirectory { get; } - string LogDirectory { get; } - string TempDirectory { get; } - string ConfigDirectory { get; } - string SiteThemeDirectory { get; } - /// - /// Original BookmarkDirectory. Only used for resetting directory. Use for actual path. - /// - string BookmarkDirectory { get; } - /// - /// Lists out top-level folders for a given directory. Filters out System and Hidden folders. - /// - /// Absolute path of directory to scan. - /// List of folder names - IEnumerable ListDirectory(string rootPath); - Task ReadFileAsync(string path); - bool CopyFilesToDirectory(IEnumerable filePaths, string directoryPath, string prepend = ""); - bool Exists(string directory); - void CopyFileToDirectory(string fullFilePath, string targetDirectory); - int TraverseTreeParallelForEach(string root, Action action, string searchPattern, ILogger logger); - bool IsDriveMounted(string path); - bool IsDirectoryEmpty(string path); - long GetTotalSize(IEnumerable paths); - void ClearDirectory(string directoryPath); - void ClearAndDeleteDirectory(string directoryPath); - string[] GetFilesWithExtension(string path, string searchPatternExpression = ""); - bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = ""); + IFileSystem FileSystem { get; } + string CacheDirectory { get; } + string CoverImageDirectory { get; } + string LogDirectory { get; } + string TempDirectory { get; } + string ConfigDirectory { get; } + string SiteThemeDirectory { get; } + /// + /// Original BookmarkDirectory. Only used for resetting directory. Use for actual path. 
+ /// + string BookmarkDirectory { get; } + /// + /// Lists out top-level folders for a given directory. Filters out System and Hidden folders. + /// + /// Absolute path of directory to scan. + /// List of folder names + IEnumerable ListDirectory(string rootPath); + Task ReadFileAsync(string path); + bool CopyFilesToDirectory(IEnumerable filePaths, string directoryPath, string prepend = ""); + bool Exists(string directory); + void CopyFileToDirectory(string fullFilePath, string targetDirectory); + int TraverseTreeParallelForEach(string root, Action action, string searchPattern, ILogger logger); + bool IsDriveMounted(string path); + bool IsDirectoryEmpty(string path); + long GetTotalSize(IEnumerable paths); + void ClearDirectory(string directoryPath); + void ClearAndDeleteDirectory(string directoryPath); + string[] GetFilesWithExtension(string path, string searchPatternExpression = ""); + bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = ""); - Dictionary FindHighestDirectoriesFromFiles(IEnumerable libraryFolders, - IList filePaths); + Dictionary FindHighestDirectoriesFromFiles(IEnumerable libraryFolders, + IList filePaths); - IEnumerable GetFoldersTillRoot(string rootPath, string fullPath); + IEnumerable GetFoldersTillRoot(string rootPath, string fullPath); - IEnumerable GetFiles(string path, string fileNameRegex = "", SearchOption searchOption = SearchOption.TopDirectoryOnly); + IEnumerable GetFiles(string path, string fileNameRegex = "", SearchOption searchOption = SearchOption.TopDirectoryOnly); - bool ExistOrCreate(string directoryPath); - void DeleteFiles(IEnumerable files); - void RemoveNonImages(string directoryName); - void Flatten(string directoryName); - Task CheckWriteAccess(string directoryName); + bool ExistOrCreate(string directoryPath); + void DeleteFiles(IEnumerable files); + void RemoveNonImages(string directoryName); + void Flatten(string directoryName); + Task CheckWriteAccess(string directoryName); - 
IEnumerable GetFilesWithCertainExtensions(string path, - string searchPatternExpression = "", - SearchOption searchOption = SearchOption.TopDirectoryOnly); + IEnumerable GetFilesWithCertainExtensions(string path, + string searchPatternExpression = "", + SearchOption searchOption = SearchOption.TopDirectoryOnly); - IEnumerable GetDirectories(string folderPath); - IEnumerable GetDirectories(string folderPath, GlobMatcher matcher); - string GetParentDirectoryName(string fileOrFolder); - #nullable enable - IList ScanFiles(string folderPath, GlobMatcher? matcher = null); - DateTime GetLastWriteTime(string folderPath); - GlobMatcher CreateMatcherFromFile(string filePath); + IEnumerable GetDirectories(string folderPath); + IEnumerable GetDirectories(string folderPath, GlobMatcher matcher); + string GetParentDirectoryName(string fileOrFolder); +#nullable enable + IList ScanFiles(string folderPath, GlobMatcher? matcher = null); + DateTime GetLastWriteTime(string folderPath); + GlobMatcher CreateMatcherFromFile(string filePath); #nullable disable - } - public class DirectoryService : IDirectoryService +} +public class DirectoryService : IDirectoryService +{ + public const string KavitaIgnoreFile = ".kavitaignore"; + public IFileSystem FileSystem { get; } + public string CacheDirectory { get; } + public string CoverImageDirectory { get; } + public string LogDirectory { get; } + public string TempDirectory { get; } + public string ConfigDirectory { get; } + public string BookmarkDirectory { get; } + public string SiteThemeDirectory { get; } + private readonly ILogger _logger; + + private static readonly Regex ExcludeDirectories = new Regex( + @"@eaDir|\.DS_Store|\.qpkg|__MACOSX|@Recently-Snapshot|@recycle", + RegexOptions.Compiled | RegexOptions.IgnoreCase); + private static readonly Regex FileCopyAppend = new Regex(@"\(\d+\)", + RegexOptions.Compiled | RegexOptions.IgnoreCase); + public static readonly string BackupDirectory = Path.Join(Directory.GetCurrentDirectory(), 
"config", "backups"); + + public DirectoryService(ILogger logger, IFileSystem fileSystem) { - public const string KavitaIgnoreFile = ".kavitaignore"; - public IFileSystem FileSystem { get; } - public string CacheDirectory { get; } - public string CoverImageDirectory { get; } - public string LogDirectory { get; } - public string TempDirectory { get; } - public string ConfigDirectory { get; } - public string BookmarkDirectory { get; } - public string SiteThemeDirectory { get; } - private readonly ILogger _logger; + _logger = logger; + FileSystem = fileSystem; + CoverImageDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "covers"); + CacheDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "cache"); + LogDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "logs"); + TempDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "temp"); + ConfigDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config"); + BookmarkDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "bookmarks"); + SiteThemeDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "themes"); - private static readonly Regex ExcludeDirectories = new Regex( - @"@eaDir|\.DS_Store|\.qpkg|__MACOSX|@Recently-Snapshot|@recycle", - RegexOptions.Compiled | RegexOptions.IgnoreCase); - private static readonly Regex FileCopyAppend = new Regex(@"\(\d+\)", - RegexOptions.Compiled | RegexOptions.IgnoreCase); - public static readonly string BackupDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "backups"); + ExistOrCreate(SiteThemeDirectory); + ExistOrCreate(CoverImageDirectory); + ExistOrCreate(CacheDirectory); + ExistOrCreate(LogDirectory); + ExistOrCreate(TempDirectory); + ExistOrCreate(BookmarkDirectory); + } - public DirectoryService(ILogger logger, IFileSystem fileSystem) - { 
- _logger = logger; - FileSystem = fileSystem; - CoverImageDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "covers"); - CacheDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "cache"); - LogDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "logs"); - TempDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "temp"); - ConfigDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config"); - BookmarkDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "bookmarks"); - SiteThemeDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "themes"); + /// + /// Given a set of regex search criteria, get files in the given path. + /// + /// This will always exclude patterns + /// Directory to search + /// Regex version of search pattern (ie \.mp3|\.mp4). Defaults to * meaning all files. + /// SearchOption to use, defaults to TopDirectoryOnly + /// List of file paths + public IEnumerable GetFilesWithCertainExtensions(string path, + string searchPatternExpression = "", + SearchOption searchOption = SearchOption.TopDirectoryOnly) + { + if (!FileSystem.Directory.Exists(path)) return ImmutableList.Empty; + var reSearchPattern = new Regex(searchPatternExpression, RegexOptions.IgnoreCase); - ExistOrCreate(SiteThemeDirectory); - ExistOrCreate(CoverImageDirectory); - ExistOrCreate(CacheDirectory); - ExistOrCreate(LogDirectory); - ExistOrCreate(TempDirectory); - ExistOrCreate(BookmarkDirectory); - } - - /// - /// Given a set of regex search criteria, get files in the given path. - /// - /// This will always exclude patterns - /// Directory to search - /// Regex version of search pattern (ie \.mp3|\.mp4). Defaults to * meaning all files. 
- /// SearchOption to use, defaults to TopDirectoryOnly - /// List of file paths - public IEnumerable GetFilesWithCertainExtensions(string path, - string searchPatternExpression = "", - SearchOption searchOption = SearchOption.TopDirectoryOnly) - { - if (!FileSystem.Directory.Exists(path)) return ImmutableList.Empty; - var reSearchPattern = new Regex(searchPatternExpression, RegexOptions.IgnoreCase); - - return FileSystem.Directory.EnumerateFiles(path, "*", searchOption) - .Where(file => + return FileSystem.Directory.EnumerateFiles(path, "*", searchOption) + .Where(file => reSearchPattern.IsMatch(FileSystem.Path.GetExtension(file)) && !FileSystem.Path.GetFileName(file).StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith)); - } + } - /// - /// Returns a list of folders from end of fullPath to rootPath. If a file is passed at the end of the fullPath, it will be ignored. - /// - /// Example) (C:/Manga/, C:/Manga/Love Hina/Specials/Omake/) returns [Omake, Specials, Love Hina] - /// - /// - /// - /// - public IEnumerable GetFoldersTillRoot(string rootPath, string fullPath) - { - var separator = FileSystem.Path.AltDirectorySeparatorChar; - if (fullPath.Contains(FileSystem.Path.DirectorySeparatorChar)) - { - fullPath = fullPath.Replace(FileSystem.Path.DirectorySeparatorChar, FileSystem.Path.AltDirectorySeparatorChar); - } + /// + /// Returns a list of folders from end of fullPath to rootPath. If a file is passed at the end of the fullPath, it will be ignored. 
+ /// + /// Example) (C:/Manga/, C:/Manga/Love Hina/Specials/Omake/) returns [Omake, Specials, Love Hina] + /// + /// + /// + /// + public IEnumerable GetFoldersTillRoot(string rootPath, string fullPath) + { + var separator = FileSystem.Path.AltDirectorySeparatorChar; + if (fullPath.Contains(FileSystem.Path.DirectorySeparatorChar)) + { + fullPath = fullPath.Replace(FileSystem.Path.DirectorySeparatorChar, FileSystem.Path.AltDirectorySeparatorChar); + } - if (rootPath.Contains(Path.DirectorySeparatorChar)) - { - rootPath = rootPath.Replace(FileSystem.Path.DirectorySeparatorChar, FileSystem.Path.AltDirectorySeparatorChar); - } + if (rootPath.Contains(Path.DirectorySeparatorChar)) + { + rootPath = rootPath.Replace(FileSystem.Path.DirectorySeparatorChar, FileSystem.Path.AltDirectorySeparatorChar); + } - var path = fullPath.EndsWith(separator) ? fullPath.Substring(0, fullPath.Length - 1) : fullPath; - var root = rootPath.EndsWith(separator) ? rootPath.Substring(0, rootPath.Length - 1) : rootPath; - var paths = new List(); - // If a file is at the end of the path, remove it before we start processing folders - if (FileSystem.Path.GetExtension(path) != string.Empty) - { - path = path.Substring(0, path.LastIndexOf(separator)); - } + var path = fullPath.EndsWith(separator) ? fullPath.Substring(0, fullPath.Length - 1) : fullPath; + var root = rootPath.EndsWith(separator) ? 
rootPath.Substring(0, rootPath.Length - 1) : rootPath; + var paths = new List(); + // If a file is at the end of the path, remove it before we start processing folders + if (FileSystem.Path.GetExtension(path) != string.Empty) + { + path = path.Substring(0, path.LastIndexOf(separator)); + } - while (FileSystem.Path.GetDirectoryName(path) != Path.GetDirectoryName(root)) - { - var folder = FileSystem.DirectoryInfo.FromDirectoryName(path).Name; - paths.Add(folder); - path = path.Substring(0, path.LastIndexOf(separator)); - } + while (FileSystem.Path.GetDirectoryName(path) != Path.GetDirectoryName(root)) + { + var folder = FileSystem.DirectoryInfo.FromDirectoryName(path).Name; + paths.Add(folder); + path = path.Substring(0, path.LastIndexOf(separator)); + } - return paths; - } + return paths; + } - /// - /// Does Directory Exist - /// - /// - /// - public bool Exists(string directory) - { - var di = FileSystem.DirectoryInfo.FromDirectoryName(directory); - return di.Exists; - } + /// + /// Does Directory Exist + /// + /// + /// + public bool Exists(string directory) + { + var di = FileSystem.DirectoryInfo.FromDirectoryName(directory); + return di.Exists; + } - /// - /// Get files given a path. - /// - /// This will automatically filter out restricted files, like MacOsMetadata files - /// - /// An optional regex string to search against. Will use file path to match against. - /// Defaults to top level directory only, can be given all to provide recursive searching - /// - public IEnumerable GetFiles(string path, string fileNameRegex = "", SearchOption searchOption = SearchOption.TopDirectoryOnly) - { - if (!FileSystem.Directory.Exists(path)) return ImmutableList.Empty; + /// + /// Get files given a path. + /// + /// This will automatically filter out restricted files, like MacOsMetadata files + /// + /// An optional regex string to search against. Will use file path to match against. 
+ /// Defaults to top level directory only, can be given all to provide recursive searching + /// + public IEnumerable GetFiles(string path, string fileNameRegex = "", SearchOption searchOption = SearchOption.TopDirectoryOnly) + { + if (!FileSystem.Directory.Exists(path)) return ImmutableList.Empty; - if (fileNameRegex != string.Empty) - { - var reSearchPattern = new Regex(fileNameRegex, RegexOptions.IgnoreCase); - return FileSystem.Directory.EnumerateFiles(path, "*", searchOption) + if (fileNameRegex != string.Empty) + { + var reSearchPattern = new Regex(fileNameRegex, RegexOptions.IgnoreCase); + return FileSystem.Directory.EnumerateFiles(path, "*", searchOption) .Where(file => { var fileName = FileSystem.Path.GetFileName(file); return reSearchPattern.IsMatch(fileName) && !fileName.StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith); }); - } - - return FileSystem.Directory.EnumerateFiles(path, "*", searchOption).Where(file => - !FileSystem.Path.GetFileName(file).StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith)); - } - - /// - /// Copies a file into a directory. Does not maintain parent folder of file. - /// Will create target directory if doesn't exist. Automatically overwrites what is there. - /// - /// - /// - public void CopyFileToDirectory(string fullFilePath, string targetDirectory) - { - try - { - var fileInfo = FileSystem.FileInfo.FromFileName(fullFilePath); - if (!fileInfo.Exists) return; - - ExistOrCreate(targetDirectory); - fileInfo.CopyTo(FileSystem.Path.Join(targetDirectory, fileInfo.Name), true); - } - catch (Exception ex) - { - _logger.LogError(ex, "There was a critical error when copying {File} to {Directory}", fullFilePath, targetDirectory); - } - } - - /// - /// Copies all files and subdirectories within a directory to a target location - /// - /// Directory to copy from. Does not copy the parent folder - /// Destination to copy to. 
Will be created if doesn't exist - /// Defaults to all files - /// If was successful - /// Thrown when source directory does not exist - public bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = "") - { - if (string.IsNullOrEmpty(sourceDirName)) return false; - - // Get the subdirectories for the specified directory. - var dir = FileSystem.DirectoryInfo.FromDirectoryName(sourceDirName); - - if (!dir.Exists) - { - throw new DirectoryNotFoundException( - "Source directory does not exist or could not be found: " - + sourceDirName); - } - - var dirs = dir.GetDirectories(); - - // If the destination directory doesn't exist, create it. - ExistOrCreate(destDirName); - - // Get the files in the directory and copy them to the new location. - var files = GetFilesWithExtension(dir.FullName, searchPattern).Select(n => FileSystem.FileInfo.FromFileName(n)); - foreach (var file in files) - { - var tempPath = FileSystem.Path.Combine(destDirName, file.Name); - file.CopyTo(tempPath, false); - } - - // If copying subdirectories, copy them and their contents to new location. - foreach (var subDir in dirs) - { - var tempPath = FileSystem.Path.Combine(destDirName, subDir.Name); - CopyDirectoryToDirectory(subDir.FullName, tempPath); - } - - return true; - } - - /// - /// Checks if the root path of a path exists or not. - /// - /// - /// - public bool IsDriveMounted(string path) - { - return FileSystem.DirectoryInfo.FromDirectoryName(FileSystem.Path.GetPathRoot(path) ?? string.Empty).Exists; - } - - - /// - /// Checks if the root path of a path is empty or not. 
- /// - /// - /// - public bool IsDirectoryEmpty(string path) - { - return FileSystem.Directory.Exists(path) && !FileSystem.Directory.EnumerateFileSystemEntries(path).Any(); } - public string[] GetFilesWithExtension(string path, string searchPatternExpression = "") - { - if (searchPatternExpression != string.Empty) - { - return GetFilesWithCertainExtensions(path, searchPatternExpression).ToArray(); - } + return FileSystem.Directory.EnumerateFiles(path, "*", searchOption).Where(file => + !FileSystem.Path.GetFileName(file).StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith)); + } - return !FileSystem.Directory.Exists(path) ? Array.Empty() : FileSystem.Directory.GetFiles(path); - } - - /// - /// Returns the total number of bytes for a given set of full file paths - /// - /// - /// Total bytes - public long GetTotalSize(IEnumerable paths) - { - return paths.Sum(path => FileSystem.FileInfo.FromFileName(path).Length); - } - - /// - /// Returns true if the path exists and is a directory. If path does not exist, this will create it. Returns false in all fail cases. - /// - /// - /// - public bool ExistOrCreate(string directoryPath) - { - var di = FileSystem.DirectoryInfo.FromDirectoryName(directoryPath); - if (di.Exists) return true; - try - { - FileSystem.Directory.CreateDirectory(directoryPath); - } - catch (Exception) - { - return false; - } - return true; - } - - /// - /// Deletes all files within the directory, then the directory itself. 
- /// - /// - public void ClearAndDeleteDirectory(string directoryPath) - { - if (!FileSystem.Directory.Exists(directoryPath)) return; - - var di = FileSystem.DirectoryInfo.FromDirectoryName(directoryPath); - - ClearDirectory(directoryPath); - - di.Delete(true); - } - - /// - /// Deletes all files and folders within the directory path - /// - /// - /// - public void ClearDirectory(string directoryPath) - { - var di = FileSystem.DirectoryInfo.FromDirectoryName(directoryPath); - if (!di.Exists) return; - - foreach (var file in di.EnumerateFiles()) - { - file.Delete(); - } - foreach (var dir in di.EnumerateDirectories()) - { - dir.Delete(true); - } - } - - /// - /// Copies files to a destination directory. If the destination directory doesn't exist, this will create it. - /// - /// If a file already exists in dest, this will rename as (2). It does not support multiple iterations of this. Overwriting is not supported. - /// - /// - /// An optional string to prepend to the target file's name - /// - public bool CopyFilesToDirectory(IEnumerable filePaths, string directoryPath, string prepend = "") - { - ExistOrCreate(directoryPath); - string currentFile = null; - try - { - foreach (var file in filePaths) - { - currentFile = file; - - if (!FileSystem.File.Exists(file)) - { - _logger.LogError("Unable to copy {File} to {DirectoryPath} as it doesn't exist", file, directoryPath); - continue; - } - var fileInfo = FileSystem.FileInfo.FromFileName(file); - var targetFile = FileSystem.FileInfo.FromFileName(RenameFileForCopy(file, directoryPath, prepend)); - - fileInfo.CopyTo(FileSystem.Path.Join(directoryPath, targetFile.Name)); - } - } - catch (Exception ex) - { - _logger.LogError(ex, "Unable to copy {File} to {DirectoryPath}", currentFile, directoryPath); - return false; - } - - return true; - } - - /// - /// Generates the combined filepath given a prepend (optional), output directory path, and a full input file path. 
- /// If the output file already exists, will append (1), (2), etc until it can be written out - /// - /// - /// - /// - /// - private string RenameFileForCopy(string fileToCopy, string directoryPath, string prepend = "") - { - var fileInfo = FileSystem.FileInfo.FromFileName(fileToCopy); - var filename = prepend + fileInfo.Name; - - var targetFile = FileSystem.FileInfo.FromFileName(FileSystem.Path.Join(directoryPath, filename)); - if (!targetFile.Exists) - { - return targetFile.FullName; - } - - var noExtension = FileSystem.Path.GetFileNameWithoutExtension(fileInfo.Name); - if (FileCopyAppend.IsMatch(noExtension)) - { - var match = FileCopyAppend.Match(noExtension).Value; - var matchNumber = match.Replace("(", string.Empty).Replace(")", string.Empty); - noExtension = noExtension.Replace(match, $"({int.Parse(matchNumber) + 1})"); - } - else - { - noExtension += " (1)"; - } - - var newFilename = prepend + noExtension + - FileSystem.Path.GetExtension(fileInfo.Name); - return RenameFileForCopy(FileSystem.Path.Join(directoryPath, newFilename), directoryPath, prepend); - } - - /// - /// Lists all directories in a root path. Will exclude Hidden or System directories. - /// - /// - /// - public IEnumerable ListDirectory(string rootPath) + /// + /// Copies a file into a directory. Does not maintain parent folder of file. + /// Will create target directory if doesn't exist. Automatically overwrites what is there. 
+ /// + /// + /// + public void CopyFileToDirectory(string fullFilePath, string targetDirectory) + { + try { - if (!FileSystem.Directory.Exists(rootPath)) return ImmutableList.Empty; + var fileInfo = FileSystem.FileInfo.FromFileName(fullFilePath); + if (!fileInfo.Exists) return; - var di = FileSystem.DirectoryInfo.FromDirectoryName(rootPath); - var dirs = di.GetDirectories() - .Where(dir => !(dir.Attributes.HasFlag(FileAttributes.Hidden) || dir.Attributes.HasFlag(FileAttributes.System))) - .Select(d => new DirectoryDto() + ExistOrCreate(targetDirectory); + fileInfo.CopyTo(FileSystem.Path.Join(targetDirectory, fileInfo.Name), true); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was a critical error when copying {File} to {Directory}", fullFilePath, targetDirectory); + } + } + + /// + /// Copies all files and subdirectories within a directory to a target location + /// + /// Directory to copy from. Does not copy the parent folder + /// Destination to copy to. Will be created if doesn't exist + /// Defaults to all files + /// If was successful + /// Thrown when source directory does not exist + public bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = "") + { + if (string.IsNullOrEmpty(sourceDirName)) return false; + + // Get the subdirectories for the specified directory. + var dir = FileSystem.DirectoryInfo.FromDirectoryName(sourceDirName); + + if (!dir.Exists) + { + throw new DirectoryNotFoundException( + "Source directory does not exist or could not be found: " + + sourceDirName); + } + + var dirs = dir.GetDirectories(); + + // If the destination directory doesn't exist, create it. + ExistOrCreate(destDirName); + + // Get the files in the directory and copy them to the new location. 
+ var files = GetFilesWithExtension(dir.FullName, searchPattern).Select(n => FileSystem.FileInfo.FromFileName(n)); + foreach (var file in files) + { + var tempPath = FileSystem.Path.Combine(destDirName, file.Name); + file.CopyTo(tempPath, false); + } + + // If copying subdirectories, copy them and their contents to new location. + foreach (var subDir in dirs) + { + var tempPath = FileSystem.Path.Combine(destDirName, subDir.Name); + CopyDirectoryToDirectory(subDir.FullName, tempPath); + } + + return true; + } + + /// + /// Checks if the root path of a path exists or not. + /// + /// + /// + public bool IsDriveMounted(string path) + { + return FileSystem.DirectoryInfo.FromDirectoryName(FileSystem.Path.GetPathRoot(path) ?? string.Empty).Exists; + } + + + /// + /// Checks if the root path of a path is empty or not. + /// + /// + /// + public bool IsDirectoryEmpty(string path) + { + return FileSystem.Directory.Exists(path) && !FileSystem.Directory.EnumerateFileSystemEntries(path).Any(); + } + + public string[] GetFilesWithExtension(string path, string searchPatternExpression = "") + { + if (searchPatternExpression != string.Empty) + { + return GetFilesWithCertainExtensions(path, searchPatternExpression).ToArray(); + } + + return !FileSystem.Directory.Exists(path) ? Array.Empty() : FileSystem.Directory.GetFiles(path); + } + + /// + /// Returns the total number of bytes for a given set of full file paths + /// + /// + /// Total bytes + public long GetTotalSize(IEnumerable paths) + { + return paths.Sum(path => FileSystem.FileInfo.FromFileName(path).Length); + } + + /// + /// Returns true if the path exists and is a directory. If path does not exist, this will create it. Returns false in all fail cases. 
+ /// + /// + /// + public bool ExistOrCreate(string directoryPath) + { + var di = FileSystem.DirectoryInfo.FromDirectoryName(directoryPath); + if (di.Exists) return true; + try + { + FileSystem.Directory.CreateDirectory(directoryPath); + } + catch (Exception) + { + return false; + } + return true; + } + + /// + /// Deletes all files within the directory, then the directory itself. + /// + /// + public void ClearAndDeleteDirectory(string directoryPath) + { + if (!FileSystem.Directory.Exists(directoryPath)) return; + + var di = FileSystem.DirectoryInfo.FromDirectoryName(directoryPath); + + ClearDirectory(directoryPath); + + di.Delete(true); + } + + /// + /// Deletes all files and folders within the directory path + /// + /// + /// + public void ClearDirectory(string directoryPath) + { + var di = FileSystem.DirectoryInfo.FromDirectoryName(directoryPath); + if (!di.Exists) return; + try + { + foreach (var file in di.EnumerateFiles()) + { + file.Delete(); + } + foreach (var dir in di.EnumerateDirectories()) + { + dir.Delete(true); + } + } + catch (UnauthorizedAccessException ex) + { + _logger.LogError(ex, "[ClearDirectory] Could not delete {DirectoryPath} due to permission issue", directoryPath); + } + + } + + /// + /// Copies files to a destination directory. If the destination directory doesn't exist, this will create it. + /// + /// If a file already exists in dest, this will rename as (2). It does not support multiple iterations of this. Overwriting is not supported. 
+ /// + /// + /// An optional string to prepend to the target file's name + /// + public bool CopyFilesToDirectory(IEnumerable filePaths, string directoryPath, string prepend = "") + { + ExistOrCreate(directoryPath); + string currentFile = null; + try + { + foreach (var file in filePaths) + { + currentFile = file; + + if (!FileSystem.File.Exists(file)) { - Name = d.Name, - FullPath = d.FullName, - }).ToImmutableList(); + _logger.LogError("Unable to copy {File} to {DirectoryPath} as it doesn't exist", file, directoryPath); + continue; + } + var fileInfo = FileSystem.FileInfo.FromFileName(file); + var targetFile = FileSystem.FileInfo.FromFileName(RenameFileForCopy(file, directoryPath, prepend)); - return dirs; + fileInfo.CopyTo(FileSystem.Path.Join(directoryPath, targetFile.Name)); + } } - - /// - /// Reads a file's into byte[]. Returns empty array if file doesn't exist. - /// - /// - /// - public async Task ReadFileAsync(string path) - { - if (!FileSystem.File.Exists(path)) return Array.Empty(); - return await FileSystem.File.ReadAllBytesAsync(path); - } - - - /// - /// Finds the highest directories from a set of file paths. Does not return the root path, will always select the highest non-root path. 
- /// - /// If the file paths do not contain anything from libraryFolders, this returns an empty dictionary back - /// List of top level folders which files belong to - /// List of file paths that belong to libraryFolders - /// - public Dictionary FindHighestDirectoriesFromFiles(IEnumerable libraryFolders, IList filePaths) - { - var stopLookingForDirectories = false; - var dirs = new Dictionary(); - foreach (var folder in libraryFolders.Select(Tasks.Scanner.Parser.Parser.NormalizePath)) - { - if (stopLookingForDirectories) break; - foreach (var file in filePaths.Select(Tasks.Scanner.Parser.Parser.NormalizePath)) - { - if (!file.Contains(folder)) continue; - - var parts = GetFoldersTillRoot(folder, file).ToList(); - if (parts.Count == 0) - { - // Break from all loops, we done, just scan folder.Path (library root) - dirs.Add(folder, string.Empty); - stopLookingForDirectories = true; - break; - } - - var fullPath = Tasks.Scanner.Parser.Parser.NormalizePath(Path.Join(folder, parts.Last())); - if (!dirs.ContainsKey(fullPath)) - { - dirs.Add(fullPath, string.Empty); - } - } - } - - return dirs; - } - - /// - /// Gets a set of directories from the folder path. Automatically excludes directories that shouldn't be in scope. - /// - /// - /// List of directory paths, empty if path doesn't exist - public IEnumerable GetDirectories(string folderPath) - { - if (!FileSystem.Directory.Exists(folderPath)) return ImmutableArray.Empty; - return FileSystem.Directory.GetDirectories(folderPath) - .Where(path => ExcludeDirectories.Matches(path).Count == 0); - } - - /// - /// Gets a set of directories from the folder path. Automatically excludes directories that shouldn't be in scope. 
- /// - /// - /// A set of glob rules that will filter directories out - /// List of directory paths, empty if path doesn't exist - public IEnumerable GetDirectories(string folderPath, GlobMatcher matcher) - { - if (matcher == null) return GetDirectories(folderPath); - - return GetDirectories(folderPath) - .Where(folder => !matcher.ExcludeMatches( - $"{FileSystem.DirectoryInfo.FromDirectoryName(folder).Name}{FileSystem.Path.AltDirectorySeparatorChar}")); - } - - /// - /// Returns all directories, including subdirectories. Automatically excludes directories that shouldn't be in scope. - /// - /// - /// - public IEnumerable GetAllDirectories(string folderPath) - { - if (!FileSystem.Directory.Exists(folderPath)) return ImmutableArray.Empty; - var directories = new List(); - - var foundDirs = GetDirectories(folderPath); - foreach (var foundDir in foundDirs) - { - directories.Add(foundDir); - directories.AddRange(GetAllDirectories(foundDir)); - } - - return directories; - } - - /// - /// Returns the parent directories name for a file or folder. Empty string is path is not valid. - /// - /// - /// - public string GetParentDirectoryName(string fileOrFolder) - { - try - { - return Tasks.Scanner.Parser.Parser.NormalizePath(Directory.GetParent(fileOrFolder)?.FullName); - } - catch (Exception) - { - return string.Empty; - } - } - - /// - /// Scans a directory by utilizing a recursive folder search. If a .kavitaignore file is found, will ignore matching patterns - /// - /// - /// - /// - public IList ScanFiles(string folderPath, GlobMatcher? 
matcher = null) - { - _logger.LogDebug("[ScanFiles] called on {Path}", folderPath); - var files = new List(); - if (!Exists(folderPath)) return files; - - var potentialIgnoreFile = FileSystem.Path.Join(folderPath, KavitaIgnoreFile); - if (matcher == null) - { - matcher = CreateMatcherFromFile(potentialIgnoreFile); - } - else - { - matcher.Merge(CreateMatcherFromFile(potentialIgnoreFile)); - } - - - var directories = GetDirectories(folderPath, matcher); - - foreach (var directory in directories) - { - files.AddRange(ScanFiles(directory, matcher)); - } - - - // Get the matcher from either ignore or global (default setup) - if (matcher == null) - { - files.AddRange(GetFilesWithCertainExtensions(folderPath, Tasks.Scanner.Parser.Parser.SupportedExtensions)); - } - else - { - var foundFiles = GetFilesWithCertainExtensions(folderPath, - Tasks.Scanner.Parser.Parser.SupportedExtensions) - .Where(file => !matcher.ExcludeMatches(FileSystem.FileInfo.FromFileName(file).Name)); - files.AddRange(foundFiles); - } - - return files; - } - - /// - /// Recursively scans a folder and returns the max last write time on any folders and files - /// - /// - /// Max Last Write Time - public DateTime GetLastWriteTime(string folderPath) - { - if (!FileSystem.Directory.Exists(folderPath)) throw new IOException($"{folderPath} does not exist"); - return Directory.GetFileSystemEntries(folderPath, "*.*", SearchOption.AllDirectories).Max(path => FileSystem.File.GetLastWriteTime(path)); - } - - /// - /// Generates a GlobMatcher from a .kavitaignore file found at path. Returns null otherwise. 
- /// - /// - /// - public GlobMatcher CreateMatcherFromFile(string filePath) - { - if (!FileSystem.File.Exists(filePath)) - { - return null; - } - - // Read file in and add each line to Matcher - var lines = FileSystem.File.ReadAllLines(filePath); - if (lines.Length == 0) - { - return null; - } - - GlobMatcher matcher = new(); - foreach (var line in lines) - { - matcher.AddExclude(line); - } - - return matcher; - } - - - /// - /// Recursively scans files and applies an action on them. This uses as many cores the underlying PC has to speed - /// up processing. - /// NOTE: This is no longer parallel due to user's machines locking up - /// - /// Directory to scan - /// Action to apply on file path - /// Regex pattern to search against - /// - /// - public int TraverseTreeParallelForEach(string root, Action action, string searchPattern, ILogger logger) - { - //Count of files traversed and timer for diagnostic output - var fileCount = 0; - - - // Data structure to hold names of subfolders to be examined for files. - var dirs = new Stack(); - - if (!FileSystem.Directory.Exists(root)) { - throw new ArgumentException("The directory doesn't exist"); - } - - dirs.Push(root); - - while (dirs.Count > 0) { - var currentDir = dirs.Pop(); - IEnumerable subDirs; - string[] files; - - try { - subDirs = GetDirectories(currentDir); - } - // Thrown if we do not have discovery permission on the directory. - catch (UnauthorizedAccessException e) { - logger.LogCritical(e, "Unauthorized access on {Directory}", currentDir); - continue; - } - // Thrown if another process has deleted the directory after we retrieved its name. 
- catch (DirectoryNotFoundException e) { - logger.LogCritical(e, "Directory not found on {Directory}", currentDir); - continue; - } - - try { - files = GetFilesWithCertainExtensions(currentDir, searchPattern) - .ToArray(); - } - catch (UnauthorizedAccessException e) { - logger.LogCritical(e, "Unauthorized access on a file in {Directory}", currentDir); - continue; - } - catch (DirectoryNotFoundException e) { - logger.LogCritical(e, "Directory not found on a file in {Directory}", currentDir); - continue; - } - catch (IOException e) { - logger.LogCritical(e, "IO exception on a file in {Directory}", currentDir); - continue; - } - - // Execute in parallel if there are enough files in the directory. - // Otherwise, execute sequentially. Files are opened and processed - // synchronously but this could be modified to perform async I/O. - try { - foreach (var file in files) { - action(file); - fileCount++; - } - } - catch (AggregateException ae) { - ae.Handle((ex) => { - if (ex is not UnauthorizedAccessException) return false; - // Here we just output a message and go on. - _logger.LogError(ex, "Unauthorized access on file"); - return true; - // Handle other exceptions here if necessary... - - }); - } - - // Push the subdirectories onto the stack for traversal. - // This could also be done before handing the files. - foreach (var str in subDirs) - dirs.Push(str); - } - - return fileCount; - } - - /// - /// Attempts to delete the files passed to it. Swallows exceptions. - /// - /// Full path of files to delete - public void DeleteFiles(IEnumerable files) - { - foreach (var file in files) - { - try - { - FileSystem.FileInfo.FromFileName(file).Delete(); - } - catch (Exception) - { - /* Swallow exception */ - } - } - } - - /// - /// Returns the human-readable file size for an arbitrary, 64-bit file size - /// The default format is "0.## XB", e.g. 
"4.2 KB" or "1.43 GB" - /// - /// https://www.somacon.com/p576.php - /// - /// - public static string GetHumanReadableBytes(long bytes) - { - // Get absolute value - var absoluteBytes = (bytes < 0 ? -bytes : bytes); - // Determine the suffix and readable value - string suffix; - double readable; - switch (absoluteBytes) - { - // Exabyte - case >= 0x1000000000000000: - suffix = "EB"; - readable = (bytes >> 50); - break; - // Petabyte - case >= 0x4000000000000: - suffix = "PB"; - readable = (bytes >> 40); - break; - // Terabyte - case >= 0x10000000000: - suffix = "TB"; - readable = (bytes >> 30); - break; - // Gigabyte - case >= 0x40000000: - suffix = "GB"; - readable = (bytes >> 20); - break; - // Megabyte - case >= 0x100000: - suffix = "MB"; - readable = (bytes >> 10); - break; - // Kilobyte - case >= 0x400: - suffix = "KB"; - readable = bytes; - break; - default: - return bytes.ToString("0 B"); // Byte - } - // Divide by 1024 to get fractional value - readable = (readable / 1024); - // Return formatted number with suffix - return readable.ToString("0.## ") + suffix; - } - - /// - /// Removes all files except images from the directory. Includes sub directories. - /// - /// Fully qualified directory - public void RemoveNonImages(string directoryName) + catch (Exception ex) { - DeleteFiles(GetFiles(directoryName, searchOption:SearchOption.AllDirectories).Where(file => !Tasks.Scanner.Parser.Parser.IsImage(file))); + _logger.LogError(ex, "Unable to copy {File} to {DirectoryPath}", currentFile, directoryPath); + return false; } + return true; + } - /// - /// Flattens all files in subfolders to the passed directory recursively. 
- /// - /// - /// foo - /// ├── 1.txt - /// ├── 2.txt - /// ├── 3.txt - /// ├── 4.txt - /// └── bar - /// ├── 1.txt - /// ├── 2.txt - /// └── 5.txt - /// - /// becomes: - /// foo - /// ├── 1.txt - /// ├── 2.txt - /// ├── 3.txt - /// ├── 4.txt - /// ├── bar_1.txt - /// ├── bar_2.txt - /// └── bar_5.txt - /// - /// Fully qualified Directory name - public void Flatten(string directoryName) + /// + /// Generates the combined filepath given a prepend (optional), output directory path, and a full input file path. + /// If the output file already exists, will append (1), (2), etc until it can be written out + /// + /// + /// + /// + /// + private string RenameFileForCopy(string fileToCopy, string directoryPath, string prepend = "") + { + var fileInfo = FileSystem.FileInfo.FromFileName(fileToCopy); + var filename = prepend + fileInfo.Name; + + var targetFile = FileSystem.FileInfo.FromFileName(FileSystem.Path.Join(directoryPath, filename)); + if (!targetFile.Exists) { - if (string.IsNullOrEmpty(directoryName) || !FileSystem.Directory.Exists(directoryName)) return; - - var directory = FileSystem.DirectoryInfo.FromDirectoryName(directoryName); - - var index = 0; - FlattenDirectory(directory, directory, ref index); + return targetFile.FullName; } - /// - /// Checks whether a directory has write permissions - /// - /// Fully qualified path - /// - public async Task CheckWriteAccess(string directoryName) + var noExtension = FileSystem.Path.GetFileNameWithoutExtension(fileInfo.Name); + if (FileCopyAppend.IsMatch(noExtension)) { - try - { - ExistOrCreate(directoryName); - await FileSystem.File.WriteAllTextAsync( - FileSystem.Path.Join(directoryName, "test.txt"), - string.Empty); - } - catch (Exception) - { - ClearAndDeleteDirectory(directoryName); - return false; - } - - ClearAndDeleteDirectory(directoryName); - return true; + var match = FileCopyAppend.Match(noExtension).Value; + var matchNumber = match.Replace("(", string.Empty).Replace(")", string.Empty); + noExtension = 
noExtension.Replace(match, $"({int.Parse(matchNumber) + 1})"); + } + else + { + noExtension += " (1)"; } + var newFilename = prepend + noExtension + + FileSystem.Path.GetExtension(fileInfo.Name); + return RenameFileForCopy(FileSystem.Path.Join(directoryPath, newFilename), directoryPath, prepend); + } - private static void FlattenDirectory(IFileSystemInfo root, IDirectoryInfo directory, ref int directoryIndex) - { - if (!root.FullName.Equals(directory.FullName)) + /// + /// Lists all directories in a root path. Will exclude Hidden or System directories. + /// + /// + /// + public IEnumerable ListDirectory(string rootPath) + { + if (!FileSystem.Directory.Exists(rootPath)) return ImmutableList.Empty; + + var di = FileSystem.DirectoryInfo.FromDirectoryName(rootPath); + var dirs = di.GetDirectories() + .Where(dir => !(dir.Attributes.HasFlag(FileAttributes.Hidden) || dir.Attributes.HasFlag(FileAttributes.System))) + .Select(d => new DirectoryDto() { - var fileIndex = 1; + Name = d.Name, + FullPath = d.FullName, + }).ToImmutableList(); - foreach (var file in directory.EnumerateFiles().OrderByNatural(file => file.FullName)) + return dirs; + } + + /// + /// Reads a file's into byte[]. Returns empty array if file doesn't exist. + /// + /// + /// + public async Task ReadFileAsync(string path) + { + if (!FileSystem.File.Exists(path)) return Array.Empty(); + return await FileSystem.File.ReadAllBytesAsync(path); + } + + + /// + /// Finds the highest directories from a set of file paths. Does not return the root path, will always select the highest non-root path. 
+ /// + /// If the file paths do not contain anything from libraryFolders, this returns an empty dictionary back + /// List of top level folders which files belong to + /// List of file paths that belong to libraryFolders + /// + public Dictionary FindHighestDirectoriesFromFiles(IEnumerable libraryFolders, IList filePaths) + { + var stopLookingForDirectories = false; + var dirs = new Dictionary(); + foreach (var folder in libraryFolders.Select(Tasks.Scanner.Parser.Parser.NormalizePath)) + { + if (stopLookingForDirectories) break; + foreach (var file in filePaths.Select(Tasks.Scanner.Parser.Parser.NormalizePath)) + { + if (!file.Contains(folder)) continue; + + var parts = GetFoldersTillRoot(folder, file).ToList(); + if (parts.Count == 0) { - if (file.Directory == null) continue; - var paddedIndex = Tasks.Scanner.Parser.Parser.PadZeros(directoryIndex + ""); - // We need to rename the files so that after flattening, they are in the order we found them - var newName = $"{paddedIndex}_{Tasks.Scanner.Parser.Parser.PadZeros(fileIndex + "")}{file.Extension}"; - var newPath = Path.Join(root.FullName, newName); - if (!File.Exists(newPath)) file.MoveTo(newPath); - fileIndex++; + // Break from all loops, we done, just scan folder.Path (library root) + dirs.Add(folder, string.Empty); + stopLookingForDirectories = true; + break; } - directoryIndex++; + var fullPath = Tasks.Scanner.Parser.Parser.NormalizePath(Path.Join(folder, parts.Last())); + if (!dirs.ContainsKey(fullPath)) + { + dirs.Add(fullPath, string.Empty); + } + } + } + + return dirs; + } + + /// + /// Gets a set of directories from the folder path. Automatically excludes directories that shouldn't be in scope. 
+ /// + /// + /// List of directory paths, empty if path doesn't exist + public IEnumerable GetDirectories(string folderPath) + { + if (!FileSystem.Directory.Exists(folderPath)) return ImmutableArray.Empty; + return FileSystem.Directory.GetDirectories(folderPath) + .Where(path => ExcludeDirectories.Matches(path).Count == 0); + } + + /// + /// Gets a set of directories from the folder path. Automatically excludes directories that shouldn't be in scope. + /// + /// + /// A set of glob rules that will filter directories out + /// List of directory paths, empty if path doesn't exist + public IEnumerable GetDirectories(string folderPath, GlobMatcher matcher) + { + if (matcher == null) return GetDirectories(folderPath); + + return GetDirectories(folderPath) + .Where(folder => !matcher.ExcludeMatches( + $"{FileSystem.DirectoryInfo.FromDirectoryName(folder).Name}{FileSystem.Path.AltDirectorySeparatorChar}")); + } + + /// + /// Returns all directories, including subdirectories. Automatically excludes directories that shouldn't be in scope. + /// + /// + /// + public IEnumerable GetAllDirectories(string folderPath) + { + if (!FileSystem.Directory.Exists(folderPath)) return ImmutableArray.Empty; + var directories = new List(); + + var foundDirs = GetDirectories(folderPath); + foreach (var foundDir in foundDirs) + { + directories.Add(foundDir); + directories.AddRange(GetAllDirectories(foundDir)); + } + + return directories; + } + + /// + /// Returns the parent directories name for a file or folder. Empty string is path is not valid. + /// + /// + /// + public string GetParentDirectoryName(string fileOrFolder) + { + try + { + return Tasks.Scanner.Parser.Parser.NormalizePath(Directory.GetParent(fileOrFolder)?.FullName); + } + catch (Exception) + { + return string.Empty; + } + } + + /// + /// Scans a directory by utilizing a recursive folder search. 
If a .kavitaignore file is found, will ignore matching patterns + /// + /// + /// + /// + public IList ScanFiles(string folderPath, GlobMatcher? matcher = null) + { + _logger.LogDebug("[ScanFiles] called on {Path}", folderPath); + var files = new List(); + if (!Exists(folderPath)) return files; + + var potentialIgnoreFile = FileSystem.Path.Join(folderPath, KavitaIgnoreFile); + if (matcher == null) + { + matcher = CreateMatcherFromFile(potentialIgnoreFile); + } + else + { + matcher.Merge(CreateMatcherFromFile(potentialIgnoreFile)); + } + + + var directories = GetDirectories(folderPath, matcher); + + foreach (var directory in directories) + { + files.AddRange(ScanFiles(directory, matcher)); + } + + + // Get the matcher from either ignore or global (default setup) + if (matcher == null) + { + files.AddRange(GetFilesWithCertainExtensions(folderPath, Tasks.Scanner.Parser.Parser.SupportedExtensions)); + } + else + { + var foundFiles = GetFilesWithCertainExtensions(folderPath, + Tasks.Scanner.Parser.Parser.SupportedExtensions) + .Where(file => !matcher.ExcludeMatches(FileSystem.FileInfo.FromFileName(file).Name)); + files.AddRange(foundFiles); + } + + return files; + } + + /// + /// Recursively scans a folder and returns the max last write time on any folders and files + /// + /// If the folder is empty, this will return MaxValue for a DateTime + /// + /// Max Last Write Time + public DateTime GetLastWriteTime(string folderPath) + { + if (!FileSystem.Directory.Exists(folderPath)) throw new IOException($"{folderPath} does not exist"); + var fileEntries = FileSystem.Directory.GetFileSystemEntries(folderPath, "*.*", SearchOption.AllDirectories); + if (fileEntries.Length == 0) return DateTime.MaxValue; + return fileEntries.Max(path => FileSystem.File.GetLastWriteTime(path)); + } + + /// + /// Generates a GlobMatcher from a .kavitaignore file found at path. Returns null otherwise. 
+ /// + /// + /// + public GlobMatcher CreateMatcherFromFile(string filePath) + { + if (!FileSystem.File.Exists(filePath)) + { + return null; + } + + // Read file in and add each line to Matcher + var lines = FileSystem.File.ReadAllLines(filePath); + if (lines.Length == 0) + { + return null; + } + + GlobMatcher matcher = new(); + foreach (var line in lines.Where(s => !string.IsNullOrEmpty(s))) + { + matcher.AddExclude(line); + } + + return matcher; + } + + + /// + /// Recursively scans files and applies an action on them. This uses as many cores the underlying PC has to speed + /// up processing. + /// NOTE: This is no longer parallel due to user's machines locking up + /// + /// Directory to scan + /// Action to apply on file path + /// Regex pattern to search against + /// + /// + public int TraverseTreeParallelForEach(string root, Action action, string searchPattern, ILogger logger) + { + //Count of files traversed and timer for diagnostic output + var fileCount = 0; + + + // Data structure to hold names of subfolders to be examined for files. + var dirs = new Stack(); + + if (!FileSystem.Directory.Exists(root)) { + throw new ArgumentException("The directory doesn't exist"); + } + + dirs.Push(root); + + while (dirs.Count > 0) { + var currentDir = dirs.Pop(); + IEnumerable subDirs; + string[] files; + + try { + subDirs = GetDirectories(currentDir); + } + // Thrown if we do not have discovery permission on the directory. + catch (UnauthorizedAccessException e) { + logger.LogCritical(e, "Unauthorized access on {Directory}", currentDir); + continue; + } + // Thrown if another process has deleted the directory after we retrieved its name. 
+ catch (DirectoryNotFoundException e) { + logger.LogCritical(e, "Directory not found on {Directory}", currentDir); + continue; } - foreach (var subDirectory in directory.EnumerateDirectories().OrderByNatural(d => d.FullName)) - { - // We need to check if the directory is not a blacklisted (ie __MACOSX) - if (Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(subDirectory.FullName)) continue; + try { + files = GetFilesWithCertainExtensions(currentDir, searchPattern) + .ToArray(); + } + catch (UnauthorizedAccessException e) { + logger.LogCritical(e, "Unauthorized access on a file in {Directory}", currentDir); + continue; + } + catch (DirectoryNotFoundException e) { + logger.LogCritical(e, "Directory not found on a file in {Directory}", currentDir); + continue; + } + catch (IOException e) { + logger.LogCritical(e, "IO exception on a file in {Directory}", currentDir); + continue; + } - FlattenDirectory(root, subDirectory, ref directoryIndex); + // Execute in parallel if there are enough files in the directory. + // Otherwise, execute sequentially. Files are opened and processed + // synchronously but this could be modified to perform async I/O. + try { + foreach (var file in files) { + action(file); + fileCount++; + } + } + catch (AggregateException ae) { + ae.Handle((ex) => { + if (ex is not UnauthorizedAccessException) return false; + // Here we just output a message and go on. + _logger.LogError(ex, "Unauthorized access on file"); + return true; + // Handle other exceptions here if necessary... + + }); + } + + // Push the subdirectories onto the stack for traversal. + // This could also be done before handing the files. + foreach (var str in subDirs) + dirs.Push(str); + } + + return fileCount; + } + + /// + /// Attempts to delete the files passed to it. Swallows exceptions. 
+ /// + /// Full path of files to delete + public void DeleteFiles(IEnumerable files) + { + foreach (var file in files) + { + try + { + FileSystem.FileInfo.FromFileName(file).Delete(); + } + catch (Exception) + { + /* Swallow exception */ } } } + + /// + /// Returns the human-readable file size for an arbitrary, 64-bit file size + /// The default format is "0.## XB", e.g. "4.2 KB" or "1.43 GB" + /// + /// https://www.somacon.com/p576.php + /// + /// + public static string GetHumanReadableBytes(long bytes) + { + // Get absolute value + var absoluteBytes = (bytes < 0 ? -bytes : bytes); + // Determine the suffix and readable value + string suffix; + double readable; + switch (absoluteBytes) + { + // Exabyte + case >= 0x1000000000000000: + suffix = "EB"; + readable = (bytes >> 50); + break; + // Petabyte + case >= 0x4000000000000: + suffix = "PB"; + readable = (bytes >> 40); + break; + // Terabyte + case >= 0x10000000000: + suffix = "TB"; + readable = (bytes >> 30); + break; + // Gigabyte + case >= 0x40000000: + suffix = "GB"; + readable = (bytes >> 20); + break; + // Megabyte + case >= 0x100000: + suffix = "MB"; + readable = (bytes >> 10); + break; + // Kilobyte + case >= 0x400: + suffix = "KB"; + readable = bytes; + break; + default: + return bytes.ToString("0 B"); // Byte + } + // Divide by 1024 to get fractional value + readable = (readable / 1024); + // Return formatted number with suffix + return readable.ToString("0.## ") + suffix; + } + + /// + /// Removes all files except images from the directory. Includes sub directories. + /// + /// Fully qualified directory + public void RemoveNonImages(string directoryName) + { + DeleteFiles(GetFiles(directoryName, searchOption:SearchOption.AllDirectories).Where(file => !Tasks.Scanner.Parser.Parser.IsImage(file))); + } + + + /// + /// Flattens all files in subfolders to the passed directory recursively. 
+ /// + /// + /// foo + /// ├── 1.txt + /// ├── 2.txt + /// ├── 3.txt + /// ├── 4.txt + /// └── bar + /// ├── 1.txt + /// ├── 2.txt + /// └── 5.txt + /// + /// becomes: + /// foo + /// ├── 1.txt + /// ├── 2.txt + /// ├── 3.txt + /// ├── 4.txt + /// ├── bar_1.txt + /// ├── bar_2.txt + /// └── bar_5.txt + /// + /// Fully qualified Directory name + public void Flatten(string directoryName) + { + if (string.IsNullOrEmpty(directoryName) || !FileSystem.Directory.Exists(directoryName)) return; + + var directory = FileSystem.DirectoryInfo.FromDirectoryName(directoryName); + + var index = 0; + FlattenDirectory(directory, directory, ref index); + } + + /// + /// Checks whether a directory has write permissions + /// + /// Fully qualified path + /// + public async Task CheckWriteAccess(string directoryName) + { + try + { + ExistOrCreate(directoryName); + await FileSystem.File.WriteAllTextAsync( + FileSystem.Path.Join(directoryName, "test.txt"), + string.Empty); + } + catch (Exception) + { + ClearAndDeleteDirectory(directoryName); + return false; + } + + ClearAndDeleteDirectory(directoryName); + return true; + } + + + private static void FlattenDirectory(IFileSystemInfo root, IDirectoryInfo directory, ref int directoryIndex) + { + if (!root.FullName.Equals(directory.FullName)) + { + var fileIndex = 1; + + foreach (var file in directory.EnumerateFiles().OrderByNatural(file => file.FullName)) + { + if (file.Directory == null) continue; + var paddedIndex = Tasks.Scanner.Parser.Parser.PadZeros(directoryIndex + ""); + // We need to rename the files so that after flattening, they are in the order we found them + var newName = $"{paddedIndex}_{Tasks.Scanner.Parser.Parser.PadZeros(fileIndex + "")}{file.Extension}"; + var newPath = Path.Join(root.FullName, newName); + if (!File.Exists(newPath)) file.MoveTo(newPath); + fileIndex++; + } + + directoryIndex++; + } + + foreach (var subDirectory in directory.EnumerateDirectories().OrderByNatural(d => d.FullName)) + { + // We need to check if 
the directory is not a blacklisted (ie __MACOSX) + if (Tasks.Scanner.Parser.Parser.HasBlacklistedFolderInPath(subDirectory.FullName)) continue; + + FlattenDirectory(root, subDirectory, ref directoryIndex); + } + } } diff --git a/API/Services/DownloadService.cs b/API/Services/DownloadService.cs index c1591056a..a89a0988f 100644 --- a/API/Services/DownloadService.cs +++ b/API/Services/DownloadService.cs @@ -3,7 +3,6 @@ using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading.Tasks; -using API.Constants; using API.Entities; using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.StaticFiles; @@ -14,17 +13,12 @@ public interface IDownloadService { Tuple GetFirstFileDownload(IEnumerable files); string GetContentTypeFromFile(string filepath); - Task HasDownloadPermission(AppUser user); } public class DownloadService : IDownloadService { - private readonly UserManager _userManager; private readonly FileExtensionContentTypeProvider _fileTypeProvider = new FileExtensionContentTypeProvider(); - public DownloadService(UserManager userManager) - { - _userManager = userManager; - } + public DownloadService() { } /// /// Downloads the first file in the file enumerable for download @@ -62,9 +56,5 @@ public class DownloadService : IDownloadService return contentType; } - public async Task HasDownloadPermission(AppUser user) - { - var roles = await _userManager.GetRolesAsync(user); - return roles.Contains(PolicyConstants.DownloadRole) || roles.Contains(PolicyConstants.AdminRole); - } + } diff --git a/API/Services/EmailService.cs b/API/Services/EmailService.cs index 819a0c77a..32823c178 100644 --- a/API/Services/EmailService.cs +++ b/API/Services/EmailService.cs @@ -1,4 +1,5 @@ using System; +using System.Collections.Generic; using System.Linq; using System.Net; using System.Threading.Tasks; @@ -20,23 +21,29 @@ public interface IEmailService Task CheckIfAccessible(string host); Task SendMigrationEmail(EmailMigrationDto data); Task 
SendPasswordResetEmail(PasswordResetEmailDto data); + Task SendFilesToEmail(SendToDto data); Task TestConnectivity(string emailUrl); + Task IsDefaultEmailService(); + Task SendEmailChangeEmail(ConfirmationEmailDto data); } public class EmailService : IEmailService { private readonly ILogger _logger; private readonly IUnitOfWork _unitOfWork; + private readonly IDownloadService _downloadService; /// /// This is used to initially set or reset the ServerSettingKey. Do not access from the code, access via UnitOfWork /// public const string DefaultApiUrl = "https://email.kavitareader.com"; - public EmailService(ILogger logger, IUnitOfWork unitOfWork) + public EmailService(ILogger logger, IUnitOfWork unitOfWork, IDownloadService downloadService) { _logger = logger; _unitOfWork = unitOfWork; + _downloadService = downloadService; + FlurlHttp.ConfigureClient(DefaultApiUrl, cli => cli.Settings.HttpClientFactory = new UntrustedCertClientFactory()); @@ -58,7 +65,7 @@ public class EmailService : IEmailService result.Successful = false; result.ErrorMessage = "This is a local IP address"; } - result.Successful = await SendEmailWithGet(emailUrl + "/api/email/test"); + result.Successful = await SendEmailWithGet(emailUrl + "/api/test"); } catch (KavitaException ex) { @@ -69,10 +76,26 @@ public class EmailService : IEmailService return result; } + public async Task IsDefaultEmailService() + { + return (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.EmailServiceUrl)).Value + .Equals(DefaultApiUrl); + } + + public async Task SendEmailChangeEmail(ConfirmationEmailDto data) + { + var emailLink = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.EmailServiceUrl)).Value; + var success = await SendEmailWithPost(emailLink + "/api/account/email-change", data); + if (!success) + { + _logger.LogError("There was a critical error sending Confirmation email"); + } + } + public async Task SendConfirmationEmail(ConfirmationEmailDto data) { var emailLink = 
(await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.EmailServiceUrl)).Value; - var success = await SendEmailWithPost(emailLink + "/api/email/confirm", data); + var success = await SendEmailWithPost(emailLink + "/api/invite/confirm", data); if (!success) { _logger.LogError("There was a critical error sending Confirmation email"); @@ -85,7 +108,7 @@ public class EmailService : IEmailService try { if (IsLocalIpAddress(host)) return false; - return await SendEmailWithGet(DefaultApiUrl + "/api/email/reachable?host=" + host); + return await SendEmailWithGet(DefaultApiUrl + "/api/reachable?host=" + host); } catch (Exception) { @@ -96,24 +119,33 @@ public class EmailService : IEmailService public async Task SendMigrationEmail(EmailMigrationDto data) { var emailLink = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.EmailServiceUrl)).Value; - return await SendEmailWithPost(emailLink + "/api/email/email-migration", data); + return await SendEmailWithPost(emailLink + "/api/invite/email-migration", data); } public async Task SendPasswordResetEmail(PasswordResetEmailDto data) { var emailLink = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.EmailServiceUrl)).Value; - return await SendEmailWithPost(emailLink + "/api/email/email-password-reset", data); + return await SendEmailWithPost(emailLink + "/api/invite/email-password-reset", data); } - private static async Task SendEmailWithGet(string url, int timeoutSecs = 30) + public async Task SendFilesToEmail(SendToDto data) + { + if (await IsDefaultEmailService()) return false; + var emailLink = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.EmailServiceUrl)).Value; + return await SendEmailWithFiles(emailLink + "/api/sendto", data.FilePaths, data.DestinationEmail); + } + + private async Task SendEmailWithGet(string url, int timeoutSecs = 30) { try { + var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync(); var response = await 
(url) .WithHeader("Accept", "application/json") .WithHeader("User-Agent", "Kavita") .WithHeader("x-api-key", "MsnvA2DfQqxSK5jh") .WithHeader("x-kavita-version", BuildInfo.Version) + .WithHeader("x-kavita-installId", settings.InstallId) .WithHeader("Content-Type", "application/json") .WithTimeout(TimeSpan.FromSeconds(timeoutSecs)) .GetStringAsync(); @@ -131,26 +163,69 @@ public class EmailService : IEmailService } - private static async Task SendEmailWithPost(string url, object data, int timeoutSecs = 30) + private async Task SendEmailWithPost(string url, object data, int timeoutSecs = 30) { try { + var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync(); var response = await (url) .WithHeader("Accept", "application/json") .WithHeader("User-Agent", "Kavita") .WithHeader("x-api-key", "MsnvA2DfQqxSK5jh") .WithHeader("x-kavita-version", BuildInfo.Version) + .WithHeader("x-kavita-installId", settings.InstallId) .WithHeader("Content-Type", "application/json") .WithTimeout(TimeSpan.FromSeconds(timeoutSecs)) .PostJsonAsync(data); if (response.StatusCode != StatusCodes.Status200OK) { - return false; + var errorMessage = await response.GetStringAsync(); + throw new KavitaException(errorMessage); } } - catch (Exception) + catch (FlurlHttpException ex) { + _logger.LogError(ex, "There was an exception when interacting with Email Service"); + return false; + } + return true; + } + + + private async Task SendEmailWithFiles(string url, IEnumerable filePaths, string destEmail, int timeoutSecs = 300) + { + try + { + var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync(); + var response = await (url) + .WithHeader("User-Agent", "Kavita") + .WithHeader("x-api-key", "MsnvA2DfQqxSK5jh") + .WithHeader("x-kavita-version", BuildInfo.Version) + .WithHeader("x-kavita-installId", settings.InstallId) + .WithTimeout(timeoutSecs) + .AllowHttpStatus("4xx") + .PostMultipartAsync(mp => + { + mp.AddString("email", destEmail); + var index = 1; + foreach (var 
filepath in filePaths) + { + mp.AddFile("file" + index, filepath, _downloadService.GetContentTypeFromFile(filepath)); + index++; + } + } + ); + + if (response.StatusCode != StatusCodes.Status200OK) + { + var errorMessage = await response.GetStringAsync(); + throw new KavitaException(errorMessage); + } + } + catch (FlurlHttpException ex) + { + _logger.LogError(ex, "There was an exception when sending Email for SendTo"); return false; } return true; diff --git a/API/Services/HostedServices/StartupTasksHostedService.cs b/API/Services/HostedServices/StartupTasksHostedService.cs index df7692c7c..43f181016 100644 --- a/API/Services/HostedServices/StartupTasksHostedService.cs +++ b/API/Services/HostedServices/StartupTasksHostedService.cs @@ -6,55 +6,54 @@ using API.Services.Tasks.Scanner; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; -namespace API.Services.HostedServices +namespace API.Services.HostedServices; + +public class StartupTasksHostedService : IHostedService { - public class StartupTasksHostedService : IHostedService + private readonly IServiceProvider _provider; + + public StartupTasksHostedService(IServiceProvider serviceProvider) { - private readonly IServiceProvider _provider; - - public StartupTasksHostedService(IServiceProvider serviceProvider) - { - _provider = serviceProvider; - } - - public async Task StartAsync(CancellationToken cancellationToken) - { - using var scope = _provider.CreateScope(); - - var taskScheduler = scope.ServiceProvider.GetRequiredService(); - await taskScheduler.ScheduleTasks(); - taskScheduler.ScheduleUpdaterTasks(); - - - - try - { - // These methods will automatically check if stat collection is disabled to prevent sending any data regardless - // of when setting was changed - await taskScheduler.ScheduleStatsTasks(); - await taskScheduler.RunStatCollection(); - } - catch (Exception) - { - //If stats startup fail the user can keep using the app - } - - try - { - var unitOfWork = 
scope.ServiceProvider.GetRequiredService(); - if ((await unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableFolderWatching) - { - var libraryWatcher = scope.ServiceProvider.GetRequiredService(); - await libraryWatcher.StartWatching(); - } - } - catch (Exception) - { - // Fail silently - } - - } - - public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; + _provider = serviceProvider; } + + public async Task StartAsync(CancellationToken cancellationToken) + { + using var scope = _provider.CreateScope(); + + var taskScheduler = scope.ServiceProvider.GetRequiredService(); + await taskScheduler.ScheduleTasks(); + taskScheduler.ScheduleUpdaterTasks(); + + + + try + { + // These methods will automatically check if stat collection is disabled to prevent sending any data regardless + // of when setting was changed + await taskScheduler.ScheduleStatsTasks(); + await taskScheduler.RunStatCollection(); + } + catch (Exception) + { + //If stats startup fail the user can keep using the app + } + + try + { + var unitOfWork = scope.ServiceProvider.GetRequiredService(); + if ((await unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableFolderWatching) + { + var libraryWatcher = scope.ServiceProvider.GetRequiredService(); + await libraryWatcher.StartWatching(); + } + } + catch (Exception) + { + // Fail silently + } + + } + + public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; } diff --git a/API/Services/ImageService.cs b/API/Services/ImageService.cs index 1d1271ad5..bebb40d93 100644 --- a/API/Services/ImageService.cs +++ b/API/Services/ImageService.cs @@ -128,7 +128,7 @@ public class ImageService : IImageService return true; } - catch (Exception ex) + catch (Exception) { /* Swallow Exception */ } diff --git a/API/Services/MetadataService.cs b/API/Services/MetadataService.cs index 0dd980a59..6be15bf8e 100644 --- a/API/Services/MetadataService.cs +++ b/API/Services/MetadataService.cs @@ -8,6 +8,7 @@ using API.Data; 
using API.Data.Metadata; using API.Data.Repositories; using API.Data.Scanner; +using API.DTOs.Metadata; using API.Entities; using API.Entities.Enums; using API.Extensions; @@ -51,7 +52,6 @@ public class MetadataService : IMetadataService private readonly ICacheHelper _cacheHelper; private readonly IReadingItemService _readingItemService; private readonly IDirectoryService _directoryService; - private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst(); private readonly IList _updateEvents = new List(); public MetadataService(IUnitOfWork unitOfWork, ILogger logger, IEventHub eventHub, ICacheHelper cacheHelper, @@ -89,7 +89,7 @@ public class MetadataService : IMetadataService private void UpdateChapterLastModified(Chapter chapter, bool forceUpdate) { var firstFile = chapter.Files.MinBy(x => x.Chapter); - if (firstFile == null || _cacheHelper.HasFileNotChangedSinceCreationOrLastScan(chapter, forceUpdate, firstFile)) return; + if (firstFile == null || _cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, forceUpdate, firstFile)) return; firstFile.UpdateLastModified(); } @@ -108,7 +108,7 @@ public class MetadataService : IMetadataService volume.Chapters ??= new List(); - var firstChapter = volume.Chapters.MinBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting); + var firstChapter = volume.Chapters.MinBy(x => double.Parse(x.Number), ChapterSortComparerZeroFirst.Default); if (firstChapter == null) return Task.FromResult(false); volume.CoverImage = firstChapter.CoverImage; @@ -131,23 +131,8 @@ public class MetadataService : IMetadataService return Task.CompletedTask; series.Volumes ??= new List(); - var firstCover = series.Volumes.GetCoverImage(series.Format); - string coverImage = null; - if (firstCover == null && series.Volumes.Any()) - { - // If firstCover is null and one volume, the whole series is Chapters under Vol 0. 
- if (series.Volumes.Count == 1) - { - coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparerForInChapterSorting) - .FirstOrDefault(c => !c.IsSpecial)?.CoverImage; - } + series.CoverImage = series.GetCoverImage(); - if (!_cacheHelper.CoverImageExists(coverImage)) - { - coverImage = series.Volumes[0].Chapters.MinBy(c => double.Parse(c.Number), _chapterSortComparerForInChapterSorting)?.CoverImage; - } - } - series.CoverImage = firstCover?.CoverImage ?? coverImage; _updateEvents.Add(MessageFactory.CoverUpdateEvent(series.Id, MessageFactoryEntityTypes.Series)); return Task.CompletedTask; } diff --git a/API/Services/ReaderService.cs b/API/Services/ReaderService.cs index 197731a8b..fcb111d98 100644 --- a/API/Services/ReaderService.cs +++ b/API/Services/ReaderService.cs @@ -21,10 +21,11 @@ public interface IReaderService { Task MarkSeriesAsRead(AppUser user, int seriesId); Task MarkSeriesAsUnread(AppUser user, int seriesId); - Task MarkChaptersAsRead(AppUser user, int seriesId, IEnumerable chapters); - Task MarkChaptersAsUnread(AppUser user, int seriesId, IEnumerable chapters); + Task MarkChaptersAsRead(AppUser user, int seriesId, IList chapters); + Task MarkChaptersAsUnread(AppUser user, int seriesId, IList chapters); Task SaveReadingProgress(ProgressDto progressDto, int userId); Task CapPageToChapter(int chapterId, int page); + int CapPageToChapter(Chapter chapter, int page); Task GetNextChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId); Task GetPrevChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId); Task GetContinuePoint(int seriesId, int userId); @@ -39,8 +40,8 @@ public class ReaderService : IReaderService private readonly IUnitOfWork _unitOfWork; private readonly ILogger _logger; private readonly IEventHub _eventHub; - private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer(); - private readonly ChapterSortComparerZeroFirst 
_chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst(); + private readonly ChapterSortComparer _chapterSortComparer = ChapterSortComparer.Default; + private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = ChapterSortComparerZeroFirst.Default; private const float MinWordsPerHour = 10260F; private const float MaxWordsPerHour = 30000F; @@ -75,8 +76,6 @@ public class ReaderService : IReaderService { await MarkChaptersAsRead(user, seriesId, volume.Chapters); } - - _unitOfWork.UserRepository.Update(user); } /// @@ -92,18 +91,18 @@ public class ReaderService : IReaderService { await MarkChaptersAsUnread(user, seriesId, volume.Chapters); } - - _unitOfWork.UserRepository.Update(user); } /// /// Marks all Chapters as Read by creating or updating UserProgress rows. Does not commit. /// + /// Emits events to the UI for each chapter progress and one for each volume progress /// /// /// - public async Task MarkChaptersAsRead(AppUser user, int seriesId, IEnumerable chapters) + public async Task MarkChaptersAsRead(AppUser user, int seriesId, IList chapters) { + var seenVolume = new Dictionary(); foreach (var chapter in chapters) { var userProgress = GetUserProgressForChapter(user, chapter); @@ -117,19 +116,29 @@ public class ReaderService : IReaderService SeriesId = seriesId, ChapterId = chapter.Id }); - await _eventHub.SendMessageAsync(MessageFactory.UserProgressUpdate, - MessageFactory.UserProgressUpdateEvent(user.Id, user.UserName, seriesId, chapter.VolumeId, chapter.Id, chapter.Pages)); } else { userProgress.PagesRead = chapter.Pages; userProgress.SeriesId = seriesId; userProgress.VolumeId = chapter.VolumeId; - - await _eventHub.SendMessageAsync(MessageFactory.UserProgressUpdate, - MessageFactory.UserProgressUpdateEvent(user.Id, user.UserName, userProgress.SeriesId, userProgress.VolumeId, userProgress.ChapterId, chapter.Pages)); } + + await _eventHub.SendMessageAsync(MessageFactory.UserProgressUpdate, + 
MessageFactory.UserProgressUpdateEvent(user.Id, user.UserName, seriesId, chapter.VolumeId, chapter.Id, chapter.Pages)); + + // Send out volume events for each distinct volume + if (!seenVolume.ContainsKey(chapter.VolumeId)) + { + seenVolume[chapter.VolumeId] = true; + await _eventHub.SendMessageAsync(MessageFactory.UserProgressUpdate, + MessageFactory.UserProgressUpdateEvent(user.Id, user.UserName, seriesId, + chapter.VolumeId, 0, chapters.Where(c => c.VolumeId == chapter.VolumeId).Sum(c => c.Pages))); + } + } + + _unitOfWork.UserRepository.Update(user); } /// @@ -138,8 +147,9 @@ public class ReaderService : IReaderService /// /// /// - public async Task MarkChaptersAsUnread(AppUser user, int seriesId, IEnumerable chapters) + public async Task MarkChaptersAsUnread(AppUser user, int seriesId, IList chapters) { + var seenVolume = new Dictionary(); foreach (var chapter in chapters) { var userProgress = GetUserProgressForChapter(user, chapter); @@ -152,7 +162,17 @@ public class ReaderService : IReaderService await _eventHub.SendMessageAsync(MessageFactory.UserProgressUpdate, MessageFactory.UserProgressUpdateEvent(user.Id, user.UserName, userProgress.SeriesId, userProgress.VolumeId, userProgress.ChapterId, 0)); + + // Send out volume events for each distinct volume + if (!seenVolume.ContainsKey(chapter.VolumeId)) + { + seenVolume[chapter.VolumeId] = true; + await _eventHub.SendMessageAsync(MessageFactory.UserProgressUpdate, + MessageFactory.UserProgressUpdateEvent(user.Id, user.UserName, seriesId, + chapter.VolumeId, 0, 0)); + } } + _unitOfWork.UserRepository.Update(user); } /// @@ -273,6 +293,21 @@ public class ReaderService : IReaderService return page; } + public int CapPageToChapter(Chapter chapter, int page) + { + if (page > chapter.Pages) + { + page = chapter.Pages; + } + + if (page < 0) + { + page = 0; + } + + return page; + } + /// /// Tries to find the next logical Chapter /// @@ -297,19 +332,29 @@ public class ReaderService : IReaderService if (chapterId > 0) 
return chapterId; } + var currentVolumeNumber = float.Parse(currentVolume.Name); + var next = false; foreach (var volume in volumes) { - if (volume.Number == currentVolume.Number && volume.Chapters.Count > 1) + var volumeNumbersMatch = Math.Abs(float.Parse(volume.Name) - currentVolumeNumber) < 0.00001f; + if (volumeNumbersMatch && volume.Chapters.Count > 1) { // Handle Chapters within current Volume // In this case, i need 0 first because 0 represents a full volume file. var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapter.Range, dto => dto.Range); if (chapterId > 0) return chapterId; - + next = true; + continue; } - if (volume.Number != currentVolume.Number + 1) continue; + if (volumeNumbersMatch) + { + next = true; + continue; + } + + if (!next) continue; // Handle Chapters within next Volume // ! When selecting the chapter for the next volume, we need to make sure a c0 comes before a c1+ @@ -373,6 +418,7 @@ public class ReaderService : IReaderService if (chapterId > 0) return chapterId; } + var next = false; foreach (var volume in volumes) { if (volume.Number == currentVolume.Number) @@ -380,8 +426,10 @@ public class ReaderService : IReaderService var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).Reverse(), currentChapter.Range, dto => dto.Range); if (chapterId > 0) return chapterId; + next = true; // When the diff between volumes is more than 1, we need to explicitly tell that next volume is our use case + continue; } - if (volume.Number == currentVolume.Number - 1) + if (next) { if (currentVolume.Number - 1 == 0) break; // If we have walked all the way to chapter volume, then we should break so logic outside can work var lastChapter = volume.Chapters.MaxBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting); @@ -497,7 +545,7 @@ public class ReaderService : IReaderService var chapters 
= volume.Chapters .OrderBy(c => float.Parse(c.Number)) .Where(c => !c.IsSpecial && Tasks.Scanner.Parser.Parser.MaxNumberFromRange(c.Range) <= chapterNumber); - await MarkChaptersAsRead(user, volume.SeriesId, chapters); + await MarkChaptersAsRead(user, volume.SeriesId, chapters.ToList()); } } diff --git a/API/Services/ReadingItemService.cs b/API/Services/ReadingItemService.cs index 8e4676639..3f2122a08 100644 --- a/API/Services/ReadingItemService.cs +++ b/API/Services/ReadingItemService.cs @@ -2,6 +2,7 @@ using API.Data.Metadata; using API.Entities.Enums; using API.Parser; +using API.Services.Tasks.Scanner.Parser; namespace API.Services; @@ -71,8 +72,7 @@ public class ReadingItemService : IReadingItemService // This catches when original library type is Manga/Comic and when parsing with non if (Tasks.Scanner.Parser.Parser.IsEpub(path) && Tasks.Scanner.Parser.Parser.ParseVolume(info.Series) != Tasks.Scanner.Parser.Parser.DefaultVolume) // Shouldn't this be info.Volume != DefaultVolume? { - info = _defaultParser.Parse(path, rootPath, LibraryType.Book); - var info2 = Parse(path, rootPath, type); + var info2 = _defaultParser.Parse(path, rootPath, LibraryType.Book); info.Merge(info2); } diff --git a/API/Services/ReadingListService.cs b/API/Services/ReadingListService.cs index 60314e3a9..55c842252 100644 --- a/API/Services/ReadingListService.cs +++ b/API/Services/ReadingListService.cs @@ -17,7 +17,7 @@ public interface IReadingListService Task DeleteReadingListItem(UpdateReadingListPosition dto); Task UserHasReadingListAccess(int readingListId, string username); Task DeleteReadingList(int readingListId, AppUser user); - + Task CalculateReadingListAgeRating(ReadingList readingList); Task AddChaptersToReadingList(int seriesId, IList chapterIds, ReadingList readingList); } @@ -41,7 +41,7 @@ public class ReadingListService : IReadingListService /// - /// Removes all entries that are fully read from the reading list + /// Removes all entries that are fully read from the 
reading list. This commits /// /// If called from API layer, expected for to be called beforehand /// Reading List Id @@ -62,10 +62,12 @@ public class ReadingListService : IReadingListService itemIdsToRemove.Contains(r.Id)); _unitOfWork.ReadingListRepository.BulkRemove(listItems); + var readingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(readingListId); + await CalculateReadingListAgeRating(readingList); + if (!_unitOfWork.HasChanges()) return true; - await _unitOfWork.CommitAsync(); - return true; + return await _unitOfWork.CommitAsync(); } catch { @@ -97,6 +99,11 @@ public class ReadingListService : IReadingListService return await _unitOfWork.CommitAsync(); } + /// + /// Removes a certain reading list item from a reading list + /// + /// Only ReadingListId and ReadingListItemId are used + /// public async Task DeleteReadingListItem(UpdateReadingListPosition dto) { var readingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(dto.ReadingListId); @@ -109,11 +116,34 @@ public class ReadingListService : IReadingListService index++; } + await CalculateReadingListAgeRating(readingList); + if (!_unitOfWork.HasChanges()) return true; return await _unitOfWork.CommitAsync(); } + /// + /// Calculates the highest Age Rating from each Reading List Item + /// + /// + public async Task CalculateReadingListAgeRating(ReadingList readingList) + { + await CalculateReadingListAgeRating(readingList, readingList.Items.Select(i => i.SeriesId)); + } + + /// + /// Calculates the highest Age Rating from each Reading List Item + /// + /// This method is used when the ReadingList doesn't have items yet + /// + /// The series ids of all the reading list items + private async Task CalculateReadingListAgeRating(ReadingList readingList, IEnumerable seriesIds) + { + var ageRating = await _unitOfWork.SeriesRepository.GetMaxAgeRatingFromSeriesAsync(seriesIds); + readingList.AgeRating = ageRating; + } + /// /// Validates the user has access to the 
reading list to perform actions on it /// @@ -167,16 +197,18 @@ public class ReadingListService : IReadingListService var existingChapterExists = readingList.Items.Select(rli => rli.ChapterId).ToHashSet(); var chaptersForSeries = (await _unitOfWork.ChapterRepository.GetChaptersByIdsAsync(chapterIds)) .OrderBy(c => Tasks.Scanner.Parser.Parser.MinNumberFromRange(c.Volume.Name)) - .ThenBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting); + .ThenBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting) + .ToList(); var index = lastOrder + 1; - foreach (var chapter in chaptersForSeries) + foreach (var chapter in chaptersForSeries.Where(chapter => !existingChapterExists.Contains(chapter.Id))) { - if (existingChapterExists.Contains(chapter.Id)) continue; readingList.Items.Add(DbFactory.ReadingListItem(index, seriesId, chapter.VolumeId, chapter.Id)); index += 1; } + await CalculateReadingListAgeRating(readingList, new []{ seriesId }); + return index > lastOrder + 1; } } diff --git a/API/Services/SeriesService.cs b/API/Services/SeriesService.cs index 471cb2b16..bba9876d2 100644 --- a/API/Services/SeriesService.cs +++ b/API/Services/SeriesService.cs @@ -5,14 +5,17 @@ using System.Linq; using System.Threading.Tasks; using API.Comparators; using API.Data; +using API.Data.Repositories; using API.DTOs; using API.DTOs.CollectionTags; using API.DTOs.Metadata; using API.DTOs.SeriesDetail; using API.Entities; using API.Entities.Enums; +using API.Entities.Metadata; using API.Helpers; using API.SignalR; +using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Logging; namespace API.Services; @@ -24,7 +27,8 @@ public interface ISeriesService Task UpdateSeriesMetadata(UpdateSeriesMetadataDto updateSeriesMetadataDto); Task UpdateRating(AppUser user, UpdateSeriesRatingDto updateSeriesRatingDto); Task DeleteMultipleSeries(IList seriesIds); - + Task UpdateRelatedSeries(UpdateRelatedSeriesDto dto); + Task GetRelatedSeries(int userId, int seriesId); } 
public class SeriesService : ISeriesService @@ -75,6 +79,12 @@ public class SeriesService : ISeriesService series.Metadata.AgeRatingLocked = true; } + if (updateSeriesMetadataDto.SeriesMetadata.ReleaseYear > 1000 && series.Metadata.ReleaseYear != updateSeriesMetadataDto.SeriesMetadata.ReleaseYear) + { + series.Metadata.ReleaseYear = updateSeriesMetadataDto.SeriesMetadata.ReleaseYear; + series.Metadata.ReleaseYearLocked = true; + } + if (series.Metadata.PublicationStatus != updateSeriesMetadataDto.SeriesMetadata.PublicationStatus) { series.Metadata.PublicationStatus = updateSeriesMetadataDto.SeriesMetadata.PublicationStatus; @@ -166,6 +176,7 @@ public class SeriesService : ISeriesService series.Metadata.CoverArtistLocked = updateSeriesMetadataDto.SeriesMetadata.CoverArtistsLocked; series.Metadata.WriterLocked = updateSeriesMetadataDto.SeriesMetadata.WritersLocked; series.Metadata.SummaryLocked = updateSeriesMetadataDto.SeriesMetadata.SummaryLocked; + series.Metadata.ReleaseYearLocked = updateSeriesMetadataDto.SeriesMetadata.ReleaseYearLocked; if (!_unitOfWork.HasChanges()) { @@ -462,6 +473,17 @@ public class SeriesService : ISeriesService public async Task GetSeriesDetail(int seriesId, int userId) { var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); + var libraryIds = (await _unitOfWork.LibraryRepository.GetLibraryIdsForUserIdAsync(userId)); + if (!libraryIds.Contains(series.LibraryId)) + throw new UnauthorizedAccessException("User does not have access to the library this series belongs to"); + + var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); + if (user.AgeRestriction != AgeRating.NotApplicable) + { + var seriesMetadata = await _unitOfWork.SeriesRepository.GetSeriesMetadata(seriesId); + if (seriesMetadata.AgeRating > user.AgeRestriction) + throw new UnauthorizedAccessException("User is not allowed to view this series due to age restrictions"); + } var libraryType = await 
_unitOfWork.LibraryRepository.GetLibraryTypeAsync(series.LibraryId); var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)) @@ -605,4 +627,76 @@ public class SeriesService : ISeriesService _ => "Chapter" }; } + + /// + /// Returns all related series against the passed series Id + /// + /// + /// + /// + public async Task GetRelatedSeries(int userId, int seriesId) + { + return await _unitOfWork.SeriesRepository.GetRelatedSeries(userId, seriesId); + } + + /// + /// Update the relations attached to the Series. Does not generate associated Sequel/Prequel pairs on target series. + /// + /// + /// + public async Task UpdateRelatedSeries(UpdateRelatedSeriesDto dto) + { + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(dto.SeriesId, SeriesIncludes.Related); + + UpdateRelationForKind(dto.Adaptations, series.Relations.Where(r => r.RelationKind == RelationKind.Adaptation).ToList(), series, RelationKind.Adaptation); + UpdateRelationForKind(dto.Characters, series.Relations.Where(r => r.RelationKind == RelationKind.Character).ToList(), series, RelationKind.Character); + UpdateRelationForKind(dto.Contains, series.Relations.Where(r => r.RelationKind == RelationKind.Contains).ToList(), series, RelationKind.Contains); + UpdateRelationForKind(dto.Others, series.Relations.Where(r => r.RelationKind == RelationKind.Other).ToList(), series, RelationKind.Other); + UpdateRelationForKind(dto.SideStories, series.Relations.Where(r => r.RelationKind == RelationKind.SideStory).ToList(), series, RelationKind.SideStory); + UpdateRelationForKind(dto.SpinOffs, series.Relations.Where(r => r.RelationKind == RelationKind.SpinOff).ToList(), series, RelationKind.SpinOff); + UpdateRelationForKind(dto.AlternativeSettings, series.Relations.Where(r => r.RelationKind == RelationKind.AlternativeSetting).ToList(), series, RelationKind.AlternativeSetting); + UpdateRelationForKind(dto.AlternativeVersions, series.Relations.Where(r => r.RelationKind == 
RelationKind.AlternativeVersion).ToList(), series, RelationKind.AlternativeVersion); + UpdateRelationForKind(dto.Doujinshis, series.Relations.Where(r => r.RelationKind == RelationKind.Doujinshi).ToList(), series, RelationKind.Doujinshi); + UpdateRelationForKind(dto.Prequels, series.Relations.Where(r => r.RelationKind == RelationKind.Prequel).ToList(), series, RelationKind.Prequel); + UpdateRelationForKind(dto.Sequels, series.Relations.Where(r => r.RelationKind == RelationKind.Sequel).ToList(), series, RelationKind.Sequel); + UpdateRelationForKind(dto.Editions, series.Relations.Where(r => r.RelationKind == RelationKind.Edition).ToList(), series, RelationKind.Edition); + + if (!_unitOfWork.HasChanges()) return true; + return await _unitOfWork.CommitAsync(); + } + + + /// + /// Applies the provided list to the series. Adds new relations and removes deleted relations. + /// + /// + /// + /// + /// + private void UpdateRelationForKind(ICollection dtoTargetSeriesIds, IEnumerable adaptations, Series series, RelationKind kind) + { + foreach (var adaptation in adaptations.Where(adaptation => !dtoTargetSeriesIds.Contains(adaptation.TargetSeriesId))) + { + // If the seriesId isn't in dto, it means we've removed or reclassified + series.Relations.Remove(adaptation); + } + + // At this point, we only have things to add + foreach (var targetSeriesId in dtoTargetSeriesIds) + { + // This ensures we don't allow any duplicates to be added + if (series.Relations.SingleOrDefault(r => + r.RelationKind == kind && r.TargetSeriesId == targetSeriesId) != + null) continue; + + series.Relations.Add(new SeriesRelation() + { + Series = series, + SeriesId = series.Id, + TargetSeriesId = targetSeriesId, + RelationKind = kind + }); + _unitOfWork.SeriesRepository.Update(series); + } + } } diff --git a/API/Services/TachiyomiService.cs b/API/Services/TachiyomiService.cs new file mode 100644 index 000000000..23f57562d --- /dev/null +++ b/API/Services/TachiyomiService.cs @@ -0,0 +1,158 @@ +using 
System; +using API.DTOs; +using System.Threading.Tasks; +using API.Data; +using System.Collections.Immutable; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using API.Comparators; +using API.Entities; +using AutoMapper; +using Microsoft.Extensions.Logging; + +namespace API.Services; + +public interface ITachiyomiService +{ + Task GetLatestChapter(int seriesId, int userId); + Task MarkChaptersUntilAsRead(AppUser userWithProgress, int seriesId, float chapterNumber); +} + +/// +/// All APIs are for Tachiyomi extension and app. They have hacks for our implementation and should not be used for any +/// other purposes. +/// +public class TachiyomiService : ITachiyomiService +{ + private readonly IUnitOfWork _unitOfWork; + private readonly IMapper _mapper; + private readonly ILogger _logger; + private readonly IReaderService _readerService; + + private static readonly CultureInfo EnglishCulture = CultureInfo.CreateSpecificCulture("en-US"); + + public TachiyomiService(IUnitOfWork unitOfWork, IMapper mapper, ILogger logger, IReaderService readerService) + { + _unitOfWork = unitOfWork; + _readerService = readerService; + _mapper = mapper; + _logger = logger; + } + + /// + /// Gets the latest chapter/volume read. + /// + /// + /// + /// Due to how Tachiyomi works we need a hack to properly return both chapters and volumes. + /// If its a chapter, return the chapterDto as is. + /// If it's a volume, the volume number gets returned in the 'Number' attribute of a chapterDto encoded. + /// The volume number gets divided by 10,000 because that's how Tachiyomi interprets volumes + public async Task GetLatestChapter(int seriesId, int userId) + { + + + var currentChapter = await _readerService.GetContinuePoint(seriesId, userId); + + var prevChapterId = + await _readerService.GetPrevChapterIdAsync(seriesId, currentChapter.VolumeId, currentChapter.Id, userId); + + // If prevChapterId is -1, this means either nothing is read or everything is read. 
+ if (prevChapterId == -1) + { + var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); + var userHasProgress = series.PagesRead != 0 && series.PagesRead <= series.Pages; + + // If the user doesn't have progress, then return null, which the extension will catch as 204 (no content) and report nothing as read + if (!userHasProgress) return null; + + // Else return the max chapter to Tachiyomi so it can consider everything read + var volumes = (await _unitOfWork.VolumeRepository.GetVolumes(seriesId)).ToImmutableList(); + var looseLeafChapterVolume = volumes.FirstOrDefault(v => v.Number == 0); + if (looseLeafChapterVolume == null) + { + var volumeChapter = _mapper.Map(volumes.Last().Chapters.OrderBy(c => float.Parse(c.Number), ChapterSortComparerZeroFirst.Default).Last()); + if (volumeChapter.Number == "0") + { + var volume = volumes.First(v => v.Id == volumeChapter.VolumeId); + return new ChapterDto() + { + // Use R to ensure that localization of underlying system doesn't affect the stringification + // https://docs.microsoft.com/en-us/globalization/locale/number-formatting-in-dotnet-framework + Number = (volume.Number / 10_000f).ToString("R", EnglishCulture) + }; + } + + return new ChapterDto() + { + Number = (int.Parse(volumeChapter.Number) / 10_000f).ToString("R", EnglishCulture) + }; + } + + var lastChapter = looseLeafChapterVolume.Chapters.OrderBy(c => float.Parse(c.Number), ChapterSortComparer.Default).Last(); + return _mapper.Map(lastChapter); + } + + // There is progress, we now need to figure out the highest volume or chapter and return that. 
+ var prevChapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(prevChapterId); + var volumeWithProgress = await _unitOfWork.VolumeRepository.GetVolumeDtoAsync(prevChapter.VolumeId, userId); + // We only encode for single-file volumes + if (volumeWithProgress.Number != 0 && volumeWithProgress.Chapters.Count == 1) + { + // The progress is on a volume, encode it as a fake chapterDTO + return new ChapterDto() + { + // Use R to ensure that localization of underlying system doesn't affect the stringification + // https://docs.microsoft.com/en-us/globalization/locale/number-formatting-in-dotnet-framework + Number = (volumeWithProgress.Number / 10_000f).ToString("R", EnglishCulture) + + }; + } + + // Progress is just on a chapter, return as is + return prevChapter; + } + + /// + /// Marks every chapter and volume that is sorted below the passed number as Read. This will not mark any specials as read. + /// Passed number will also be marked as read + /// + /// + /// + /// Can also be a Tachiyomi encoded volume number + public async Task MarkChaptersUntilAsRead(AppUser userWithProgress, int seriesId, float chapterNumber) + { + userWithProgress.Progresses ??= new List(); + + switch (chapterNumber) + { + // When Tachiyomi sync's progress, if there is no current progress in Tachiyomi, 0.0f is sent. + // Due to the encoding for volumes, this marks all chapters in volume 0 (loose chapters) as read. + // Hence we catch and return early, so we ignore the request. + case 0.0f: + return true; + case < 1.0f: + { + // This is a hack to track volume number. 
We need to map it back by x10,000 + var volumeNumber = int.Parse($"{(int)(chapterNumber * 10_000)}", EnglishCulture); + await _readerService.MarkVolumesUntilAsRead(userWithProgress, seriesId, volumeNumber); + break; + } + default: + await _readerService.MarkChaptersUntilAsRead(userWithProgress, seriesId, chapterNumber); + break; + } + + try { + _unitOfWork.UserRepository.Update(userWithProgress); + + if (!_unitOfWork.HasChanges()) return true; + if (await _unitOfWork.CommitAsync()) return true; + } catch (Exception ex) { + _logger.LogError(ex, "There was an error saving progress from tachiyomi"); + await _unitOfWork.RollbackAsync(); + } + return false; + } +} diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs index affbec32b..d225b3b99 100644 --- a/API/Services/TaskScheduler.cs +++ b/API/Services/TaskScheduler.cs @@ -1,5 +1,6 @@ using System; using System.Collections.Generic; +using System.Collections.Immutable; using System.Linq; using System.Threading; using System.Threading.Tasks; @@ -8,7 +9,6 @@ using API.Entities.Enums; using API.Helpers.Converters; using API.Services.Tasks; using API.Services.Tasks.Metadata; -using API.Services.Tasks.Scanner; using Hangfire; using Microsoft.Extensions.Logging; @@ -19,6 +19,7 @@ public interface ITaskScheduler Task ScheduleTasks(); Task ScheduleStatsTasks(); void ScheduleUpdaterTasks(); + void ScanFolder(string folderPath, TimeSpan delay); void ScanFolder(string folderPath); void ScanLibrary(int libraryId, bool force = false); void CleanupChapters(int[] chapterIds); @@ -49,9 +50,14 @@ public class TaskScheduler : ITaskScheduler public static BackgroundJobServer Client => new BackgroundJobServer(); public const string ScanQueue = "scan"; public const string DefaultQueue = "default"; + public const string RemoveFromWantToReadTaskId = "remove-from-want-to-read"; + public const string CleanupDbTaskId = "cleanup-db"; + public const string CleanupTaskId = "cleanup"; + public const string BackupTaskId = 
"backup"; + public const string ScanLibrariesTaskId = "scan-libraries"; + public const string ReportStatsTaskId = "report-stats"; - public static readonly IList ScanTasks = new List() - {"ScannerService", "ScanLibrary", "ScanLibraries", "ScanFolder", "ScanSeries"}; + private static readonly ImmutableArray ScanTasks = ImmutableArray.Create("ScannerService", "ScanLibrary", "ScanLibraries", "ScanFolder", "ScanSeries"); private static readonly Random Rnd = new Random(); @@ -83,27 +89,28 @@ public class TaskScheduler : ITaskScheduler { var scanLibrarySetting = setting; _logger.LogDebug("Scheduling Scan Library Task for {Setting}", scanLibrarySetting); - RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), + RecurringJob.AddOrUpdate(ScanLibrariesTaskId, () => _scannerService.ScanLibraries(), () => CronConverter.ConvertToCronNotation(scanLibrarySetting), TimeZoneInfo.Local); } else { - RecurringJob.AddOrUpdate("scan-libraries", () => ScanLibraries(), Cron.Daily, TimeZoneInfo.Local); + RecurringJob.AddOrUpdate(ScanLibrariesTaskId, () => ScanLibraries(), Cron.Daily, TimeZoneInfo.Local); } setting = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Value; if (setting != null) { _logger.LogDebug("Scheduling Backup Task for {Setting}", setting); - RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting), TimeZoneInfo.Local); + RecurringJob.AddOrUpdate(BackupTaskId, () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting), TimeZoneInfo.Local); } else { - RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), Cron.Weekly, TimeZoneInfo.Local); + RecurringJob.AddOrUpdate(BackupTaskId, () => _backupService.BackupDatabase(), Cron.Weekly, TimeZoneInfo.Local); } - RecurringJob.AddOrUpdate("cleanup", () => _cleanupService.Cleanup(), Cron.Daily, TimeZoneInfo.Local); - RecurringJob.AddOrUpdate("cleanup-db", 
() => _cleanupService.CleanupDbEntries(), Cron.Daily, TimeZoneInfo.Local); + RecurringJob.AddOrUpdate(CleanupTaskId, () => _cleanupService.Cleanup(), Cron.Daily, TimeZoneInfo.Local); + RecurringJob.AddOrUpdate(CleanupDbTaskId, () => _cleanupService.CleanupDbEntries(), Cron.Daily, TimeZoneInfo.Local); + RecurringJob.AddOrUpdate(RemoveFromWantToReadTaskId, () => _cleanupService.CleanupWantToRead(), Cron.Daily, TimeZoneInfo.Local); } #region StatsTasks @@ -119,7 +126,7 @@ public class TaskScheduler : ITaskScheduler } _logger.LogDebug("Scheduling stat collection daily"); - RecurringJob.AddOrUpdate("report-stats", () => _statsService.Send(), Cron.Daily(Rnd.Next(0, 22)), TimeZoneInfo.Local); + RecurringJob.AddOrUpdate(ReportStatsTaskId, () => _statsService.Send(), Cron.Daily(Rnd.Next(0, 22)), TimeZoneInfo.Local); } public void AnalyzeFilesForLibrary(int libraryId, bool forceUpdate = false) @@ -127,11 +134,14 @@ public class TaskScheduler : ITaskScheduler BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanLibrary(libraryId, forceUpdate)); } + /// + /// Upon cancelling stat, we do report to the Stat service that we are no longer going to be reporting + /// public void CancelStatsTasks() { - _logger.LogDebug("Cancelling/Removing StatsTasks"); - - RecurringJob.RemoveIfExists("report-stats"); + _logger.LogDebug("Stopping Stat collection as user has opted out"); + RecurringJob.RemoveIfExists(ReportStatsTaskId); + _statsService.SendCancellation(); } /// @@ -150,11 +160,16 @@ public class TaskScheduler : ITaskScheduler public void ScanSiteThemes() { - _logger.LogInformation("Starting Site Theme scan"); + if (HasAlreadyEnqueuedTask("ThemeService", "Scan", Array.Empty(), ScanQueue)) + { + _logger.LogInformation("A Theme Scan is already running"); + return; + } + + _logger.LogInformation("Enqueueing Site Theme scan"); BackgroundJob.Enqueue(() => _themeService.Scan()); } - #endregion #region UpdateTasks @@ -166,9 +181,32 @@ public class TaskScheduler : ITaskScheduler 
RecurringJob.AddOrUpdate("check-updates", () => CheckForUpdate(), Cron.Daily(Rnd.Next(12, 18)), TimeZoneInfo.Local); } + public void ScanFolder(string folderPath, TimeSpan delay) + { + var normalizedFolder = Tasks.Scanner.Parser.Parser.NormalizePath(folderPath); + if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", new object[] { normalizedFolder })) + { + _logger.LogInformation("Skipped scheduling ScanFolder for {Folder} as a job already queued", + normalizedFolder); + return; + } + + _logger.LogInformation("Scheduling ScanFolder for {Folder}", normalizedFolder); + BackgroundJob.Schedule(() => _scannerService.ScanFolder(normalizedFolder), delay); + } + public void ScanFolder(string folderPath) { - _scannerService.ScanFolder(Tasks.Scanner.Parser.Parser.NormalizePath(folderPath)); + var normalizedFolder = Tasks.Scanner.Parser.Parser.NormalizePath(folderPath); + if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", new object[] {normalizedFolder})) + { + _logger.LogInformation("Skipped scheduling ScanFolder for {Folder} as a job already queued", + normalizedFolder); + return; + } + + _logger.LogInformation("Scheduling ScanFolder for {Folder}", normalizedFolder); + _scannerService.ScanFolder(normalizedFolder); } #endregion @@ -186,17 +224,14 @@ public class TaskScheduler : ITaskScheduler public void ScanLibrary(int libraryId, bool force = false) { - var alreadyEnqueued = - HasAlreadyEnqueuedTask("ScannerService", "ScanLibrary", new object[] {libraryId, true}, ScanQueue) || - HasAlreadyEnqueuedTask("ScannerService", "ScanLibrary", new object[] {libraryId, false}, ScanQueue); - if (alreadyEnqueued) + if (HasScanTaskRunningForLibrary(libraryId)) { - _logger.LogInformation("A duplicate request to scan library for library occured. Skipping"); + _logger.LogInformation("A duplicate request for Library Scan on library {LibraryId} occured. 
Skipping", libraryId); return; } if (RunningAnyTasksByMethod(ScanTasks, ScanQueue)) { - _logger.LogInformation("A Scan is already running, rescheduling ScanLibrary in 3 hours"); + _logger.LogInformation("A Library Scan is already running, rescheduling ScanLibrary in 3 hours"); BackgroundJob.Schedule(() => ScanLibrary(libraryId, force), TimeSpan.FromHours(3)); return; } @@ -204,7 +239,7 @@ public class TaskScheduler : ITaskScheduler _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId); BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, force)); // When we do a scan, force cache to re-unpack in case page numbers change - BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheDirectory()); + BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheAndTempDirectories()); } public void CleanupChapters(int[] chapterIds) @@ -285,34 +320,84 @@ public class TaskScheduler : ITaskScheduler await _versionUpdaterService.PushUpdate(update); } - public static bool HasScanTaskRunningForLibrary(int libraryId) + /// + /// If there is an enqueued or scheduled task for method + /// + /// + /// Checks against jobs currently executing as well + /// + public static bool HasScanTaskRunningForLibrary(int libraryId, bool checkRunningJobs = true) { return - HasAlreadyEnqueuedTask("ScannerService", "ScanLibrary", new object[] {libraryId, true}, ScanQueue) || - HasAlreadyEnqueuedTask("ScannerService", "ScanLibrary", new object[] {libraryId, false}, ScanQueue); + HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, true}, ScanQueue, checkRunningJobs) || + HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, false}, ScanQueue, checkRunningJobs); } /// - /// Checks if this same invocation is already enqueued + /// If there is an enqueued or scheduled task for method + /// + /// + /// Checks against jobs currently executing as well + /// + public static bool HasScanTaskRunningForSeries(int 
seriesId, bool checkRunningJobs = true) + { + return + HasAlreadyEnqueuedTask(ScannerService.Name, "ScanSeries", new object[] {seriesId, true}, ScanQueue, checkRunningJobs) || + HasAlreadyEnqueuedTask(ScannerService.Name, "ScanSeries", new object[] {seriesId, false}, ScanQueue, checkRunningJobs); + } + + /// + /// Checks if this same invocation is already enqueued or scheduled /// /// Method name that was enqueued /// Class name the method resides on /// object[] of arguments in the order they are passed to enqueued job /// Queue to check against. Defaults to "default" + /// Check against running jobs. Defaults to false. /// - public static bool HasAlreadyEnqueuedTask(string className, string methodName, object[] args, string queue = DefaultQueue) + public static bool HasAlreadyEnqueuedTask(string className, string methodName, object[] args, string queue = DefaultQueue, bool checkRunningJobs = false) { var enqueuedJobs = JobStorage.Current.GetMonitoringApi().EnqueuedJobs(queue, 0, int.MaxValue); - return enqueuedJobs.Any(j => j.Value.InEnqueuedState && + var ret = enqueuedJobs.Any(j => j.Value.InEnqueuedState && j.Value.Job.Method.DeclaringType != null && j.Value.Job.Args.SequenceEqual(args) && j.Value.Job.Method.Name.Equals(methodName) && j.Value.Job.Method.DeclaringType.Name.Equals(className)); + if (ret) return true; + + var scheduledJobs = JobStorage.Current.GetMonitoringApi().ScheduledJobs(0, int.MaxValue); + ret = scheduledJobs.Any(j => + j.Value.Job.Method.DeclaringType != null && j.Value.Job.Args.SequenceEqual(args) && + j.Value.Job.Method.Name.Equals(methodName) && + j.Value.Job.Method.DeclaringType.Name.Equals(className)); + + if (ret) return true; + + if (checkRunningJobs) + { + var runningJobs = JobStorage.Current.GetMonitoringApi().ProcessingJobs(0, int.MaxValue); + return runningJobs.Any(j => + j.Value.Job.Method.DeclaringType != null && j.Value.Job.Args.SequenceEqual(args) && + j.Value.Job.Method.Name.Equals(methodName) && + 
j.Value.Job.Method.DeclaringType.Name.Equals(className)); + } + + return false; } + /// + /// Checks against any jobs that are running or about to run + /// + /// + /// + /// public static bool RunningAnyTasksByMethod(IEnumerable classNames, string queue = DefaultQueue) { var enqueuedJobs = JobStorage.Current.GetMonitoringApi().EnqueuedJobs(queue, 0, int.MaxValue); - return enqueuedJobs.Any(j => !j.Value.InEnqueuedState && + var ret = enqueuedJobs.Any(j => !j.Value.InEnqueuedState && classNames.Contains(j.Value.Job.Method.DeclaringType?.Name)); + if (ret) return true; + + var runningJobs = JobStorage.Current.GetMonitoringApi().ProcessingJobs(0, int.MaxValue); + return runningJobs.Any(j => classNames.Contains(j.Value.Job.Method.DeclaringType?.Name)); } } diff --git a/API/Services/Tasks/BackupService.cs b/API/Services/Tasks/BackupService.cs index 7c10dc81b..4bb371ec9 100644 --- a/API/Services/Tasks/BackupService.cs +++ b/API/Services/Tasks/BackupService.cs @@ -7,6 +7,7 @@ using System.Threading.Tasks; using API.Data; using API.Entities.Enums; using API.Extensions; +using API.Logging; using API.SignalR; using Hangfire; using Microsoft.AspNetCore.SignalR; @@ -19,30 +20,27 @@ public interface IBackupService { Task BackupDatabase(); /// - /// Returns a list of full paths of the logs files detailed in . + /// Returns a list of all log files for Kavita /// - /// - /// + /// If file rolling is enabled. Defaults to True. 
/// - IEnumerable GetLogFiles(int maxRollingFiles, string logFileName); + IEnumerable GetLogFiles(bool rollFiles = LogLevelOptions.LogRollingEnabled); } public class BackupService : IBackupService { private readonly IUnitOfWork _unitOfWork; private readonly ILogger _logger; private readonly IDirectoryService _directoryService; - private readonly IConfiguration _config; private readonly IEventHub _eventHub; private readonly IList _backupFiles; public BackupService(ILogger logger, IUnitOfWork unitOfWork, - IDirectoryService directoryService, IConfiguration config, IEventHub eventHub) + IDirectoryService directoryService, IEventHub eventHub) { _unitOfWork = unitOfWork; _logger = logger; _directoryService = directoryService; - _config = config; _eventHub = eventHub; _backupFiles = new List() @@ -56,12 +54,17 @@ public class BackupService : IBackupService }; } - public IEnumerable GetLogFiles(int maxRollingFiles, string logFileName) + /// + /// Returns a list of all log files for Kavita + /// + /// If file rolling is enabled. Defaults to True. + /// + public IEnumerable GetLogFiles(bool rollFiles = LogLevelOptions.LogRollingEnabled) { - var multipleFileRegex = maxRollingFiles > 0 ? @"\d*" : string.Empty; - var fi = _directoryService.FileSystem.FileInfo.FromFileName(logFileName); + var multipleFileRegex = rollFiles ? @"\d*" : string.Empty; + var fi = _directoryService.FileSystem.FileInfo.FromFileName(LogLevelOptions.LogFile); - var files = maxRollingFiles > 0 + var files = rollFiles ? 
_directoryService.GetFiles(_directoryService.LogDirectory, $@"{_directoryService.FileSystem.Path.GetFileNameWithoutExtension(fi.Name)}{multipleFileRegex}\.log") : new[] {_directoryService.FileSystem.Path.Join(_directoryService.LogDirectory, "kavita.log")}; @@ -137,9 +140,7 @@ public class BackupService : IBackupService private void CopyLogsToBackupDirectory(string tempDirectory) { - var maxRollingFiles = _config.GetMaxRollingFiles(); - var loggingSection = _config.GetLoggingFileName(); - var files = GetLogFiles(maxRollingFiles, loggingSection); + var files = GetLogFiles(); _directoryService.CopyFilesToDirectory(files, _directoryService.FileSystem.Path.Join(tempDirectory, "logs")); } diff --git a/API/Services/Tasks/CleanupService.cs b/API/Services/Tasks/CleanupService.cs index c33459681..0cc4d7c98 100644 --- a/API/Services/Tasks/CleanupService.cs +++ b/API/Services/Tasks/CleanupService.cs @@ -1,199 +1,273 @@ using System; +using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading.Tasks; using API.Data; +using API.Data.Repositories; +using API.DTOs.Filtering; +using API.Entities; using API.Entities.Enums; +using API.Helpers; using API.SignalR; using Hangfire; using Microsoft.AspNetCore.SignalR; using Microsoft.Extensions.Logging; -namespace API.Services.Tasks +namespace API.Services.Tasks; + +public interface ICleanupService { - public interface ICleanupService - { - Task Cleanup(); - Task CleanupDbEntries(); - void CleanupCacheDirectory(); - Task DeleteSeriesCoverImages(); - Task DeleteChapterCoverImages(); - Task DeleteTagCoverImages(); - Task CleanupBackups(); - void CleanupTemp(); - } + Task Cleanup(); + Task CleanupDbEntries(); + void CleanupCacheAndTempDirectories(); + Task DeleteSeriesCoverImages(); + Task DeleteChapterCoverImages(); + Task DeleteTagCoverImages(); + Task CleanupBackups(); + Task CleanupLogs(); + void CleanupTemp(); /// - /// Cleans up after operations on reoccurring basis + /// Responsible to remove Series 
from Want To Read when user's have fully read the series and the series has Publication Status of Completed or Cancelled. /// - public class CleanupService : ICleanupService + /// + Task CleanupWantToRead(); +} +/// +/// Cleans up after operations on reoccurring basis +/// +public class CleanupService : ICleanupService +{ + private readonly ILogger _logger; + private readonly IUnitOfWork _unitOfWork; + private readonly IEventHub _eventHub; + private readonly IDirectoryService _directoryService; + + public CleanupService(ILogger logger, + IUnitOfWork unitOfWork, IEventHub eventHub, + IDirectoryService directoryService) { - private readonly ILogger _logger; - private readonly IUnitOfWork _unitOfWork; - private readonly IEventHub _eventHub; - private readonly IDirectoryService _directoryService; + _logger = logger; + _unitOfWork = unitOfWork; + _eventHub = eventHub; + _directoryService = directoryService; + } - public CleanupService(ILogger logger, - IUnitOfWork unitOfWork, IEventHub eventHub, - IDirectoryService directoryService) + + /// + /// Cleans up Temp, cache, deleted cover images, and old database backups + /// + [AutomaticRetry(Attempts = 3, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Fail)] + public async Task Cleanup() + { + _logger.LogInformation("Starting Cleanup"); + await SendProgress(0F, "Starting cleanup"); + _logger.LogInformation("Cleaning temp directory"); + _directoryService.ClearDirectory(_directoryService.TempDirectory); + await SendProgress(0.1F, "Cleaning temp directory"); + CleanupCacheAndTempDirectories(); + await SendProgress(0.25F, "Cleaning old database backups"); + _logger.LogInformation("Cleaning old database backups"); + await CleanupBackups(); + await SendProgress(0.50F, "Cleaning deleted cover images"); + _logger.LogInformation("Cleaning deleted cover images"); + await DeleteSeriesCoverImages(); + await SendProgress(0.6F, "Cleaning deleted cover images"); + await DeleteChapterCoverImages(); + await 
SendProgress(0.7F, "Cleaning deleted cover images"); + await DeleteTagCoverImages(); + await DeleteReadingListCoverImages(); + await SendProgress(0.8F, "Cleaning old logs"); + await CleanupLogs(); + await SendProgress(1F, "Cleanup finished"); + _logger.LogInformation("Cleanup finished"); + } + + /// + /// Cleans up abandon rows in the DB + /// + public async Task CleanupDbEntries() + { + await _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters(); + await _unitOfWork.PersonRepository.RemoveAllPeopleNoLongerAssociated(); + await _unitOfWork.GenreRepository.RemoveAllGenreNoLongerAssociated(); + await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries(); + } + + private async Task SendProgress(float progress, string subtitle) + { + await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, + MessageFactory.CleanupProgressEvent(progress, subtitle)); + } + + /// + /// Removes all series images that are not in the database. They must follow filename pattern. + /// + public async Task DeleteSeriesCoverImages() + { + var images = await _unitOfWork.SeriesRepository.GetAllCoverImagesAsync(); + var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.SeriesCoverImageRegex); + _directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file)))); + } + + /// + /// Removes all chapter/volume images that are not in the database. They must follow filename pattern. + /// + public async Task DeleteChapterCoverImages() + { + var images = await _unitOfWork.ChapterRepository.GetAllCoverImagesAsync(); + var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.ChapterCoverImageRegex); + _directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file)))); + } + + /// + /// Removes all collection tag images that are not in the database. They must follow filename pattern. 
+ /// + public async Task DeleteTagCoverImages() + { + var images = await _unitOfWork.CollectionTagRepository.GetAllCoverImagesAsync(); + var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.CollectionTagCoverImageRegex); + _directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file)))); + } + + /// + /// Removes all reading list images that are not in the database. They must follow filename pattern. + /// + public async Task DeleteReadingListCoverImages() + { + var images = await _unitOfWork.ReadingListRepository.GetAllCoverImagesAsync(); + var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.ReadingListCoverImageRegex); + _directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file)))); + } + + /// + /// Removes all files and directories in the cache and temp directory + /// + public void CleanupCacheAndTempDirectories() + { + _logger.LogInformation("Performing cleanup of Cache & Temp directories"); + _directoryService.ExistOrCreate(_directoryService.CacheDirectory); + _directoryService.ExistOrCreate(_directoryService.TempDirectory); + + try { - _logger = logger; - _unitOfWork = unitOfWork; - _eventHub = eventHub; - _directoryService = directoryService; - } - - - /// - /// Cleans up Temp, cache, deleted cover images, and old database backups - /// - [AutomaticRetry(Attempts = 3, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Fail)] - public async Task Cleanup() - { - _logger.LogInformation("Starting Cleanup"); - await SendProgress(0F, "Starting cleanup"); - _logger.LogInformation("Cleaning temp directory"); + _directoryService.ClearDirectory(_directoryService.CacheDirectory); _directoryService.ClearDirectory(_directoryService.TempDirectory); - await SendProgress(0.1F, "Cleaning temp directory"); - CleanupCacheDirectory(); - await SendProgress(0.25F, "Cleaning 
old database backups"); - _logger.LogInformation("Cleaning old database backups"); - await CleanupBackups(); - await SendProgress(0.50F, "Cleaning deleted cover images"); - _logger.LogInformation("Cleaning deleted cover images"); - await DeleteSeriesCoverImages(); - await SendProgress(0.6F, "Cleaning deleted cover images"); - await DeleteChapterCoverImages(); - await SendProgress(0.7F, "Cleaning deleted cover images"); - await DeleteTagCoverImages(); - await DeleteReadingListCoverImages(); - await SendProgress(1F, "Cleanup finished"); - _logger.LogInformation("Cleanup finished"); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup"); } - /// - /// Cleans up abandon rows in the DB - /// - public async Task CleanupDbEntries() + _logger.LogInformation("Cache and temp directory purged"); + } + + /// + /// Removes Database backups older than configured total backups. If all backups are older than total backups days, only the latest is kept. 
+ /// + public async Task CleanupBackups() + { + var dayThreshold = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).TotalBackups; + _logger.LogInformation("Beginning cleanup of Database backups at {Time}", DateTime.Now); + var backupDirectory = + (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Value; + if (!_directoryService.Exists(backupDirectory)) return; + + var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(dayThreshold)); + var allBackups = _directoryService.GetFiles(backupDirectory).ToList(); + var expiredBackups = allBackups.Select(filename => _directoryService.FileSystem.FileInfo.FromFileName(filename)) + .Where(f => f.CreationTime < deltaTime) + .ToList(); + + if (expiredBackups.Count == allBackups.Count) { - await _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters(); - await _unitOfWork.PersonRepository.RemoveAllPeopleNoLongerAssociated(); - await _unitOfWork.GenreRepository.RemoveAllGenreNoLongerAssociated(); - await _unitOfWork.CollectionTagRepository.RemoveTagsWithoutSeries(); + _logger.LogInformation("All expired backups are older than {Threshold} days. 
Removing all but last backup", dayThreshold); + var toDelete = expiredBackups.OrderByDescending(f => f.CreationTime).ToList(); + _directoryService.DeleteFiles(toDelete.Take(toDelete.Count - 1).Select(f => f.FullName)); + } + else + { + _directoryService.DeleteFiles(expiredBackups.Select(f => f.FullName)); + } + _logger.LogInformation("Finished cleanup of Database backups at {Time}", DateTime.Now); + } + + public async Task CleanupLogs() + { + _logger.LogInformation("Performing cleanup of logs directory"); + var dayThreshold = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).TotalLogs; + var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(dayThreshold)); + var allLogs = _directoryService.GetFiles(_directoryService.LogDirectory).ToList(); + var expiredLogs = allLogs.Select(filename => _directoryService.FileSystem.FileInfo.FromFileName(filename)) + .Where(f => f.CreationTime < deltaTime) + .ToList(); + + if (expiredLogs.Count == allLogs.Count) + { + _logger.LogInformation("All expired backups are older than {Threshold} days. 
Removing all but last backup", dayThreshold); + var toDelete = expiredLogs.OrderBy(f => f.CreationTime).ToList(); + _directoryService.DeleteFiles(toDelete.Take(toDelete.Count - 1).Select(f => f.FullName)); + } + else + { + _directoryService.DeleteFiles(expiredLogs.Select(f => f.FullName)); + } + _logger.LogInformation("Finished cleanup of logs at {Time}", DateTime.Now); + } + + public void CleanupTemp() + { + _logger.LogInformation("Performing cleanup of Temp directory"); + _directoryService.ExistOrCreate(_directoryService.TempDirectory); + + try + { + _directoryService.ClearDirectory(_directoryService.TempDirectory); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup"); } - private async Task SendProgress(float progress, string subtitle) - { - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, - MessageFactory.CleanupProgressEvent(progress, subtitle)); - } + _logger.LogInformation("Temp directory purged"); + } - /// - /// Removes all series images that are not in the database. They must follow filename pattern. - /// - public async Task DeleteSeriesCoverImages() - { - var images = await _unitOfWork.SeriesRepository.GetAllCoverImagesAsync(); - var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.SeriesCoverImageRegex); - _directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file)))); - } + public async Task CleanupWantToRead() + { + _logger.LogInformation("Performing cleanup of Series that are Completed and have been fully read that are in Want To Read list"); - /// - /// Removes all chapter/volume images that are not in the database. They must follow filename pattern. 
- /// - public async Task DeleteChapterCoverImages() + var libraryIds = (await _unitOfWork.LibraryRepository.GetLibrariesAsync()).Select(l => l.Id).ToList(); + var filter = new FilterDto() { - var images = await _unitOfWork.ChapterRepository.GetAllCoverImagesAsync(); - var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.ChapterCoverImageRegex); - _directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file)))); - } - - /// - /// Removes all collection tag images that are not in the database. They must follow filename pattern. - /// - public async Task DeleteTagCoverImages() - { - var images = await _unitOfWork.CollectionTagRepository.GetAllCoverImagesAsync(); - var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.CollectionTagCoverImageRegex); - _directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file)))); - } - - /// - /// Removes all reading list images that are not in the database. They must follow filename pattern. 
- /// - public async Task DeleteReadingListCoverImages() - { - var images = await _unitOfWork.ReadingListRepository.GetAllCoverImagesAsync(); - var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.ReadingListCoverImageRegex); - _directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file)))); - } - - /// - /// Removes all files and directories in the cache and temp directory - /// - public void CleanupCacheDirectory() - { - _logger.LogInformation("Performing cleanup of Cache directory"); - _directoryService.ExistOrCreate(_directoryService.CacheDirectory); - _directoryService.ExistOrCreate(_directoryService.TempDirectory); - - try + PublicationStatus = new List() { - _directoryService.ClearDirectory(_directoryService.CacheDirectory); - _directoryService.ClearDirectory(_directoryService.TempDirectory); - } - catch (Exception ex) + PublicationStatus.Completed, + PublicationStatus.Cancelled + }, + Libraries = libraryIds, + ReadStatus = new ReadStatus() { - _logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup"); + Read = true, + InProgress = false, + NotRead = false } - - _logger.LogInformation("Cache directory purged"); - } - - /// - /// Removes Database backups older than configured total backups. If all backups are older than total backups days, only the latest is kept. 
- /// - public async Task CleanupBackups() + }; + foreach (var user in await _unitOfWork.UserRepository.GetAllUsersAsync(AppUserIncludes.WantToRead)) { - var dayThreshold = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).TotalBackups; - _logger.LogInformation("Beginning cleanup of Database backups at {Time}", DateTime.Now); - var backupDirectory = - (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Value; - if (!_directoryService.Exists(backupDirectory)) return; + var series = await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(0, user.Id, new UserParams(), filter); + var seriesIds = series.Select(s => s.Id).ToList(); + if (seriesIds.Count == 0) continue; - var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(dayThreshold)); - var allBackups = _directoryService.GetFiles(backupDirectory).ToList(); - var expiredBackups = allBackups.Select(filename => _directoryService.FileSystem.FileInfo.FromFileName(filename)) - .Where(f => f.CreationTime < deltaTime) - .ToList(); - - if (expiredBackups.Count == allBackups.Count) - { - _logger.LogInformation("All expired backups are older than {Threshold} days. 
Removing all but last backup", dayThreshold); - var toDelete = expiredBackups.OrderByDescending(f => f.CreationTime).ToList(); - _directoryService.DeleteFiles(toDelete.Take(toDelete.Count - 1).Select(f => f.FullName)); - } - else - { - _directoryService.DeleteFiles(expiredBackups.Select(f => f.FullName)); - } - _logger.LogInformation("Finished cleanup of Database backups at {Time}", DateTime.Now); + user.WantToRead ??= new List(); + user.WantToRead = user.WantToRead.Where(s => !seriesIds.Contains(s.Id)).ToList(); + _unitOfWork.UserRepository.Update(user); } - public void CleanupTemp() + if (_unitOfWork.HasChanges()) { - _logger.LogInformation("Performing cleanup of Temp directory"); - _directoryService.ExistOrCreate(_directoryService.TempDirectory); - - try - { - _directoryService.ClearDirectory(_directoryService.TempDirectory); - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup"); - } - - _logger.LogInformation("Temp directory purged"); + await _unitOfWork.CommitAsync(); } + + _logger.LogInformation("Performing cleanup of Series that are Completed and have been fully read that are in Want To Read list, completed"); } } diff --git a/API/Services/Tasks/Scanner/LibraryWatcher.cs b/API/Services/Tasks/Scanner/LibraryWatcher.cs index 17ea744c9..fea30b7fe 100644 --- a/API/Services/Tasks/Scanner/LibraryWatcher.cs +++ b/API/Services/Tasks/Scanner/LibraryWatcher.cs @@ -11,52 +11,6 @@ using Microsoft.Extensions.Logging; namespace API.Services.Tasks.Scanner; -/// -/// Change information -/// -public class Change -{ - /// - /// Gets or sets the type of the change. - /// - /// - /// The type of the change. - /// - public WatcherChangeTypes ChangeType { get; set; } - - /// - /// Gets or sets the full path. - /// - /// - /// The full path. - /// - public string FullPath { get; set; } - - /// - /// Gets or sets the name. - /// - /// - /// The name. 
- /// - public string Name { get; set; } - - /// - /// Gets or sets the old full path. - /// - /// - /// The old full path. - /// - public string OldFullPath { get; set; } - - /// - /// Gets or sets the old name. - /// - /// - /// The old name. - /// - public string OldName { get; set; } -} - public interface ILibraryWatcher { /// @@ -84,24 +38,35 @@ public class LibraryWatcher : ILibraryWatcher private readonly IDirectoryService _directoryService; private readonly IUnitOfWork _unitOfWork; private readonly ILogger _logger; - private readonly IScannerService _scannerService; + private readonly ITaskScheduler _taskScheduler; - private readonly Dictionary> _watcherDictionary = new (); + private static readonly Dictionary> WatcherDictionary = new (); /// /// This is just here to prevent GC from Disposing our watchers /// - private readonly IList _fileWatchers = new List(); - private IList _libraryFolders = new List(); - + private static readonly IList FileWatchers = new List(); + /// + /// The amount of time until the Schedule ScanFolder task should be executed + /// + /// The Job will be enqueued instantly private readonly TimeSpan _queueWaitTime; + /// + /// Counts within a time frame how many times the buffer became full. Is used to reschedule LibraryWatcher to start monitoring much later rather than instantly + /// + private int _bufferFullCounter; + /// + /// Used to lock buffer Full Counter + /// + private static readonly object Lock = new (); - public LibraryWatcher(IDirectoryService directoryService, IUnitOfWork unitOfWork, ILogger logger, IScannerService scannerService, IHostEnvironment environment) + public LibraryWatcher(IDirectoryService directoryService, IUnitOfWork unitOfWork, + ILogger logger, IHostEnvironment environment, ITaskScheduler taskScheduler) { _directoryService = directoryService; _unitOfWork = unitOfWork; _logger = logger; - _scannerService = scannerService; + _taskScheduler = taskScheduler; _queueWaitTime = environment.IsDevelopment() ? 
TimeSpan.FromSeconds(30) : TimeSpan.FromMinutes(5); @@ -109,69 +74,75 @@ public class LibraryWatcher : ILibraryWatcher public async Task StartWatching() { - _logger.LogInformation("Starting file watchers"); + _logger.LogInformation("[LibraryWatcher] Starting file watchers"); - _libraryFolders = (await _unitOfWork.LibraryRepository.GetLibraryDtosAsync()) + var libraryFolders = (await _unitOfWork.LibraryRepository.GetLibraryDtosAsync()) .SelectMany(l => l.Folders) .Distinct() .Select(Parser.Parser.NormalizePath) .Where(_directoryService.Exists) .ToList(); - foreach (var libraryFolder in _libraryFolders) + + foreach (var libraryFolder in libraryFolders) { - _logger.LogDebug("Watching {FolderPath}", libraryFolder); + _logger.LogDebug("[LibraryWatcher] Watching {FolderPath}", libraryFolder); var watcher = new FileSystemWatcher(libraryFolder); watcher.Changed += OnChanged; watcher.Created += OnCreated; watcher.Deleted += OnDeleted; watcher.Error += OnError; + watcher.Disposed += (_, _) => + _logger.LogError("[LibraryWatcher] watcher was disposed when it shouldn't have been. 
Please report this to Kavita dev"); watcher.Filter = "*.*"; watcher.IncludeSubdirectories = true; watcher.EnableRaisingEvents = true; - _fileWatchers.Add(watcher); - if (!_watcherDictionary.ContainsKey(libraryFolder)) + FileWatchers.Add(watcher); + if (!WatcherDictionary.ContainsKey(libraryFolder)) { - _watcherDictionary.Add(libraryFolder, new List()); + WatcherDictionary.Add(libraryFolder, new List()); } - _watcherDictionary[libraryFolder].Add(watcher); + WatcherDictionary[libraryFolder].Add(watcher); } + _logger.LogInformation("[LibraryWatcher] Watching {Count} folders", FileWatchers.Count); } public void StopWatching() { - _logger.LogInformation("Stopping watching folders"); - foreach (var fileSystemWatcher in _watcherDictionary.Values.SelectMany(watcher => watcher)) + _logger.LogInformation("[LibraryWatcher] Stopping watching folders"); + foreach (var fileSystemWatcher in WatcherDictionary.Values.SelectMany(watcher => watcher)) { fileSystemWatcher.EnableRaisingEvents = false; fileSystemWatcher.Changed -= OnChanged; fileSystemWatcher.Created -= OnCreated; fileSystemWatcher.Deleted -= OnDeleted; - fileSystemWatcher.Dispose(); + fileSystemWatcher.Error -= OnError; } - _fileWatchers.Clear(); - _watcherDictionary.Clear(); + FileWatchers.Clear(); + WatcherDictionary.Clear(); } public async Task RestartWatching() { + _logger.LogDebug("[LibraryWatcher] Restarting watcher"); + StopWatching(); await StartWatching(); } private void OnChanged(object sender, FileSystemEventArgs e) { + _logger.LogDebug("[LibraryWatcher] Changed: {FullPath}, {Name}, {ChangeType}", e.FullPath, e.Name, e.ChangeType); if (e.ChangeType != WatcherChangeTypes.Changed) return; - _logger.LogDebug("[LibraryWatcher] Changed: {FullPath}, {Name}", e.FullPath, e.Name); - ProcessChange(e.FullPath, string.IsNullOrEmpty(_directoryService.FileSystem.Path.GetExtension(e.Name))); + BackgroundJob.Enqueue(() => ProcessChange(e.FullPath, 
string.IsNullOrEmpty(_directoryService.FileSystem.Path.GetExtension(e.Name)))); } private void OnCreated(object sender, FileSystemEventArgs e) { _logger.LogDebug("[LibraryWatcher] Created: {FullPath}, {Name}", e.FullPath, e.Name); - ProcessChange(e.FullPath, !_directoryService.FileSystem.File.Exists(e.Name)); + BackgroundJob.Enqueue(() => ProcessChange(e.FullPath, !_directoryService.FileSystem.File.Exists(e.Name))); } /// @@ -183,14 +154,34 @@ public class LibraryWatcher : ILibraryWatcher var isDirectory = string.IsNullOrEmpty(_directoryService.FileSystem.Path.GetExtension(e.Name)); if (!isDirectory) return; _logger.LogDebug("[LibraryWatcher] Deleted: {FullPath}, {Name}", e.FullPath, e.Name); - ProcessChange(e.FullPath, true); + BackgroundJob.Enqueue(() => ProcessChange(e.FullPath, true)); } - + /// + /// On error, we count the number of errors that have occured. If the number of errors has been more than 2 in last 10 minutes, then we suspend listening for an hour + /// + /// This will schedule jobs to decrement the buffer full counter + /// + /// private void OnError(object sender, ErrorEventArgs e) { - _logger.LogError(e.GetException(), "[LibraryWatcher] An error occured, likely too many watches occured at once. Restarting Watchers"); + _logger.LogError(e.GetException(), "[LibraryWatcher] An error occured, likely too many changes occured at once or the folder being watched was deleted. Restarting Watchers"); + bool condition; + lock (Lock) + { + _bufferFullCounter += 1; + condition = _bufferFullCounter >= 3; + } + + if (condition) + { + _logger.LogInformation("[LibraryWatcher] Internal buffer has been overflown multiple times in past 10 minutes. 
Suspending file watching for an hour"); + StopWatching(); + BackgroundJob.Schedule(() => RestartWatching(), TimeSpan.FromHours(1)); + return; + } Task.Run(RestartWatching); + BackgroundJob.Schedule(() => UpdateLastBufferOverflow(), TimeSpan.FromMinutes(10)); } @@ -198,53 +189,79 @@ public class LibraryWatcher : ILibraryWatcher /// Processes the file or folder change. If the change is a file change and not from a supported extension, it will be ignored. /// /// This will ignore image files that are added to the system. However, they may still trigger scans due to folder changes. + /// This is public only because Hangfire will invoke it. Do not call external to this class. /// File or folder that changed /// If the change is on a directory and not a file - private void ProcessChange(string filePath, bool isDirectoryChange = false) + // ReSharper disable once MemberCanBePrivate.Global + public async Task ProcessChange(string filePath, bool isDirectoryChange = false) { var sw = Stopwatch.StartNew(); + _logger.LogDebug("[LibraryWatcher] Processing change of {FilePath}", filePath); try { - // We need to check if directory or not + // If not a directory change AND file is not an archive or book, ignore if (!isDirectoryChange && - !(Parser.Parser.IsArchive(filePath) || Parser.Parser.IsBook(filePath))) return; - - var parentDirectory = _directoryService.GetParentDirectoryName(filePath); - if (string.IsNullOrEmpty(parentDirectory)) return; - - // We need to find the library this creation belongs to - // Multiple libraries can point to the same base folder. In this case, we need use FirstOrDefault - var libraryFolder = _libraryFolders.FirstOrDefault(f => parentDirectory.Contains(f)); - if (string.IsNullOrEmpty(libraryFolder)) return; - - var rootFolder = _directoryService.GetFoldersTillRoot(libraryFolder, filePath).ToList(); - if (!rootFolder.Any()) return; - - // Select the first folder and join with library folder, this should give us the folder to scan. 
- var fullPath = - Parser.Parser.NormalizePath(_directoryService.FileSystem.Path.Join(libraryFolder, rootFolder.First())); - - var alreadyScheduled = - TaskScheduler.HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", new object[] {fullPath}); - _logger.LogDebug("{FullPath} already enqueued: {Value}", fullPath, alreadyScheduled); - if (!alreadyScheduled) + !(Parser.Parser.IsArchive(filePath) || Parser.Parser.IsBook(filePath))) { - _logger.LogDebug("[LibraryWatcher] Scheduling ScanFolder for {Folder}", fullPath); - BackgroundJob.Schedule(() => _scannerService.ScanFolder(fullPath), _queueWaitTime); + _logger.LogDebug("[LibraryWatcher] Change from {FilePath} is not an archive or book, ignoring change", filePath); + return; } - else + + var libraryFolders = (await _unitOfWork.LibraryRepository.GetLibraryDtosAsync()) + .SelectMany(l => l.Folders) + .Distinct() + .Select(Parser.Parser.NormalizePath) + .Where(_directoryService.Exists) + .ToList(); + + var fullPath = GetFolder(filePath, libraryFolders); + _logger.LogDebug("Folder path: {FolderPath}", fullPath); + if (string.IsNullOrEmpty(fullPath)) { - _logger.LogDebug("[LibraryWatcher] Skipped scheduling ScanFolder for {Folder} as a job already queued", - fullPath); + _logger.LogDebug("[LibraryWatcher] Change from {FilePath} could not find root level folder, ignoring change", filePath); + return; } + + _taskScheduler.ScanFolder(fullPath, _queueWaitTime); } catch (Exception ex) { _logger.LogError(ex, "[LibraryWatcher] An error occured when processing a watch event"); } - _logger.LogDebug("ProcessChange occured in {ElapsedMilliseconds}ms", sw.ElapsedMilliseconds); + _logger.LogDebug("[LibraryWatcher] ProcessChange completed in {ElapsedMilliseconds}ms", sw.ElapsedMilliseconds); + } + + private string GetFolder(string filePath, IEnumerable libraryFolders) + { + var parentDirectory = _directoryService.GetParentDirectoryName(filePath); + _logger.LogDebug("[LibraryWatcher] Parent Directory: {ParentDirectory}", 
parentDirectory); + if (string.IsNullOrEmpty(parentDirectory)) return string.Empty; + + // We need to find the library this creation belongs to + // Multiple libraries can point to the same base folder. In this case, we need use FirstOrDefault + var libraryFolder = libraryFolders.FirstOrDefault(f => parentDirectory.Contains(f)); + _logger.LogDebug("[LibraryWatcher] Library Folder: {LibraryFolder}", libraryFolder); + if (string.IsNullOrEmpty(libraryFolder)) return string.Empty; + + var rootFolder = _directoryService.GetFoldersTillRoot(libraryFolder, filePath).ToList(); + _logger.LogDebug("[LibraryWatcher] Root Folders: {RootFolders}", rootFolder); + if (!rootFolder.Any()) return string.Empty; + + // Select the first folder and join with library folder, this should give us the folder to scan. + return Parser.Parser.NormalizePath(_directoryService.FileSystem.Path.Join(libraryFolder, rootFolder.Last())); } - + /// + /// This is called via Hangfire to decrement the counter. Must work around a lock + /// + // ReSharper disable once MemberCanBePrivate.Global + public void UpdateLastBufferOverflow() + { + lock (Lock) + { + if (_bufferFullCounter == 0) return; + _bufferFullCounter -= 1; + } + } } diff --git a/API/Services/Tasks/Scanner/ParseScannedFiles.cs b/API/Services/Tasks/Scanner/ParseScannedFiles.cs index d31879e84..dbd23d970 100644 --- a/API/Services/Tasks/Scanner/ParseScannedFiles.cs +++ b/API/Services/Tasks/Scanner/ParseScannedFiles.cs @@ -1,361 +1,417 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; +using System.IO; using System.Linq; using System.Threading.Tasks; using API.Entities.Enums; using API.Extensions; using API.Parser; using API.SignalR; +using Kavita.Common.Helpers; using Microsoft.Extensions.Logging; -namespace API.Services.Tasks.Scanner +namespace API.Services.Tasks.Scanner; + +public class ParsedSeries { - public class ParsedSeries - { - /// - /// Name of the Series - /// - public string Name { get; init; } - 
/// - /// Normalized Name of the Series - /// - public string NormalizedName { get; init; } - /// - /// Format of the Series - /// - public MangaFormat Format { get; init; } - } + /// + /// Name of the Series + /// + public string Name { get; init; } + /// + /// Normalized Name of the Series + /// + public string NormalizedName { get; init; } + /// + /// Format of the Series + /// + public MangaFormat Format { get; init; } +} - public enum Modified - { - Modified = 1, - NotModified = 2 - } +public class SeriesModified +{ + public string FolderPath { get; set; } + public string SeriesName { get; set; } + public DateTime LastScanned { get; set; } + public MangaFormat Format { get; set; } + public IEnumerable LibraryRoots { get; set; } +} - public class SeriesModified + +public class ParseScannedFiles +{ + private readonly ILogger _logger; + private readonly IDirectoryService _directoryService; + private readonly IReadingItemService _readingItemService; + private readonly IEventHub _eventHub; + + /// + /// An instance of a pipeline for processing files and returning a Map of Series -> ParserInfos. + /// Each instance is separate from other threads, allowing for no cross over. + /// + /// Logger of the parent class that invokes this + /// Directory Service + /// ReadingItemService Service for extracting information on a number of formats + /// For firing off SignalR events + public ParseScannedFiles(ILogger logger, IDirectoryService directoryService, + IReadingItemService readingItemService, IEventHub eventHub) { - public string FolderPath { get; set; } - public string SeriesName { get; set; } - public DateTime LastScanned { get; set; } - public MangaFormat Format { get; set; } + _logger = logger; + _directoryService = directoryService; + _readingItemService = readingItemService; + _eventHub = eventHub; } - public class ParseScannedFiles + /// + /// This will Scan all files in a folder path. 
For each folder within the folderPath, FolderAction will be invoked for all files contained + /// + /// Scan directory by directory and for each, call folderAction + /// A dictionary mapping a normalized path to a list of to help scanner skip I/O + /// A library folder or series folder + /// A callback async Task to be called once all files for each folder path are found + /// If we should bypass any folder last write time checks on the scan and force I/O + public async Task ProcessFiles(string folderPath, bool scanDirectoryByDirectory, + IDictionary> seriesPaths, Func, string,Task> folderAction, bool forceCheck = false) { - private readonly ILogger _logger; - private readonly IDirectoryService _directoryService; - private readonly IReadingItemService _readingItemService; - private readonly IEventHub _eventHub; - - /// - /// An instance of a pipeline for processing files and returning a Map of Series -> ParserInfos. - /// Each instance is separate from other threads, allowing for no cross over. 
- /// - /// Logger of the parent class that invokes this - /// Directory Service - /// ReadingItemService Service for extracting information on a number of formats - /// For firing off SignalR events - public ParseScannedFiles(ILogger logger, IDirectoryService directoryService, - IReadingItemService readingItemService, IEventHub eventHub) + string normalizedPath; + if (scanDirectoryByDirectory) { - _logger = logger; - _directoryService = directoryService; - _readingItemService = readingItemService; - _eventHub = eventHub; + // This is used in library scan, so we should check first for a ignore file and use that here as well + var potentialIgnoreFile = _directoryService.FileSystem.Path.Join(folderPath, DirectoryService.KavitaIgnoreFile); + var matcher = _directoryService.CreateMatcherFromFile(potentialIgnoreFile); + var directories = _directoryService.GetDirectories(folderPath, matcher).ToList(); + + foreach (var directory in directories) + { + normalizedPath = Parser.Parser.NormalizePath(directory); + if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedPath, forceCheck)) + { + await folderAction(new List(), directory); + } + else + { + // For a scan, this is doing everything in the directory loop before the folder Action is called...which leads to no progress indication + await folderAction(_directoryService.ScanFiles(directory, matcher), directory); + } + } + + return; } - - /// - /// This will Scan all files in a folder path. 
For each folder within the folderPath, FolderAction will be invoked for all files contained - /// - /// Scan directory by directory and for each, call folderAction - /// A library folder or series folder - /// A callback async Task to be called once all files for each folder path are found - /// If we should bypass any folder last write time checks on the scan and force I/O - public async Task ProcessFiles(string folderPath, bool scanDirectoryByDirectory, - IDictionary> seriesPaths, Func, string,Task> folderAction, bool forceCheck = false) + normalizedPath = Parser.Parser.NormalizePath(folderPath); + if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedPath, forceCheck)) { - string normalizedPath; - if (scanDirectoryByDirectory) - { - // This is used in library scan, so we should check first for a ignore file and use that here as well - var potentialIgnoreFile = _directoryService.FileSystem.Path.Join(folderPath, DirectoryService.KavitaIgnoreFile); - var directories = _directoryService.GetDirectories(folderPath, _directoryService.CreateMatcherFromFile(potentialIgnoreFile)).ToList(); + await folderAction(new List(), folderPath); + return; + } + // We need to calculate all folders till library root and see if any kavitaignores + var seriesMatcher = BuildIgnoreFromLibraryRoot(folderPath, seriesPaths); - foreach (var directory in directories) + await folderAction(_directoryService.ScanFiles(folderPath, seriesMatcher), folderPath); + } + + /// + /// Used in ScanSeries, which enters at a lower level folder and hence needs a .kavitaignore from higher (up to root) to be built before + /// the scan takes place. + /// + /// + /// + /// A GlobMatter. 
Empty if not applicable + private GlobMatcher BuildIgnoreFromLibraryRoot(string folderPath, IDictionary> seriesPaths) + { + var seriesMatcher = new GlobMatcher(); + try + { + var roots = seriesPaths[folderPath][0].LibraryRoots.Select(Parser.Parser.NormalizePath).ToList(); + var libraryFolder = roots.SingleOrDefault(folderPath.Contains); + + if (string.IsNullOrEmpty(libraryFolder) || !Directory.Exists(folderPath)) + { + return seriesMatcher; + } + + var allParents = _directoryService.GetFoldersTillRoot(libraryFolder, folderPath); + var path = libraryFolder; + + // Apply the library root level kavitaignore + var potentialIgnoreFile = _directoryService.FileSystem.Path.Join(path, DirectoryService.KavitaIgnoreFile); + seriesMatcher.Merge(_directoryService.CreateMatcherFromFile(potentialIgnoreFile)); + + // Then apply kavitaignores for each folder down to where the series folder is + foreach (var folderPart in allParents.Reverse()) + { + path = Parser.Parser.NormalizePath(Path.Join(libraryFolder, folderPart)); + potentialIgnoreFile = _directoryService.FileSystem.Path.Join(path, DirectoryService.KavitaIgnoreFile); + seriesMatcher.Merge(_directoryService.CreateMatcherFromFile(potentialIgnoreFile)); + } + } + catch (Exception ex) + { + _logger.LogError(ex, + "[ScannerService] There was an error trying to find and apply .kavitaignores above the Series Folder. Scanning without them present"); + } + + return seriesMatcher; + } + + + /// + /// Attempts to either add a new instance of a show mapping to the _scannedSeries bag or adds to an existing. 
+ /// This will check if the name matches an existing series name (multiple fields) + /// + /// A localized list of a series' parsed infos + /// + private void TrackSeries(ConcurrentDictionary> scannedSeries, ParserInfo info) + { + if (info.Series == string.Empty) return; + + // Check if normalized info.Series already exists and if so, update info to use that name instead + info.Series = MergeName(scannedSeries, info); + + var normalizedSeries = Parser.Parser.Normalize(info.Series); + var normalizedSortSeries = Parser.Parser.Normalize(info.SeriesSort); + var normalizedLocalizedSeries = Parser.Parser.Normalize(info.LocalizedSeries); + + try + { + var existingKey = scannedSeries.Keys.SingleOrDefault(ps => + ps.Format == info.Format && (ps.NormalizedName.Equals(normalizedSeries) + || ps.NormalizedName.Equals(normalizedLocalizedSeries) + || ps.NormalizedName.Equals(normalizedSortSeries))); + existingKey ??= new ParsedSeries() + { + Format = info.Format, + Name = info.Series, + NormalizedName = normalizedSeries + }; + + scannedSeries.AddOrUpdate(existingKey, new List() {info}, (_, oldValue) => + { + oldValue ??= new List(); + if (!oldValue.Contains(info)) { - normalizedPath = Parser.Parser.NormalizePath(directory); - if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedPath, forceCheck)) - { - await folderAction(new List(), directory); - } - else - { - // For a scan, this is doing everything in the directory loop before the folder Action is called...which leads to no progress indication - await folderAction(_directoryService.ScanFiles(directory), directory); - } + oldValue.Add(info); } + return oldValue; + }); + } + catch (Exception ex) + { + _logger.LogCritical(ex, "[ScannerService] {SeriesName} matches against multiple series in the parsed series. This indicates a critical kavita issue. 
Key will be skipped", info.Series); + foreach (var seriesKey in scannedSeries.Keys.Where(ps => + ps.Format == info.Format && (ps.NormalizedName.Equals(normalizedSeries) + || ps.NormalizedName.Equals(normalizedLocalizedSeries) + || ps.NormalizedName.Equals(normalizedSortSeries)))) + { + _logger.LogCritical("[ScannerService] Matches: {SeriesName} matches on {SeriesKey}", info.Series, seriesKey.Name); + } + } + } + + + /// + /// Using a normalized name from the passed ParserInfo, this checks against all found series so far and if an existing one exists with + /// same normalized name, it merges into the existing one. This is important as some manga may have a slight difference with punctuation or capitalization. + /// + /// + /// + /// Series Name to group this info into + private string MergeName(ConcurrentDictionary> scannedSeries, ParserInfo info) + { + var normalizedSeries = Parser.Parser.Normalize(info.Series); + var normalizedLocalSeries = Parser.Parser.Normalize(info.LocalizedSeries); + + try + { + var existingName = + scannedSeries.SingleOrDefault(p => + (Parser.Parser.Normalize(p.Key.NormalizedName).Equals(normalizedSeries) || + Parser.Parser.Normalize(p.Key.NormalizedName).Equals(normalizedLocalSeries)) && + p.Key.Format == info.Format) + .Key; + + if (existingName != null && !string.IsNullOrEmpty(existingName.Name)) + { + return existingName.Name; + } + } + catch (Exception ex) + { + _logger.LogCritical(ex, "[ScannerService] Multiple series detected for {SeriesName} ({File})! This is critical to fix! 
There should only be 1", info.Series, info.FullFilePath); + var values = scannedSeries.Where(p => + (Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedSeries || + Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedLocalSeries) && + p.Key.Format == info.Format); + foreach (var pair in values) + { + _logger.LogCritical("[ScannerService] Duplicate Series in DB matches with {SeriesName}: {DuplicateName}", info.Series, pair.Key.Name); + } + + } + + return info.Series; + } + + + /// + /// This will process series by folder groups. This is used solely by ScanSeries + /// + /// + /// + /// + /// If true, does a directory scan first (resulting in folders being tackled in parallel), else does an immediate scan files + /// A map of Series names -> existing folder paths to handle skipping folders + /// Action which returns if the folder was skipped and the infos from said folder + /// Defaults to false + /// + public async Task ScanLibrariesForSeries(LibraryType libraryType, + IEnumerable folders, string libraryName, bool isLibraryScan, + IDictionary> seriesPaths, Func>, Task> processSeriesInfos, bool forceCheck = false) + { + + await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Starting", libraryName, ProgressEventType.Started)); + + async Task ProcessFolder(IList files, string folder) + { + var normalizedFolder = Parser.Parser.NormalizePath(folder); + if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedFolder, forceCheck)) + { + var parsedInfos = seriesPaths[normalizedFolder].Select(fp => new ParserInfo() + { + Series = fp.SeriesName, + Format = fp.Format, + }).ToList(); + await processSeriesInfos.Invoke(new Tuple>(true, parsedInfos)); + _logger.LogDebug("[ScannerService] Skipped File Scan for {Folder} as it hasn't changed since last scan", folder); return; } - normalizedPath = Parser.Parser.NormalizePath(folderPath); - if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, 
normalizedPath, forceCheck)) + _logger.LogDebug("[ScannerService] Found {Count} files for {Folder}", files.Count, folder); + await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, + MessageFactory.FileScanProgressEvent(folder, libraryName, ProgressEventType.Updated)); + if (files.Count == 0) { - await folderAction(new List(), folderPath); + _logger.LogInformation("[ScannerService] {Folder} is empty or is no longer in this location", folder); return; } - await folderAction(_directoryService.ScanFiles(folderPath), folderPath); - } + + var scannedSeries = new ConcurrentDictionary>(); + var infos = files + .Select(file => _readingItemService.ParseFile(file, folder, libraryType)) + .Where(info => info != null) + .ToList(); - /// - /// Attempts to either add a new instance of a show mapping to the _scannedSeries bag or adds to an existing. - /// This will check if the name matches an existing series name (multiple fields) - /// - /// A localized list of a series' parsed infos - /// - private void TrackSeries(ConcurrentDictionary> scannedSeries, ParserInfo info) - { - if (info.Series == string.Empty) return; + MergeLocalizedSeriesWithSeries(infos); - // Check if normalized info.Series already exists and if so, update info to use that name instead - info.Series = MergeName(scannedSeries, info); - - var normalizedSeries = Parser.Parser.Normalize(info.Series); - var normalizedSortSeries = Parser.Parser.Normalize(info.SeriesSort); - var normalizedLocalizedSeries = Parser.Parser.Normalize(info.LocalizedSeries); - - try - { - var existingKey = scannedSeries.Keys.SingleOrDefault(ps => - ps.Format == info.Format && (ps.NormalizedName.Equals(normalizedSeries) - || ps.NormalizedName.Equals(normalizedLocalizedSeries) - || ps.NormalizedName.Equals(normalizedSortSeries))); - existingKey ??= new ParsedSeries() - { - Format = info.Format, - Name = info.Series, - NormalizedName = normalizedSeries - }; - - scannedSeries.AddOrUpdate(existingKey, new List() {info}, (_, 
oldValue) => - { - oldValue ??= new List(); - if (!oldValue.Contains(info)) - { - oldValue.Add(info); - } - - return oldValue; - }); - } - catch (Exception ex) - { - _logger.LogCritical(ex, "{SeriesName} matches against multiple series in the parsed series. This indicates a critical kavita issue. Key will be skipped", info.Series); - foreach (var seriesKey in scannedSeries.Keys.Where(ps => - ps.Format == info.Format && (ps.NormalizedName.Equals(normalizedSeries) - || ps.NormalizedName.Equals(normalizedLocalizedSeries) - || ps.NormalizedName.Equals(normalizedSortSeries)))) - { - _logger.LogCritical("Matches: {SeriesName} matches on {SeriesKey}", info.Series, seriesKey.Name); - } - } - } - - - /// - /// Using a normalized name from the passed ParserInfo, this checks against all found series so far and if an existing one exists with - /// same normalized name, it merges into the existing one. This is important as some manga may have a slight difference with punctuation or capitalization. - /// - /// - /// Series Name to group this info into - private string MergeName(ConcurrentDictionary> scannedSeries, ParserInfo info) - { - var normalizedSeries = Parser.Parser.Normalize(info.Series); - var normalizedLocalSeries = Parser.Parser.Normalize(info.LocalizedSeries); - - try - { - var existingName = - scannedSeries.SingleOrDefault(p => - (Parser.Parser.Normalize(p.Key.NormalizedName).Equals(normalizedSeries) || - Parser.Parser.Normalize(p.Key.NormalizedName).Equals(normalizedLocalSeries)) && - p.Key.Format == info.Format) - .Key; - - if (existingName != null && !string.IsNullOrEmpty(existingName.Name)) - { - return existingName.Name; - } - } - catch (Exception ex) - { - _logger.LogCritical(ex, "Multiple series detected for {SeriesName} ({File})! This is critical to fix! 
There should only be 1", info.Series, info.FullFilePath); - var values = scannedSeries.Where(p => - (Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedSeries || - Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedLocalSeries) && - p.Key.Format == info.Format); - foreach (var pair in values) - { - _logger.LogCritical("Duplicate Series in DB matches with {SeriesName}: {DuplicateName}", info.Series, pair.Key.Name); - } - - } - - return info.Series; - } - - - /// - /// This will process series by folder groups. - /// - /// - /// - /// - /// - public async Task ScanLibrariesForSeries(LibraryType libraryType, - IEnumerable folders, string libraryName, bool isLibraryScan, - IDictionary> seriesPaths, Action>> processSeriesInfos, bool forceCheck = false) - { - - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Starting", libraryName, ProgressEventType.Started)); - - foreach (var folderPath in folders) + foreach (var info in infos) { try { - await ProcessFiles(folderPath, isLibraryScan, seriesPaths, async (files, folder) => - { - var normalizedFolder = Parser.Parser.NormalizePath(folder); - if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedFolder, forceCheck)) - { - var parsedInfos = seriesPaths[normalizedFolder].Select(fp => new ParserInfo() - { - Series = fp.SeriesName, - Format = fp.Format, - }).ToList(); - processSeriesInfos.Invoke(new Tuple>(true, parsedInfos)); - _logger.LogDebug("Skipped File Scan for {Folder} as it hasn't changed since last scan", folder); - return; - } - _logger.LogDebug("Found {Count} files for {Folder}", files.Count, folder); - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent(folderPath, libraryName, ProgressEventType.Updated)); - if (files.Count == 0) - { - _logger.LogInformation("[ScannerService] {Folder} is empty", folder); - return; - } - var scannedSeries = new 
ConcurrentDictionary>(); - var infos = files - .Select(file => _readingItemService.ParseFile(file, folderPath, libraryType)) - .Where(info => info != null) - .ToList(); - - - MergeLocalizedSeriesWithSeries(infos); - - foreach (var info in infos) - { - try - { - TrackSeries(scannedSeries, info); - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an exception that occurred during tracking {FilePath}. Skipping this file", info.FullFilePath); - } - } - - // It would be really cool if we can emit an event when a folder hasn't been changed so we don't parse everything, but the first item to ensure we don't delete it - // Otherwise, we can do a last step in the DB where we validate all files on disk exist and if not, delete them. (easy but slow) - foreach (var series in scannedSeries.Keys) - { - if (scannedSeries[series].Count > 0 && processSeriesInfos != null) - { - processSeriesInfos.Invoke(new Tuple>(false, scannedSeries[series])); - } - } - }, forceCheck); + TrackSeries(scannedSeries, info); } - catch (ArgumentException ex) + catch (Exception ex) { - _logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath); + _logger.LogError(ex, + "[ScannerService] There was an exception that occurred during tracking {FilePath}. 
Skipping this file", + info.FullFilePath); } } - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Done", libraryName, ProgressEventType.Ended)); + foreach (var series in scannedSeries.Keys) + { + if (scannedSeries[series].Count > 0 && processSeriesInfos != null) + { + await processSeriesInfos.Invoke(new Tuple>(false, scannedSeries[series])); + } + } } - /// - /// Checks against all folder paths on file if the last scanned is >= the directory's last write down to the second - /// - /// - /// - /// - /// - private bool HasSeriesFolderNotChangedSinceLastScan(IDictionary> seriesPaths, string normalizedFolder, bool forceCheck = false) - { - if (forceCheck) return false; - return seriesPaths.ContainsKey(normalizedFolder) && seriesPaths[normalizedFolder].All(f => f.LastScanned.Truncate(TimeSpan.TicksPerSecond) >= - _directoryService.GetLastWriteTime(normalizedFolder).Truncate(TimeSpan.TicksPerSecond)); + foreach (var folderPath in folders) + { + try + { + await ProcessFiles(folderPath, isLibraryScan, seriesPaths, ProcessFolder, forceCheck); + } + catch (ArgumentException ex) + { + _logger.LogError(ex, "[ScannerService] The directory '{FolderPath}' does not exist", folderPath); + } } - /// - /// Checks if there are any ParserInfos that have a Series that matches the LocalizedSeries field in any other info. If so, - /// rewrites the infos with series name instead of the localized name, so they stack. 
- /// - /// - /// Accel World v01.cbz has Series "Accel World" and Localized Series "World of Acceleration" - /// World of Acceleration v02.cbz has Series "World of Acceleration" - /// After running this code, we'd have: - /// World of Acceleration v02.cbz having Series "Accel World" and Localized Series of "World of Acceleration" - /// - /// A collection of ParserInfos - private void MergeLocalizedSeriesWithSeries(IReadOnlyCollection infos) + await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Done", libraryName, ProgressEventType.Ended)); + } + + /// + /// Checks against all folder paths on file if the last scanned is >= the directory's last write down to the second + /// + /// + /// + /// + /// + private bool HasSeriesFolderNotChangedSinceLastScan(IDictionary> seriesPaths, string normalizedFolder, bool forceCheck = false) + { + if (forceCheck) return false; + + return seriesPaths.ContainsKey(normalizedFolder) && seriesPaths[normalizedFolder].All(f => f.LastScanned.Truncate(TimeSpan.TicksPerSecond) >= + _directoryService.GetLastWriteTime(normalizedFolder).Truncate(TimeSpan.TicksPerSecond)); + } + + /// + /// Checks if there are any ParserInfos that have a Series that matches the LocalizedSeries field in any other info. If so, + /// rewrites the infos with series name instead of the localized name, so they stack. 
+ /// + /// + /// Accel World v01.cbz has Series "Accel World" and Localized Series "World of Acceleration" + /// World of Acceleration v02.cbz has Series "World of Acceleration" + /// After running this code, we'd have: + /// World of Acceleration v02.cbz having Series "Accel World" and Localized Series of "World of Acceleration" + /// + /// A collection of ParserInfos + private void MergeLocalizedSeriesWithSeries(IReadOnlyCollection infos) + { + var hasLocalizedSeries = infos.Any(i => !string.IsNullOrEmpty(i.LocalizedSeries)); + if (!hasLocalizedSeries) return; + + var localizedSeries = infos + .Where(i => !i.IsSpecial) + .Select(i => i.LocalizedSeries) + .Distinct() + .FirstOrDefault(i => !string.IsNullOrEmpty(i)); + if (string.IsNullOrEmpty(localizedSeries)) return; + + // NOTE: If we have multiple series in a folder with a localized title, then this will fail. It will group into one series. User needs to fix this themselves. + string nonLocalizedSeries; + // Normalize this as many of the cases is a capitalization difference + var nonLocalizedSeriesFound = infos + .Where(i => !i.IsSpecial) + .Select(i => i.Series).DistinctBy(Parser.Parser.Normalize).ToList(); + if (nonLocalizedSeriesFound.Count == 1) { - var hasLocalizedSeries = infos.Any(i => !string.IsNullOrEmpty(i.LocalizedSeries)); - if (!hasLocalizedSeries) return; - - var localizedSeries = infos - .Where(i => !i.IsSpecial) - .Select(i => i.LocalizedSeries) - .Distinct() - .FirstOrDefault(i => !string.IsNullOrEmpty(i)); - if (string.IsNullOrEmpty(localizedSeries)) return; - - // NOTE: If we have multiple series in a folder with a localized title, then this will fail. It will group into one series. User needs to fix this themselves. 
- string nonLocalizedSeries; - // Normalize this as many of the cases is a capitalization difference - var nonLocalizedSeriesFound = infos - .Where(i => !i.IsSpecial) - .Select(i => i.Series).DistinctBy(Parser.Parser.Normalize).ToList(); - if (nonLocalizedSeriesFound.Count == 1) + nonLocalizedSeries = nonLocalizedSeriesFound.First(); + } + else + { + // There can be a case where there are multiple series in a folder that causes merging. + if (nonLocalizedSeriesFound.Count > 2) { - nonLocalizedSeries = nonLocalizedSeriesFound.First(); - } - else - { - // There can be a case where there are multiple series in a folder that causes merging. - if (nonLocalizedSeriesFound.Count > 2) - { - _logger.LogError("[ScannerService] There are multiple series within one folder that contain localized series. This will cause them to group incorrectly. Please separate series into their own dedicated folder or ensure there is only 2 potential series (localized and series): {LocalizedSeries}", string.Join(", ", nonLocalizedSeriesFound)); - } - nonLocalizedSeries = nonLocalizedSeriesFound.FirstOrDefault(s => !s.Equals(localizedSeries)); + _logger.LogError("[ScannerService] There are multiple series within one folder that contain localized series. This will cause them to group incorrectly. 
Please separate series into their own dedicated folder or ensure there is only 2 potential series (localized and series): {LocalizedSeries}", string.Join(", ", nonLocalizedSeriesFound)); } + nonLocalizedSeries = nonLocalizedSeriesFound.FirstOrDefault(s => !s.Equals(localizedSeries)); + } - if (string.IsNullOrEmpty(nonLocalizedSeries)) return; + if (string.IsNullOrEmpty(nonLocalizedSeries)) return; - var normalizedNonLocalizedSeries = Parser.Parser.Normalize(nonLocalizedSeries); - foreach (var infoNeedingMapping in infos.Where(i => - !Parser.Parser.Normalize(i.Series).Equals(normalizedNonLocalizedSeries))) - { - infoNeedingMapping.Series = nonLocalizedSeries; - infoNeedingMapping.LocalizedSeries = localizedSeries; - } + var normalizedNonLocalizedSeries = Parser.Parser.Normalize(nonLocalizedSeries); + foreach (var infoNeedingMapping in infos.Where(i => + !Parser.Parser.Normalize(i.Series).Equals(normalizedNonLocalizedSeries))) + { + infoNeedingMapping.Series = nonLocalizedSeries; + infoNeedingMapping.LocalizedSeries = localizedSeries; } } } diff --git a/API/Services/Tasks/Scanner/Parser/DefaultParser.cs b/API/Services/Tasks/Scanner/Parser/DefaultParser.cs index 60317e97d..4bab428a3 100644 --- a/API/Services/Tasks/Scanner/Parser/DefaultParser.cs +++ b/API/Services/Tasks/Scanner/Parser/DefaultParser.cs @@ -1,9 +1,9 @@ using System.IO; using System.Linq; using API.Entities.Enums; -using API.Services; +using API.Parser; -namespace API.Parser; +namespace API.Services.Tasks.Scanner.Parser; public interface IDefaultParser { @@ -36,81 +36,81 @@ public class DefaultParser : IDefaultParser var fileName = _directoryService.FileSystem.Path.GetFileNameWithoutExtension(filePath); ParserInfo ret; - if (Services.Tasks.Scanner.Parser.Parser.IsEpub(filePath)) + if (Parser.IsEpub(filePath)) { - ret = new ParserInfo() + ret = new ParserInfo { - Chapters = Services.Tasks.Scanner.Parser.Parser.ParseChapter(fileName) ?? 
Services.Tasks.Scanner.Parser.Parser.ParseComicChapter(fileName), - Series = Services.Tasks.Scanner.Parser.Parser.ParseSeries(fileName) ?? Services.Tasks.Scanner.Parser.Parser.ParseComicSeries(fileName), - Volumes = Services.Tasks.Scanner.Parser.Parser.ParseVolume(fileName) ?? Services.Tasks.Scanner.Parser.Parser.ParseComicVolume(fileName), + Chapters = Parser.ParseChapter(fileName) ?? Parser.ParseComicChapter(fileName), + Series = Parser.ParseSeries(fileName) ?? Parser.ParseComicSeries(fileName), + Volumes = Parser.ParseVolume(fileName) ?? Parser.ParseComicVolume(fileName), Filename = Path.GetFileName(filePath), - Format = Services.Tasks.Scanner.Parser.Parser.ParseFormat(filePath), + Format = Parser.ParseFormat(filePath), FullFilePath = filePath }; } else { - ret = new ParserInfo() + ret = new ParserInfo { - Chapters = type == LibraryType.Comic ? Services.Tasks.Scanner.Parser.Parser.ParseComicChapter(fileName) : Services.Tasks.Scanner.Parser.Parser.ParseChapter(fileName), - Series = type == LibraryType.Comic ? Services.Tasks.Scanner.Parser.Parser.ParseComicSeries(fileName) : Services.Tasks.Scanner.Parser.Parser.ParseSeries(fileName), - Volumes = type == LibraryType.Comic ? Services.Tasks.Scanner.Parser.Parser.ParseComicVolume(fileName) : Services.Tasks.Scanner.Parser.Parser.ParseVolume(fileName), + Chapters = type == LibraryType.Comic ? Parser.ParseComicChapter(fileName) : Parser.ParseChapter(fileName), + Series = type == LibraryType.Comic ? Parser.ParseComicSeries(fileName) : Parser.ParseSeries(fileName), + Volumes = type == LibraryType.Comic ? 
Parser.ParseComicVolume(fileName) : Parser.ParseVolume(fileName), Filename = Path.GetFileName(filePath), - Format = Services.Tasks.Scanner.Parser.Parser.ParseFormat(filePath), + Format = Parser.ParseFormat(filePath), Title = Path.GetFileNameWithoutExtension(fileName), FullFilePath = filePath }; } - if (Services.Tasks.Scanner.Parser.Parser.IsImage(filePath) && Services.Tasks.Scanner.Parser.Parser.IsCoverImage(filePath)) return null; + if (Parser.IsCoverImage(_directoryService.FileSystem.Path.GetFileName(filePath))) return null; - if (Services.Tasks.Scanner.Parser.Parser.IsImage(filePath)) + if (Parser.IsImage(filePath)) { // Reset Chapters, Volumes, and Series as images are not good to parse information out of. Better to use folders. - ret.Volumes = Services.Tasks.Scanner.Parser.Parser.DefaultVolume; - ret.Chapters = Services.Tasks.Scanner.Parser.Parser.DefaultChapter; + ret.Volumes = Parser.DefaultVolume; + ret.Chapters = Parser.DefaultChapter; ret.Series = string.Empty; } - if (ret.Series == string.Empty || Services.Tasks.Scanner.Parser.Parser.IsImage(filePath)) + if (ret.Series == string.Empty || Parser.IsImage(filePath)) { // Try to parse information out of each folder all the way to rootPath ParseFromFallbackFolders(filePath, rootPath, type, ref ret); } - var edition = Services.Tasks.Scanner.Parser.Parser.ParseEdition(fileName); + var edition = Parser.ParseEdition(fileName); if (!string.IsNullOrEmpty(edition)) { - ret.Series = Services.Tasks.Scanner.Parser.Parser.CleanTitle(ret.Series.Replace(edition, ""), type is LibraryType.Comic); + ret.Series = Parser.CleanTitle(ret.Series.Replace(edition, ""), type is LibraryType.Comic); ret.Edition = edition; } - var isSpecial = type == LibraryType.Comic ? Services.Tasks.Scanner.Parser.Parser.ParseComicSpecial(fileName) : Services.Tasks.Scanner.Parser.Parser.ParseMangaSpecial(fileName); + var isSpecial = type == LibraryType.Comic ? 
Parser.IsComicSpecial(fileName) : Parser.IsMangaSpecial(fileName); // We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that // could cause a problem as Omake is a special term, but there is valid volume/chapter information. - if (ret.Chapters == Services.Tasks.Scanner.Parser.Parser.DefaultChapter && ret.Volumes == Services.Tasks.Scanner.Parser.Parser.DefaultVolume && !string.IsNullOrEmpty(isSpecial)) + if (ret.Chapters == Parser.DefaultChapter && ret.Volumes == Parser.DefaultVolume && isSpecial) { ret.IsSpecial = true; ParseFromFallbackFolders(filePath, rootPath, type, ref ret); // NOTE: This can cause some complications, we should try to be a bit less aggressive to fallback to folder } // If we are a special with marker, we need to ensure we use the correct series name. we can do this by falling back to Folder name - if (Services.Tasks.Scanner.Parser.Parser.HasSpecialMarker(fileName)) + if (Parser.HasSpecialMarker(fileName)) { ret.IsSpecial = true; - ret.Chapters = Services.Tasks.Scanner.Parser.Parser.DefaultChapter; - ret.Volumes = Services.Tasks.Scanner.Parser.Parser.DefaultVolume; + ret.Chapters = Parser.DefaultChapter; + ret.Volumes = Parser.DefaultVolume; ParseFromFallbackFolders(filePath, rootPath, type, ref ret); } if (string.IsNullOrEmpty(ret.Series)) { - ret.Series = Services.Tasks.Scanner.Parser.Parser.CleanTitle(fileName, type is LibraryType.Comic); + ret.Series = Parser.CleanTitle(fileName, type is LibraryType.Comic); } // Pdfs may have .pdf in the series name, remove that - if (Services.Tasks.Scanner.Parser.Parser.IsPdf(filePath) && ret.Series.ToLower().EndsWith(".pdf")) + if (Parser.IsPdf(filePath) && ret.Series.ToLower().EndsWith(".pdf")) { ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length); } @@ -127,35 +127,55 @@ public class DefaultParser : IDefaultParser /// Expects a non-null ParserInfo which this method will populate public void ParseFromFallbackFolders(string filePath, 
string rootPath, LibraryType type, ref ParserInfo ret) { - var fallbackFolders = _directoryService.GetFoldersTillRoot(rootPath, filePath).ToList(); + var fallbackFolders = _directoryService.GetFoldersTillRoot(rootPath, filePath) + .Where(f => !Parser.IsMangaSpecial(f)) + .ToList(); + + if (fallbackFolders.Count == 0) + { + var rootFolderName = _directoryService.FileSystem.DirectoryInfo.FromDirectoryName(rootPath).Name; + var series = Parser.ParseSeries(rootFolderName); + + if (string.IsNullOrEmpty(series)) + { + ret.Series = Parser.CleanTitle(rootFolderName, type is LibraryType.Comic); + return; + } + + if (!string.IsNullOrEmpty(series) && (string.IsNullOrEmpty(ret.Series) || !rootFolderName.Contains(ret.Series))) + { + ret.Series = series; + return; + } + } + for (var i = 0; i < fallbackFolders.Count; i++) { var folder = fallbackFolders[i]; - if (!string.IsNullOrEmpty(Services.Tasks.Scanner.Parser.Parser.ParseMangaSpecial(folder))) continue; - var parsedVolume = type is LibraryType.Manga ? Services.Tasks.Scanner.Parser.Parser.ParseVolume(folder) : Services.Tasks.Scanner.Parser.Parser.ParseComicVolume(folder); - var parsedChapter = type is LibraryType.Manga ? Services.Tasks.Scanner.Parser.Parser.ParseChapter(folder) : Services.Tasks.Scanner.Parser.Parser.ParseComicChapter(folder); + var parsedVolume = type is LibraryType.Manga ? Parser.ParseVolume(folder) : Parser.ParseComicVolume(folder); + var parsedChapter = type is LibraryType.Manga ? 
Parser.ParseChapter(folder) : Parser.ParseComicChapter(folder); - if (!parsedVolume.Equals(Services.Tasks.Scanner.Parser.Parser.DefaultVolume) || !parsedChapter.Equals(Services.Tasks.Scanner.Parser.Parser.DefaultChapter)) + if (!parsedVolume.Equals(Parser.DefaultVolume) || !parsedChapter.Equals(Parser.DefaultChapter)) { - if ((string.IsNullOrEmpty(ret.Volumes) || ret.Volumes.Equals(Services.Tasks.Scanner.Parser.Parser.DefaultVolume)) && !parsedVolume.Equals(Services.Tasks.Scanner.Parser.Parser.DefaultVolume)) - { - ret.Volumes = parsedVolume; - } - if ((string.IsNullOrEmpty(ret.Chapters) || ret.Chapters.Equals(Services.Tasks.Scanner.Parser.Parser.DefaultChapter)) && !parsedChapter.Equals(Services.Tasks.Scanner.Parser.Parser.DefaultChapter)) - { - ret.Chapters = parsedChapter; - } + if ((string.IsNullOrEmpty(ret.Volumes) || ret.Volumes.Equals(Parser.DefaultVolume)) && !parsedVolume.Equals(Parser.DefaultVolume)) + { + ret.Volumes = parsedVolume; + } + if ((string.IsNullOrEmpty(ret.Chapters) || ret.Chapters.Equals(Parser.DefaultChapter)) && !parsedChapter.Equals(Parser.DefaultChapter)) + { + ret.Chapters = parsedChapter; + } } // Generally users group in series folders. 
Let's try to parse series from the top folder if (!folder.Equals(ret.Series) && i == fallbackFolders.Count - 1) { - var series = Services.Tasks.Scanner.Parser.Parser.ParseSeries(folder); + var series = Parser.ParseSeries(folder); if (string.IsNullOrEmpty(series)) { - ret.Series = Services.Tasks.Scanner.Parser.Parser.CleanTitle(folder, type is LibraryType.Comic); + ret.Series = Parser.CleanTitle(folder, type is LibraryType.Comic); break; } diff --git a/API/Services/Tasks/Scanner/Parser/Parser.cs b/API/Services/Tasks/Scanner/Parser/Parser.cs index 8db88333e..8a7e16933 100644 --- a/API/Services/Tasks/Scanner/Parser/Parser.cs +++ b/API/Services/Tasks/Scanner/Parser/Parser.cs @@ -5,1085 +5,1033 @@ using System.Linq; using System.Text.RegularExpressions; using API.Entities.Enums; -namespace API.Services.Tasks.Scanner.Parser +namespace API.Services.Tasks.Scanner.Parser; + +public static class Parser { - public static class Parser + public const string DefaultChapter = "0"; + public const string DefaultVolume = "0"; + private static readonly TimeSpan RegexTimeout = TimeSpan.FromMilliseconds(500); + + public const string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg|\.webp|\.gif)"; + public const string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|\.cb7|\.cbt"; + private const string BookFileExtensions = @"\.epub|\.pdf"; + public const string MacOsMetadataFileStartsWith = @"._"; + + public const string SupportedExtensions = + ArchiveFileExtensions + "|" + ImageFileExtensions + "|" + BookFileExtensions; + + private const RegexOptions MatchOptions = + RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant; + + /// + /// Matches against font-family css syntax. 
Does not match if url import has data: starting, as that is binary data + /// + /// See here for some examples https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face + public static readonly Regex FontSrcUrlRegex = new Regex(@"(?(?:src:\s?)?(?:url|local)\((?!data:)" + "(?:[\"']?)" + @"(?!data:))" + + "(?(?!data:)[^\"']+?)" + "(?[\"']?" + @"\);?)", + MatchOptions, RegexTimeout); + /// + /// https://developer.mozilla.org/en-US/docs/Web/CSS/@import + /// + public static readonly Regex CssImportUrlRegex = new Regex("(@import\\s([\"|']|url\\([\"|']))(?[^'\"]+)([\"|']\\)?);", + MatchOptions | RegexOptions.Multiline, RegexTimeout); + /// + /// Misc css image references, like background-image: url(), border-image, or list-style-image + /// + /// Original prepend: (background|border|list-style)-image:\s?)? + public static readonly Regex CssImageUrlRegex = new Regex(@"(url\((?!data:).(?!data:))" + "(?(?!data:)[^\"']*)" + @"(.\))", + MatchOptions, RegexTimeout); + + + private const string XmlRegexExtensions = @"\.xml"; + private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, + MatchOptions, RegexTimeout); + private static readonly Regex ArchiveFileRegex = new Regex(ArchiveFileExtensions, + MatchOptions, RegexTimeout); + private static readonly Regex ComicInfoArchiveRegex = new Regex(@"\.cbz|\.cbr|\.cb7|\.cbt", + MatchOptions, RegexTimeout); + private static readonly Regex XmlRegex = new Regex(XmlRegexExtensions, + MatchOptions, RegexTimeout); + private static readonly Regex BookFileRegex = new Regex(BookFileExtensions, + MatchOptions, RegexTimeout); + private static readonly Regex CoverImageRegex = new Regex(@"(? 
+ /// Recognizes the Special token only + /// + private static readonly Regex SpecialTokenRegex = new Regex(@"SP\d+", + MatchOptions, RegexTimeout); + + private const string Number = @"\d+(\.\d)?"; + private const string NumberRange = Number + @"(-" + Number + @")?"; + + // Some generic reusage regex patterns: + // - non greedy matching of a string where parenthesis are balanced + public const string BalancedParen = @"(?:[^()]|(?\()|(?<-open>\)))*?(?(open)(?!))"; + // - non greedy matching of a string where square brackets are balanced + public const string BalancedBrack = @"(?:[^\[\]]|(?\[)|(?<-open>\]))*?(?(open)(?!))"; + + private static readonly Regex[] MangaVolumeRegex = new[] { - public const string DefaultChapter = "0"; - public const string DefaultVolume = "0"; - private static readonly TimeSpan RegexTimeout = TimeSpan.FromMilliseconds(500); + // Dance in the Vampire Bund v16-17 + new Regex( + @"(?.*)(\b|_)v(?\d+-?\d+)( |_)", + MatchOptions, RegexTimeout), + // NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar + new Regex( + @"(?.*)(\b|_)(?!\[)(vol\.?)(?\d+(-\d+)?)(?!\])", + MatchOptions, RegexTimeout), + // TODO: In .NET 7, update this to use raw literal strings and apply the NumberRange everywhere + // Historys Strongest Disciple Kenichi_v11_c90-98.zip or Dance in the Vampire Bund v16-17 + new Regex( + @"(?.*)(\b|_)(?!\[)v(?" + NumberRange + @")(?!\])", + MatchOptions, RegexTimeout), + // Kodomo no Jikan vol. 10, [dmntsf.net] One Piece - Digital Colored Comics Vol. 20.5-21.5 Ch. 177 + new Regex( + @"(?.*)(\b|_)(vol\.? ?)(?\d+(\.\d)?(-\d+)?(\.\d)?)", + MatchOptions, RegexTimeout), + // Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb) + new Regex( + @"(vol\.? 
?)(?\d+(\.\d)?)", + MatchOptions, RegexTimeout), + // Tonikaku Cawaii [Volume 11].cbz + new Regex( + @"(volume )(?\d+(\.\d)?)", + MatchOptions, RegexTimeout), + // Tower Of God S01 014 (CBT) (digital).cbz + new Regex( + @"(?.*)(\b|_|)(S(?\d+))", + MatchOptions, RegexTimeout), + // vol_001-1.cbz for MangaPy default naming convention + new Regex( + @"(vol_)(?\d+(\.\d)?)", + MatchOptions, RegexTimeout), + // Chinese Volume: 第n卷 -> Volume n, 第n册 -> Volume n, 幽游白书完全版 第03卷 天下 or 阿衰online 第1册 + new Regex( + @"第(?\d+)(卷|册)", + MatchOptions, RegexTimeout), + // Chinese Volume: 卷n -> Volume n, 册n -> Volume n + new Regex( + @"(卷|册)(?\d+)", + MatchOptions, RegexTimeout), + // Korean Volume: 제n화|권|회|장 -> Volume n, n화|권|회|장 -> Volume n, 63권#200.zip -> Volume 63 (no chapter, #200 is just files inside) + new Regex( + @"제?(?\d+(\.\d)?)(권|회|화|장)", + MatchOptions, RegexTimeout), + // Korean Season: 시즌n -> Season n, + new Regex( + @"시즌(?\d+\-?\d+)", + MatchOptions, RegexTimeout), + // Korean Season: 시즌n -> Season n, n시즌 -> season n + new Regex( + @"(?\d+(\-|~)?\d+?)시즌", + MatchOptions, RegexTimeout), + // Korean Season: 시즌n -> Season n, n시즌 -> season n + new Regex( + @"시즌(?\d+(\-|~)?\d+?)", + MatchOptions, RegexTimeout), + // Japanese Volume: n巻 -> Volume n + new Regex( + @"(?\d+(?:(\-)\d+)?)巻", + MatchOptions, RegexTimeout), + // Russian Volume: Том n -> Volume n, Тома n -> Volume + new Regex( + @"Том(а?)(\.?)(\s|_)?(?\d+(?:(\-)\d+)?)", + MatchOptions, RegexTimeout), + // Russian Volume: n Том -> Volume n + new Regex( + @"(\s|_)?(?\d+(?:(\-)\d+)?)(\s|_)Том(а?)", + MatchOptions, RegexTimeout), + }; - public const string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg|\.webp|\.gif)"; - public const string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|\.cb7|\.cbt"; - private const string BookFileExtensions = @"\.epub|\.pdf"; - public const string MacOsMetadataFileStartsWith = @"._"; + private static readonly Regex[] MangaSeriesRegex = new[] + { + // Russian Volume: 
Том n -> Volume n, Тома n -> Volume + new Regex( + @"(?.+?)Том(а?)(\.?)(\s|_)?(?\d+(?:(\-)\d+)?)", + MatchOptions, RegexTimeout), + // Russian Volume: n Том -> Volume n + new Regex( + @"(?.+?)(\s|_)?(?\d+(?:(\-)\d+)?)(\s|_)Том(а?)", + MatchOptions, RegexTimeout), + // Russian Chapter: n Главa -> Chapter n + new Regex( + @"(?.+?)(?!Том)(?\d+(?:\.\d+|-\d+)?)(\s|_)(Глава|глава|Главы|Глава)", + MatchOptions, RegexTimeout), + // Russian Chapter: Главы n -> Chapter n + new Regex( + @"(?.+?)(Глава|глава|Главы|Глава)(\.?)(\s|_)?(?\d+(?:.\d+|-\d+)?)", + MatchOptions, RegexTimeout), + // Grand Blue Dreaming - SP02 + new Regex( + @"(?.*)(\b|_|-|\s)(?:sp)\d", + MatchOptions, RegexTimeout), + // [SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar, Yuusha Ga Shinda! - Vol.tbd Chapter 27.001 V2 Infection ①.cbz + new Regex( + @"^(?.*)( |_)Vol\.?(\d+|tbd)", + MatchOptions, RegexTimeout), + // Mad Chimera World - Volume 005 - Chapter 026.cbz (couldn't figure out how to get Volume negative lookaround working on below regex), + // The Duke of Death and His Black Maid - Vol. 04 Ch. 054.5 - V4 Omake + new Regex( + @"(?.+?)(\s|_|-)+(?:Vol(ume|\.)?(\s|_|-)+\d+)(\s|_|-)+(?:(Ch|Chapter|Ch)\.?)(\s|_|-)+(?\d+)", + MatchOptions, + RegexTimeout), + // Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip, VanDread-v01-c01.zip + new Regex( + @"(?.*)(\b|_)v(?\d+-?\d*)(\s|_|-)", + MatchOptions, + RegexTimeout), + // Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA], Black Bullet - v4 c17 [batoto] + new Regex( + @"(?.*)( - )(?:v|vo|c|chapters)\d", + MatchOptions, RegexTimeout), + // Kedouin Makoto - Corpse Party Musume, Chapter 19 [Dametrans].zip + new Regex( + @"(?.*)(?:, Chapter )(?\d+)", + MatchOptions, RegexTimeout), + // Please Go Home, Akutsu-San! - Chapter 038.5 - Volume Announcement.cbz, My Charms Are Wasted on Kuroiwa Medaka - Ch. 
37.5 - Volume Extras + new Regex( + @"(?.+?)(\s|_|-)(?!Vol)(\s|_|-)((?:Chapter)|(?:Ch\.))(\s|_|-)(?\d+)", + MatchOptions, RegexTimeout), + // [dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz + new Regex( + @"(?.*) (\b|_|-)(vol)\.?(\s|-|_)?\d+", + MatchOptions, RegexTimeout), + // [xPearse] Kyochuu Rettou Volume 1 [English] [Manga] [Volume Scans] + new Regex( + @"(?.*) (\b|_|-)(vol)(ume)", + MatchOptions, + RegexTimeout), + //Knights of Sidonia c000 (S2 LE BD Omake - BLAME!) [Habanero Scans] + new Regex( + @"(?.*)(\bc\d+\b)", + MatchOptions, RegexTimeout), + //Tonikaku Cawaii [Volume 11], Darling in the FranXX - Volume 01.cbz + new Regex( + @"(?.*)(?: _|-|\[|\()\s?vol(ume)?", + MatchOptions, RegexTimeout), + // Momo The Blood Taker - Chapter 027 Violent Emotion.cbz, Grand Blue Dreaming - SP02 Extra (2019) (Digital) (danke-Empire).cbz + new Regex( + @"^(?(?!Vol).+?)(?:(ch(apter|\.)(\b|_|-|\s))|sp)\d", + MatchOptions, RegexTimeout), + // Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb) + new Regex( + @"(?.*) (\b|_|-)(v|ch\.?|c|s)\d+", + MatchOptions, RegexTimeout), + // Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz + new Regex( + @"(?.*)\s+(?\d+)\s+(?:\(\d{4}\))\s", + MatchOptions, RegexTimeout), + // Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire) + new Regex( + @"(?.*) (-)?(?\d+(?:.\d+|-\d+)?) \(\d{4}\)", + MatchOptions, RegexTimeout), + // Noblesse - Episode 429 (74 Pages).7z + new Regex( + @"(?.*)(\s|_)(?:Episode|Ep\.?)(\s|_)(?\d+(?:.\d+|-\d+)?)", + MatchOptions, RegexTimeout), + // Akame ga KILL! 
ZERO (2016-2019) (Digital) (LuCaZ) + new Regex( + @"(?.*)\(\d", + MatchOptions, RegexTimeout), + // Tonikaku Kawaii (Ch 59-67) (Ongoing) + new Regex( + @"(?.*)(\s|_)\((c\s|ch\s|chapter\s)", + MatchOptions, RegexTimeout), + // Fullmetal Alchemist chapters 101-108 + new Regex( + @"(?.+?)(\s|_|\-)+?chapters(\s|_|\-)+?\d+(\s|_|\-)+?", + MatchOptions, RegexTimeout), + // It's Witching Time! 001 (Digital) (Anonymous1234) + new Regex( + @"(?.+?)(\s|_|\-)+?\d+(\s|_|\-)\(", + MatchOptions, RegexTimeout), + //Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip must be before [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip + // due to duplicate version identifiers in file. + new Regex( + @"(?.*)(v|s)\d+(-\d+)?(_|\s)", + MatchOptions, RegexTimeout), + //[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip + new Regex( + @"(?.*)(v|s)\d+(-\d+)?", + MatchOptions, RegexTimeout), + // Black Bullet (This is very loose, keep towards bottom) + new Regex( + @"(?.*)(_)(v|vo|c|volume)( |_)\d+", + MatchOptions, RegexTimeout), + // [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar + new Regex( + @"(?.*)( |_)(vol\d+)?( |_)(?:Chp\.? ?\d+)", + MatchOptions, RegexTimeout), + // Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1 + new Regex( + @"(?.*)( |_)(?:Chp.? 
?\d+)", + MatchOptions, RegexTimeout), + // Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Chapter 01 + new Regex( + @"^(?!Vol)(?.*)( |_)Chapter( |_)(\d+)", + MatchOptions, RegexTimeout), - public const string SupportedExtensions = - ArchiveFileExtensions + "|" + ImageFileExtensions + "|" + BookFileExtensions; + // Fullmetal Alchemist chapters 101-108.cbz + new Regex( + @"^(?!vol)(?.*)( |_)(chapters( |_)?)\d+-?\d*", + MatchOptions, RegexTimeout), + // Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1 + new Regex( + @"^(?!Vol\.?)(?.*)( |_|-)(?.*)ch\d+-?\d?", + MatchOptions, RegexTimeout), + // Magi - Ch.252-005.cbz + new Regex( + @"(?.*)( ?- ?)Ch\.\d+-?\d*", + MatchOptions, RegexTimeout), + // [BAA]_Darker_than_Black_Omake-1.zip + new Regex( + @"^(?!Vol)(?.*)(-)\d+-?\d*", // This catches a lot of stuff ^(?!Vol)(?.*)( |_)(\d+) + MatchOptions, RegexTimeout), + // Kodoja #001 (March 2016) + new Regex( + @"(?.*)(\s|_|-)#", + MatchOptions, RegexTimeout), + // Baketeriya ch01-05.zip, Akiiro Bousou Biyori - 01.jpg, Beelzebub_172_RHS.zip, Cynthia the Mission 29.rar, A Compendium of Ghosts - 031 - The Third Story_ Part 12 (Digital) (Cobalt001) + new Regex( + @"^(?!Vol\.?)(?!Chapter)(?.+?)(\s|_|-)(?.*)( |_|-)(ch?)\d+", + MatchOptions, RegexTimeout), + // Japanese Volume: n巻 -> Volume n + new Regex( + @"(?.+?)第(?\d+(?:(\-)\d+)?)巻", + MatchOptions, RegexTimeout), - private const RegexOptions MatchOptions = - RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant; + }; - /// - /// Matches against font-family css syntax. Does not match if url import has data: starting, as that is binary data - /// - /// See here for some examples https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face - public static readonly Regex FontSrcUrlRegex = new Regex(@"(?(?:src:\s?)?(?:url|local)\((?!data:)" + "(?:[\"']?)" + @"(?!data:))" - + "(?(?!data:)[^\"']+?)" + "(?[\"']?" 
+ @"\);?)", - MatchOptions, RegexTimeout); - /// - /// https://developer.mozilla.org/en-US/docs/Web/CSS/@import - /// - public static readonly Regex CssImportUrlRegex = new Regex("(@import\\s([\"|']|url\\([\"|']))(?[^'\"]+)([\"|']\\)?);", - MatchOptions | RegexOptions.Multiline, RegexTimeout); - /// - /// Misc css image references, like background-image: url(), border-image, or list-style-image - /// - /// Original prepend: (background|border|list-style)-image:\s?)? - public static readonly Regex CssImageUrlRegex = new Regex(@"(url\((?!data:).(?!data:))" + "(?(?!data:)[^\"']*)" + @"(.\))", - MatchOptions, RegexTimeout); - - - private const string XmlRegexExtensions = @"\.xml"; - private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, - MatchOptions, RegexTimeout); - private static readonly Regex ArchiveFileRegex = new Regex(ArchiveFileExtensions, - MatchOptions, RegexTimeout); - private static readonly Regex ComicInfoArchiveRegex = new Regex(@"\.cbz|\.cbr|\.cb7|\.cbt", - MatchOptions, RegexTimeout); - private static readonly Regex XmlRegex = new Regex(XmlRegexExtensions, - MatchOptions, RegexTimeout); - private static readonly Regex BookFileRegex = new Regex(BookFileExtensions, - MatchOptions, RegexTimeout); - private static readonly Regex CoverImageRegex = new Regex(@"(? - /// Recognizes the Special token only - /// - private static readonly Regex SpecialTokenRegex = new Regex(@"SP\d+", - MatchOptions, RegexTimeout); - - - private static readonly Regex[] MangaVolumeRegex = new[] - { - // Dance in the Vampire Bund v16-17 - new Regex( - @"(?.*)(\b|_)v(?\d+-?\d+)( |_)", - MatchOptions, RegexTimeout), - // NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar - new Regex( - @"(?.*)(\b|_)(?!\[)(vol\.?)(?\d+(-\d+)?)(?!\])", - MatchOptions, RegexTimeout), - // Historys Strongest Disciple Kenichi_v11_c90-98.zip or Dance in the Vampire Bund v16-17 - new Regex( - @"(?.*)(\b|_)(?!\[)v(?\d+(-\d+)?)(?!\])", - MatchOptions, RegexTimeout), - // Kodomo no Jikan vol. 
10, [dmntsf.net] One Piece - Digital Colored Comics Vol. 20.5-21.5 Ch. 177 - new Regex( - @"(?.*)(\b|_)(vol\.? ?)(?\d+(\.\d)?(-\d+)?(\.\d)?)", - MatchOptions, RegexTimeout), - // Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb) - new Regex( - @"(vol\.? ?)(?\d+(\.\d)?)", - MatchOptions, RegexTimeout), - // Tonikaku Cawaii [Volume 11].cbz - new Regex( - @"(volume )(?\d+(\.\d)?)", - MatchOptions, RegexTimeout), - // Tower Of God S01 014 (CBT) (digital).cbz - new Regex( - @"(?.*)(\b|_|)(S(?\d+))", - MatchOptions, RegexTimeout), - // vol_001-1.cbz for MangaPy default naming convention - new Regex( - @"(vol_)(?\d+(\.\d)?)", - MatchOptions, RegexTimeout), - // Chinese Volume: 第n卷 -> Volume n, 第n册 -> Volume n, 幽游白书完全版 第03卷 天下 or 阿衰online 第1册 - new Regex( - @"第(?\d+)(卷|册)", - MatchOptions, RegexTimeout), - // Chinese Volume: 卷n -> Volume n, 册n -> Volume n - new Regex( - @"(卷|册)(?\d+)", - MatchOptions, RegexTimeout), - // Korean Volume: 제n권 -> Volume n, n권 -> Volume n, 63권#200.zip -> Volume 63 (no chapter, #200 is just files inside) - new Regex( - @"제?(?\d+)권", - MatchOptions, RegexTimeout), - // Korean Season: 시즌n -> Season n, - new Regex( - @"시즌(?\d+\-?\d+)", - MatchOptions, RegexTimeout), - // Korean Season: 시즌n -> Season n, n시즌 -> season n - new Regex( - @"(?\d+(\-|~)?\d+?)시즌", - MatchOptions, RegexTimeout), - // Korean Season: 시즌n -> Season n, n시즌 -> season n - new Regex( - @"시즌(?\d+(\-|~)?\d+?)", - MatchOptions, RegexTimeout), - // Japanese Volume: n巻 -> Volume n - new Regex( - @"(?\d+(?:(\-)\d+)?)巻", - MatchOptions, RegexTimeout), - }; - - private static readonly Regex[] MangaSeriesRegex = new[] - { - // Grand Blue Dreaming - SP02 - new Regex( - @"(?.*)(\b|_|-|\s)(?:sp)\d", - MatchOptions, RegexTimeout), - // [SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar, Yuusha Ga Shinda! 
- Vol.tbd Chapter 27.001 V2 Infection ①.cbz - new Regex( - @"^(?.*)( |_)Vol\.?(\d+|tbd)", - MatchOptions, RegexTimeout), - // Mad Chimera World - Volume 005 - Chapter 026.cbz (couldn't figure out how to get Volume negative lookaround working on below regex), - // The Duke of Death and His Black Maid - Vol. 04 Ch. 054.5 - V4 Omake - new Regex( - @"(?.+?)(\s|_|-)+(?:Vol(ume|\.)?(\s|_|-)+\d+)(\s|_|-)+(?:(Ch|Chapter|Ch)\.?)(\s|_|-)+(?\d+)", - MatchOptions, - RegexTimeout), - // Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip, VanDread-v01-c01.zip - new Regex( - @"(?.*)(\b|_)v(?\d+-?\d*)(\s|_|-)", - MatchOptions, - RegexTimeout), - // Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA], Black Bullet - v4 c17 [batoto] - new Regex( - @"(?.*)( - )(?:v|vo|c|chapters)\d", - MatchOptions, RegexTimeout), - // Kedouin Makoto - Corpse Party Musume, Chapter 19 [Dametrans].zip - new Regex( - @"(?.*)(?:, Chapter )(?\d+)", - MatchOptions, RegexTimeout), - // Please Go Home, Akutsu-San! - Chapter 038.5 - Volume Announcement.cbz, My Charms Are Wasted on Kuroiwa Medaka - Ch. 37.5 - Volume Extras - new Regex( - @"(?.+?)(\s|_|-)(?!Vol)(\s|_|-)((?:Chapter)|(?:Ch\.))(\s|_|-)(?\d+)", - MatchOptions, RegexTimeout), - // [dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz - new Regex( - @"(?.*) (\b|_|-)(vol)\.?(\s|-|_)?\d+", - MatchOptions, RegexTimeout), - // [xPearse] Kyochuu Rettou Volume 1 [English] [Manga] [Volume Scans] - new Regex( - @"(?.*) (\b|_|-)(vol)(ume)", - MatchOptions, - RegexTimeout), - //Knights of Sidonia c000 (S2 LE BD Omake - BLAME!) 
[Habanero Scans] - new Regex( - @"(?.*)(\bc\d+\b)", - MatchOptions, RegexTimeout), - //Tonikaku Cawaii [Volume 11], Darling in the FranXX - Volume 01.cbz - new Regex( - @"(?.*)(?: _|-|\[|\()\s?vol(ume)?", - MatchOptions, RegexTimeout), - // Momo The Blood Taker - Chapter 027 Violent Emotion.cbz, Grand Blue Dreaming - SP02 Extra (2019) (Digital) (danke-Empire).cbz - new Regex( - @"^(?(?!Vol).+?)(?:(ch(apter|\.)(\b|_|-|\s))|sp)\d", - MatchOptions, RegexTimeout), - // Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb) - new Regex( - @"(?.*) (\b|_|-)(v|ch\.?|c|s)\d+", - MatchOptions, RegexTimeout), - // Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz - new Regex( - @"(?.*)\s+(?\d+)\s+(?:\(\d{4}\))\s", - MatchOptions, RegexTimeout), - // Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire) - new Regex( - @"(?.*) (-)?(?\d+(?:.\d+|-\d+)?) \(\d{4}\)", - MatchOptions, RegexTimeout), - // Noblesse - Episode 429 (74 Pages).7z - new Regex( - @"(?.*)(\s|_)(?:Episode|Ep\.?)(\s|_)(?\d+(?:.\d+|-\d+)?)", - MatchOptions, RegexTimeout), - // Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ) - new Regex( - @"(?.*)\(\d", - MatchOptions, RegexTimeout), - // Tonikaku Kawaii (Ch 59-67) (Ongoing) - new Regex( - @"(?.*)(\s|_)\((c\s|ch\s|chapter\s)", - MatchOptions, RegexTimeout), - // Fullmetal Alchemist chapters 101-108 - new Regex( - @"(?.+?)(\s|_|\-)+?chapters(\s|_|\-)+?\d+(\s|_|\-)+?", - MatchOptions, RegexTimeout), - // It's Witching Time! 001 (Digital) (Anonymous1234) - new Regex( - @"(?.+?)(\s|_|\-)+?\d+(\s|_|\-)\(", - MatchOptions, RegexTimeout), - //Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip must be before [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip - // due to duplicate version identifiers in file. 
- new Regex( - @"(?.*)(v|s)\d+(-\d+)?(_|\s)", - MatchOptions, RegexTimeout), - //[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip - new Regex( - @"(?.*)(v|s)\d+(-\d+)?", - MatchOptions, RegexTimeout), - // Black Bullet (This is very loose, keep towards bottom) - new Regex( - @"(?.*)(_)(v|vo|c|volume)( |_)\d+", - MatchOptions, RegexTimeout), - // [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar - new Regex( - @"(?.*)( |_)(vol\d+)?( |_)(?:Chp\.? ?\d+)", - MatchOptions, RegexTimeout), - // Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1 - new Regex( - @"(?.*)( |_)(?:Chp.? ?\d+)", - MatchOptions, RegexTimeout), - // Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Chapter 01 - new Regex( - @"^(?!Vol)(?.*)( |_)Chapter( |_)(\d+)", - MatchOptions, RegexTimeout), - - // Fullmetal Alchemist chapters 101-108.cbz - new Regex( - @"^(?!vol)(?.*)( |_)(chapters( |_)?)\d+-?\d*", - MatchOptions, RegexTimeout), - // Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1 - new Regex( - @"^(?!Vol\.?)(?.*)( |_|-)(?.*)ch\d+-?\d?", - MatchOptions, RegexTimeout), - // Magi - Ch.252-005.cbz - new Regex( - @"(?.*)( ?- ?)Ch\.\d+-?\d*", - MatchOptions, RegexTimeout), - // [BAA]_Darker_than_Black_Omake-1.zip - new Regex( - @"^(?!Vol)(?.*)(-)\d+-?\d*", // This catches a lot of stuff ^(?!Vol)(?.*)( |_)(\d+) - MatchOptions, RegexTimeout), - // Kodoja #001 (March 2016) - new Regex( - @"(?.*)(\s|_|-)#", - MatchOptions, RegexTimeout), - // Baketeriya ch01-05.zip, Akiiro Bousou Biyori - 01.jpg, Beelzebub_172_RHS.zip, Cynthia the Mission 29.rar, A Compendium of Ghosts - 031 - The Third Story_ Part 12 (Digital) (Cobalt001) - new Regex( - @"^(?!Vol\.?)(?!Chapter)(?.+?)(\s|_|-)(?.*)( |_|-)(ch?)\d+", - MatchOptions, RegexTimeout), - // Japanese Volume: n巻 -> Volume n - new Regex( - @"(?.+?)第(?\d+(?:(\-)\d+)?)巻", - MatchOptions, RegexTimeout), - }; - - private static readonly Regex[] ComicSeriesRegex = new[] - { - // Tintin - T22 Vol 714 pour Sydney - new Regex( - 
@"(?.+?)\s?(\b|_|-)\s?((vol|tome|t)\.?)(?\d+(-\d+)?)", - MatchOptions, RegexTimeout), - // Invincible Vol 01 Family matters (2005) (Digital) - new Regex( - @"(?.+?)(\b|_)((vol|tome|t)\.?)(\s|_)(?\d+(-\d+)?)", - MatchOptions, RegexTimeout), - // Batman Beyond 2.0 001 (2013) - new Regex( - @"^(?.+?\S\.\d) (?\d+)", - MatchOptions, RegexTimeout), - // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS) - new Regex( + private static readonly Regex[] ComicSeriesRegex = new[] + { + // Russian Volume: Том n -> Volume n, Тома n -> Volume + new Regex( + @"(?.+?)Том(а?)(\.?)(\s|_)?(?\d+(?:(\-)\d+)?)", + MatchOptions, RegexTimeout), + // Russian Volume: n Том -> Volume n + new Regex( + @"(?.+?)(\s|_)?(?\d+(?:(\-)\d+)?)(\s|_)Том(а?)", + MatchOptions, RegexTimeout), + // Russian Chapter: n Главa -> Chapter n + new Regex( + @"(?.+?)(?!Том)(?\d+(?:\.\d+|-\d+)?)(\s|_)(Глава|глава|Главы|Глава)", + MatchOptions, RegexTimeout), + // Russian Chapter: Главы n -> Chapter n + new Regex( + @"(?.+?)(Глава|глава|Главы|Глава)(\.?)(\s|_)?(?\d+(?:.\d+|-\d+)?)", + MatchOptions, RegexTimeout), + // Tintin - T22 Vol 714 pour Sydney + new Regex( + @"(?.+?)\s?(\b|_|-)\s?((vol|tome|t)\.?)(?\d+(-\d+)?)", + MatchOptions, RegexTimeout), + // Invincible Vol 01 Family matters (2005) (Digital) + new Regex( + @"(?.+?)(\b|_)((vol|tome|t)\.?)(\s|_)(?\d+(-\d+)?)", + MatchOptions, RegexTimeout), + // Batman Beyond 2.0 001 (2013) + new Regex( + @"^(?.+?\S\.\d) (?\d+)", + MatchOptions, RegexTimeout), + // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS) + new Regex( @"^(?\d+)\s(-\s|_)(?.*(\d{4})?)( |_)(\(|\d+)", - MatchOptions, RegexTimeout), - // 01 Spider-Man & Wolverine 01.cbr - new Regex( + MatchOptions, RegexTimeout), + // 01 Spider-Man & Wolverine 01.cbr + new Regex( @"^(?\d+)\s(?:-\s)(?.*) (\d+)?", - MatchOptions, RegexTimeout), - // Batman & Wildcat (1 of 3) - new Regex( + MatchOptions, RegexTimeout), + // Batman & Wildcat (1 of 3) + new Regex( @"(?.*(\d{4})?)( 
|_)(?:\((?\d+) of \d+)", - MatchOptions, RegexTimeout), - // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus), Aldebaran-Antares-t6 - new Regex( - @"^(?.+?)(?: |_|-)(v|t)\d+", - MatchOptions, RegexTimeout), - // Amazing Man Comics chapter 25 - new Regex( - @"^(?.+?)(?: |_)c(hapter) \d+", - MatchOptions, RegexTimeout), - // Amazing Man Comics issue #25 - new Regex( - @"^(?.+?)(?: |_)i(ssue) #\d+", - MatchOptions, RegexTimeout), - // Batman Wayne Family Adventures - Ep. 001 - Moving In - new Regex( - @"^(?.+?)(\s|_|-)(?:Ep\.?)(\s|_|-)+\d+", - MatchOptions, RegexTimeout), - // Batgirl Vol.2000 #57 (December, 2004) - new Regex( - @"^(?.+?)Vol\.?\s?#?(?:\d+)", - MatchOptions, RegexTimeout), - // Batman & Robin the Teen Wonder #0 - new Regex( - @"^(?.*)(?: |_)#\d+", - MatchOptions, RegexTimeout), - // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) - new Regex( - @"^(?.+?)(?: \d+)", - MatchOptions, RegexTimeout), - // Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005) - new Regex( - @"^(?.+?)(?: |_)(?\d+)", - MatchOptions, RegexTimeout), - // The First Asterix Frieze (WebP by Doc MaKS) - new Regex( - @"^(?.*)(?: |_)(?!\(\d{4}|\d{4}-\d{2}\))\(", - MatchOptions, RegexTimeout), - // spawn-123, spawn-chapter-123 (from https://github.com/Girbons/comics-downloader) - new Regex( - @"^(?.+?)-(chapter-)?(?\d+)", - MatchOptions, RegexTimeout), - // MUST BE LAST: Batman & Daredevil - King of New York - new Regex( - @"^(?.*)", - MatchOptions, RegexTimeout), + MatchOptions, RegexTimeout), + // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus), Aldebaran-Antares-t6 + new Regex( + @"^(?.+?)(?: |_|-)(v|t)\d+", + MatchOptions, RegexTimeout), + // Amazing Man Comics chapter 25 + new Regex( + @"^(?.+?)(?: |_)c(hapter) \d+", + MatchOptions, RegexTimeout), + // Amazing Man Comics issue #25 + new Regex( + @"^(?.+?)(?: |_)i(ssue) #\d+", + MatchOptions, RegexTimeout), + // Batman Wayne Family Adventures - Ep. 001 - Moving In + new Regex( + @"^(?.+?)(\s|_|-)(?:Ep\.?)(\s|_|-)+\d+", + MatchOptions, RegexTimeout), + // Batgirl Vol.2000 #57 (December, 2004) + new Regex( + @"^(?.+?)Vol\.?\s?#?(?:\d+)", + MatchOptions, RegexTimeout), + // Batman & Robin the Teen Wonder #0 + new Regex( + @"^(?.*)(?: |_)#\d+", + MatchOptions, RegexTimeout), + // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) + new Regex( + @"^(?.+?)(?: \d+)", + MatchOptions, RegexTimeout), + // Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005) + new Regex( + @"^(?.+?)(?: |_)(?\d+)", + MatchOptions, RegexTimeout), + // The First Asterix Frieze (WebP by Doc MaKS) + new Regex( + @"^(?.*)(?: |_)(?!\(\d{4}|\d{4}-\d{2}\))\(", + MatchOptions, RegexTimeout), + // spawn-123, spawn-chapter-123 (from https://github.com/Girbons/comics-downloader) + new Regex( + @"^(?.+?)-(chapter-)?(?\d+)", + MatchOptions, RegexTimeout), + // MUST BE LAST: Batman & Daredevil - King of New York + new Regex( + @"^(?.*)", + MatchOptions, RegexTimeout), + }; + + private static readonly Regex[] ComicVolumeRegex = new[] + { + // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) + new Regex( + @"^(?.*)(?: |_)(t|v)(?\d+)", + MatchOptions, RegexTimeout), + // Batgirl Vol.2000 #57 (December, 2004) + new Regex( + @"^(?.+?)(?:\s|_)(v|vol|tome|t)\.?(\s|_)?(?\d+)", + MatchOptions, RegexTimeout), + // Chinese Volume: 第n卷 -> Volume n, 第n册 -> Volume n, 幽游白书完全版 第03卷 天下 or 阿衰online 第1册 + new Regex( + @"第(?\d+)(卷|册)", + MatchOptions, RegexTimeout), + // Chinese Volume: 卷n -> Volume n, 册n -> Volume n + new Regex( + @"(卷|册)(?\d+)", + MatchOptions, RegexTimeout), + // Korean Volume: 제n권 -> Volume n, n권 -> Volume n, 63권#200.zip + new Regex( + @"제?(?\d+)권", + MatchOptions, RegexTimeout), + // Japanese Volume: n巻 -> Volume n + new Regex( + @"(?\d+(?:(\-)\d+)?)巻", + MatchOptions, RegexTimeout), + // Russian Volume: Том n -> Volume n, Тома n -> Volume + new Regex( + @"Том(а?)(\.?)(\s|_)?(?\d+(?:(\-)\d+)?)", + MatchOptions, RegexTimeout), + // Russian Volume: n Том -> Volume n + new Regex( + @"(\s|_)?(?\d+(?:(\-)\d+)?)(\s|_)Том(а?)", + MatchOptions, RegexTimeout), + }; + + private static readonly Regex[] ComicChapterRegex = new[] + { + // Batman & Wildcat (1 of 3) + new Regex( + @"(?.*(\d{4})?)( |_)(?:\((?\d+) of \d+)", + MatchOptions, RegexTimeout), + // Batman Beyond 04 (of 6) (1999) + new Regex( + @"(?.+?)(?\d+)(\s|_|-)?\(of", + MatchOptions, RegexTimeout), + // Batman Beyond 2.0 001 (2013) + new Regex( + @"^(?.+?\S\.\d) 
(?\d+)", + MatchOptions, RegexTimeout), + // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) + new Regex( + @"^(?.+?)(?: |_)v(?\d+)(?: |_)(c? ?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)", + MatchOptions, RegexTimeout), + // Batman & Robin the Teen Wonder #0 + new Regex( + @"^(?.+?)(?:\s|_)#(?\d+)", + MatchOptions, RegexTimeout), + // Batman 2016 - Chapter 01, Batman 2016 - Issue 01, Batman 2016 - Issue #01 + new Regex( + @"^(?.+?)((c(hapter)?)|issue)(_|\s)#?(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)", + MatchOptions, RegexTimeout), + // Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr + new Regex( + @"^(?.+?)(?:\s|_)(c? ?(chapter)?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)-", + MatchOptions, RegexTimeout), + // Batgirl Vol.2000 #57 (December, 2004) + new Regex( + @"^(?.+?)(?:vol\.?\d+)\s#(?\d+)", + MatchOptions, + RegexTimeout), + // Russian Chapter: Главы n -> Chapter n + new Regex( + @"(Глава|глава|Главы|Глава)(\.?)(\s|_)?(?\d+(?:.\d+|-\d+)?)", + MatchOptions, RegexTimeout), + // Russian Chapter: n Главa -> Chapter n + new Regex( + @"(?!Том)(?\d+(?:\.\d+|-\d+)?)(\s|_)(Глава|глава|Главы|Глава)", + MatchOptions, RegexTimeout), + // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) + new Regex( + @"^(?.+?)(?: (?\d+))", + MatchOptions, RegexTimeout), + // Saga 001 (2012) (Digital) (Empire-Zone) + new Regex( + @"(?.+?)(?: |_)(c? 
?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)\s\(\d{4}", + MatchOptions, RegexTimeout), + // Amazing Man Comics chapter 25 + new Regex( + @"^(?!Vol)(?.+?)( |_)c(hapter)( |_)(?\d*)", + MatchOptions, RegexTimeout), + // Amazing Man Comics issue #25 + new Regex( + @"^(?!Vol)(?.+?)( |_)i(ssue)( |_) #(?\d*)", + MatchOptions, RegexTimeout), + // spawn-123, spawn-chapter-123 (from https://github.com/Girbons/comics-downloader) + new Regex( + @"^(?.+?)-(chapter-)?(?\d+)", + MatchOptions, RegexTimeout), + }; + + private static readonly Regex[] MangaChapterRegex = new[] + { + // Historys Strongest Disciple Kenichi_v11_c90-98.zip, ...c90.5-100.5 + new Regex( + @"(\b|_)(c|ch)(\.?\s?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)", + MatchOptions, RegexTimeout), + // [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip + new Regex( + @"v\d+\.(\s|_)(?\d+(?:.\d+|-\d+)?)", + MatchOptions, RegexTimeout), + // Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz (Rare case, if causes issue remove) + new Regex( + @"^(?.*)(?: |_)#(?\d+)", + MatchOptions, RegexTimeout), + // Green Worldz - Chapter 027, Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 11-10 + new Regex( + @"^(?!Vol)(?.*)\s?(?\d+(?:\.?[\d-]+)?)", + MatchOptions, RegexTimeout), + // Russian Chapter: Главы n -> Chapter n + new Regex( + @"(Глава|глава|Главы|Глава)(\.?)(\s|_)?(?\d+(?:.\d+|-\d+)?)", + MatchOptions, RegexTimeout), + // Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz, Hinowa ga CRUSH! 
018.5 (2019) (Digital) (LuCaZ).cbz + new Regex( + @"^(?!Vol)(?.+?)(?\d+(?:\.\d+|-\d+)?)(?:\s\(\d{4}\))?(\b|_|-)", + MatchOptions, RegexTimeout), + // Tower Of God S01 014 (CBT) (digital).cbz + new Regex( + @"(?.*)\sS(?\d+)\s(?\d+(?:.\d+|-\d+)?)", + MatchOptions, RegexTimeout), + // Beelzebub_01_[Noodles].zip, Beelzebub_153b_RHS.zip + new Regex( + @"^((?!v|vo|vol|Volume).)*(\s|_)(?\.?\d+(?:.\d+|-\d+)?)(?b)?(\s|_|\[|\()", + MatchOptions, RegexTimeout), + // Yumekui-Merry_DKThias_Chapter21.zip + new Regex( + @"Chapter(?\d+(-\d+)?)", //(?:.\d+|-\d+)? + MatchOptions, RegexTimeout), + // [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar + new Regex( + @"(?.*)(\s|_)(vol\d+)?(\s|_)Chp\.? ?(?\d+)", + MatchOptions, RegexTimeout), + // Vol 1 Chapter 2 + new Regex( + @"(?((vol|volume|v))?(\s|_)?\.?\d+)(\s|_)(Chp|Chapter)\.?(\s|_)?(?\d+)", + MatchOptions, RegexTimeout), + // Chinese Chapter: 第n话 -> Chapter n, 【TFO汉化&Petit汉化】迷你偶像漫画第25话 + new Regex( + @"第(?\d+)话", + MatchOptions, RegexTimeout), + // Korean Chapter: 제n화 -> Chapter n, 가디언즈 오브 갤럭시 죽음의 보석.E0008.7화#44 + new Regex( + @"제?(?\d+\.?\d+)(회|화|장)", + MatchOptions, RegexTimeout), + // Korean Chapter: 第10話 -> Chapter n, [ハレム]ナナとカオル ~高校生のSMごっこ~ 第1話 + new Regex( + @"第?(?\d+(?:\.\d+|-\d+)?)話", + MatchOptions, RegexTimeout), + // Russian Chapter: n Главa -> Chapter n + new Regex( + @"(?!Том)(?\d+(?:\.\d+|-\d+)?)(\s|_)(Глава|глава|Главы|Глава)", + MatchOptions, RegexTimeout), + }; + + private static readonly Regex MangaEditionRegex = new Regex( + // Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz + // To Love Ru v01 Uncensored (Ch.001-007) + @"\b(?:Omnibus(?:\s?Edition)?|Uncensored)\b", + MatchOptions, RegexTimeout + ); + + // Matches [Complete], release tags like [kmts] but not [ Complete ] or [kmts ] + private const string TagsInBrackets = $@"\[(?!\s){BalancedBrack}(? 
FormatTagSpecialKeywords = ImmutableArray.Create( + "Special", "Reference", "Director's Cut", "Box Set", "Box-Set", "Annual", "Anthology", "Epilogue", + "One Shot", "One-Shot", "Prologue", "TPB", "Trade Paper Back", "Omnibus", "Compendium", "Absolute", "Graphic Novel", + "GN", "FCBD"); + + private static readonly char[] LeadingZeroesTrimChars = new[] { '0' }; + + private static readonly char[] SpacesAndSeparators = { '\0', '\t', '\r', ' ', '-', ','}; + + public static MangaFormat ParseFormat(string filePath) + { + if (IsArchive(filePath)) return MangaFormat.Archive; + if (IsImage(filePath)) return MangaFormat.Image; + if (IsEpub(filePath)) return MangaFormat.Epub; + if (IsPdf(filePath)) return MangaFormat.Pdf; + return MangaFormat.Unknown; + } + + public static string ParseEdition(string filePath) + { + filePath = ReplaceUnderscores(filePath); + var match = MangaEditionRegex.Match(filePath); + return match.Success ? match.Value : string.Empty; + } + + /// + /// If the file has SP marker. 
+ /// + /// + /// + public static bool HasSpecialMarker(string filePath) + { + return SpecialMarkerRegex.IsMatch(filePath); + } + + public static bool IsMangaSpecial(string filePath) + { + filePath = ReplaceUnderscores(filePath); + return MangaSpecialRegex.IsMatch(filePath); + } + + public static bool IsComicSpecial(string filePath) + { + filePath = ReplaceUnderscores(filePath); + return ComicSpecialRegex.IsMatch(filePath); + } + + public static string ParseSeries(string filename) + { + foreach (var regex in MangaSeriesRegex) + { + var matches = regex.Matches(filename); + foreach (var group in matches.Select(match => match.Groups["Series"]) + .Where(group => group.Success && group != Match.Empty)) + { + return CleanTitle(group.Value); + } + } + + return string.Empty; + } + public static string ParseComicSeries(string filename) + { + foreach (var regex in ComicSeriesRegex) + { + var matches = regex.Matches(filename); + foreach (var group in matches.Select(match => match.Groups["Series"]) + .Where(group => group.Success && group != Match.Empty)) + { + return CleanTitle(group.Value, true); + } + } + + return string.Empty; + } + + public static string ParseVolume(string filename) + { + foreach (var regex in MangaVolumeRegex) + { + var matches = regex.Matches(filename); + foreach (var group in matches.Select(match => match.Groups)) + { + if (!group["Volume"].Success || group["Volume"] == Match.Empty) continue; + + var value = group["Volume"].Value; + var hasPart = group["Part"].Success; + return FormatValue(value, hasPart); + } + } + + return DefaultVolume; + } + + public static string ParseComicVolume(string filename) + { + foreach (var regex in ComicVolumeRegex) + { + var matches = regex.Matches(filename); + foreach (var group in matches.Select(match => match.Groups)) + { + if (!group["Volume"].Success || group["Volume"] == Match.Empty) continue; + + var value = group["Volume"].Value; + var hasPart = group["Part"].Success; + return FormatValue(value, hasPart); + } + } 
+ + return DefaultVolume; + } + + private static string FormatValue(string value, bool hasPart) + { + if (!value.Contains('-')) + { + return RemoveLeadingZeroes(hasPart ? AddChapterPart(value) : value); + } + + var tokens = value.Split("-"); + var from = RemoveLeadingZeroes(tokens[0]); + if (tokens.Length != 2) return from; + + var to = RemoveLeadingZeroes(hasPart ? AddChapterPart(tokens[1]) : tokens[1]); + return $"{from}-{to}"; + } + + public static string ParseChapter(string filename) + { + foreach (var regex in MangaChapterRegex) + { + var matches = regex.Matches(filename); + foreach (var groups in matches.Select(match => match.Groups)) + { + if (!groups["Chapter"].Success || groups["Chapter"] == Match.Empty) continue; + + var value = groups["Chapter"].Value; + var hasPart = groups["Part"].Success; + + return FormatValue(value, hasPart); + } + } + + return DefaultChapter; + } + + private static string AddChapterPart(string value) + { + if (value.Contains('.')) + { + return value; + } + + return $"{value}.5"; + } + + public static string ParseComicChapter(string filename) + { + foreach (var regex in ComicChapterRegex) + { + var matches = regex.Matches(filename); + foreach (var groups in matches.Select(match => match.Groups)) + { + if (!groups["Chapter"].Success || groups["Chapter"] == Match.Empty) continue; + var value = groups["Chapter"].Value; + var hasPart = groups["Part"].Success; + return FormatValue(value, hasPart); + + } + } + + return DefaultChapter; + } + + private static string RemoveEditionTagHolders(string title) + { + title = CleanupRegex.Replace(title, string.Empty); + + title = MangaEditionRegex.Replace(title, string.Empty); + + return title; + } + + private static string RemoveMangaSpecialTags(string title) + { + return MangaSpecialRegex.Replace(title, string.Empty); + } + + private static string RemoveEuropeanTags(string title) + { + return EuropeanComicRegex.Replace(title, string.Empty); + } + + private static string 
RemoveComicSpecialTags(string title) + { + return ComicSpecialRegex.Replace(title, string.Empty); + } + + + + /// + /// Translates _ -> spaces, trims front and back of string, removes release groups + /// + /// Hippos_the_Great [Digital], -> Hippos the Great + /// + /// + /// + /// + /// + + public static string CleanTitle(string title, bool isComic = false) + { + + title = ReplaceUnderscores(title); + + title = RemoveEditionTagHolders(title); + + if (isComic) + { + title = RemoveComicSpecialTags(title); + title = RemoveEuropeanTags(title); + } + else + { + title = RemoveMangaSpecialTags(title); + } + + title = title.Trim(SpacesAndSeparators); + + title = EmptySpaceRegex.Replace(title, " "); + + return title; + } + + + /// + /// Pads the start of a number string with 0's so ordering works fine if there are over 100 items. + /// Handles ranges (ie 4-8) -> (004-008). + /// + /// + /// A zero padded number + public static string PadZeros(string number) + { + if (!number.Contains('-')) return PerformPadding(number); + + var tokens = number.Split("-"); + return $"{PerformPadding(tokens[0])}-{PerformPadding(tokens[1])}"; + } + + private static string PerformPadding(string number) + { + var num = int.Parse(number); + return num switch + { + < 10 => "00" + num, + < 100 => "0" + num, + _ => number }; + } - private static readonly Regex[] ComicVolumeRegex = new[] + public static string RemoveLeadingZeroes(string title) + { + var ret = title.TrimStart(LeadingZeroesTrimChars); + return string.IsNullOrEmpty(ret) ? 
"0" : ret; + } + + public static bool IsArchive(string filePath) + { + return ArchiveFileRegex.IsMatch(Path.GetExtension(filePath)); + } + public static bool IsComicInfoExtension(string filePath) + { + return ComicInfoArchiveRegex.IsMatch(Path.GetExtension(filePath)); + } + public static bool IsBook(string filePath) + { + return BookFileRegex.IsMatch(Path.GetExtension(filePath)); + } + + public static bool IsImage(string filePath) + { + return !filePath.StartsWith(".") && ImageRegex.IsMatch(Path.GetExtension(filePath)); + } + + public static bool IsXml(string filePath) + { + return XmlRegex.IsMatch(Path.GetExtension(filePath)); + } + + + public static float MinNumberFromRange(string range) + { + try { - // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) - new Regex( - @"^(?.*)(?: |_)(t|v)(?\d+)", - MatchOptions, RegexTimeout), - // Batgirl Vol.2000 #57 (December, 2004) - new Regex( - @"^(?.+?)(?:\s|_)(v|vol|tome|t)\.?(\s|_)?(?\d+)", - MatchOptions, RegexTimeout), - // Chinese Volume: 第n卷 -> Volume n, 第n册 -> Volume n, 幽游白书完全版 第03卷 天下 or 阿衰online 第1册 - new Regex( - @"第(?\d+)(卷|册)", - MatchOptions, RegexTimeout), - // Chinese Volume: 卷n -> Volume n, 册n -> Volume n - new Regex( - @"(卷|册)(?\d+)", - MatchOptions, RegexTimeout), - // Korean Volume: 제n권 -> Volume n, n권 -> Volume n, 63권#200.zip - new Regex( - @"제?(?\d+)권", - MatchOptions, RegexTimeout), - // Japanese Volume: n巻 -> Volume n - new Regex( - @"(?\d+(?:(\-)\d+)?)巻", - MatchOptions, RegexTimeout), - }; - - private static readonly Regex[] ComicChapterRegex = new[] - { - // Batman & Wildcat (1 of 3) - new Regex( - @"(?.*(\d{4})?)( |_)(?:\((?\d+) of \d+)", - MatchOptions, RegexTimeout), - // Batman Beyond 04 (of 6) (1999) - new Regex( - @"(?.+?)(?\d+)(\s|_|-)?\(of", - MatchOptions, RegexTimeout), - // Batman Beyond 2.0 001 (2013) - new Regex( - @"^(?.+?\S\.\d) (?\d+)", - MatchOptions, RegexTimeout), - // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) - new Regex( - @"^(?.+?)(?: 
|_)v(?\d+)(?: |_)(c? ?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)", - MatchOptions, RegexTimeout), - // Batman & Robin the Teen Wonder #0 - new Regex( - @"^(?.+?)(?:\s|_)#(?\d+)", - MatchOptions, RegexTimeout), - // Batman 2016 - Chapter 01, Batman 2016 - Issue 01, Batman 2016 - Issue #01 - new Regex( - @"^(?.+?)((c(hapter)?)|issue)(_|\s)#?(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)", - MatchOptions, RegexTimeout), - // Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr - new Regex( - @"^(?.+?)(?:\s|_)(c? ?(chapter)?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)-", - MatchOptions, RegexTimeout), - // Batgirl Vol.2000 #57 (December, 2004) - new Regex( - @"^(?.+?)(?:vol\.?\d+)\s#(?\d+)", - MatchOptions, - RegexTimeout), - // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) - new Regex( - @"^(?.+?)(?: (?\d+))", - MatchOptions, RegexTimeout), - - // Saga 001 (2012) (Digital) (Empire-Zone) - new Regex( - @"(?.+?)(?: |_)(c? 
?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)\s\(\d{4}", - MatchOptions, RegexTimeout), - // Amazing Man Comics chapter 25 - new Regex( - @"^(?!Vol)(?.+?)( |_)c(hapter)( |_)(?\d*)", - MatchOptions, RegexTimeout), - // Amazing Man Comics issue #25 - new Regex( - @"^(?!Vol)(?.+?)( |_)i(ssue)( |_) #(?\d*)", - MatchOptions, RegexTimeout), - // spawn-123, spawn-chapter-123 (from https://github.com/Girbons/comics-downloader) - new Regex( - @"^(?.+?)-(chapter-)?(?\d+)", - MatchOptions, RegexTimeout), - - }; - - private static readonly Regex[] ReleaseGroupRegex = new[] - { - // [TrinityBAKumA Finella&anon], [BAA]_, [SlowManga&OverloadScans], [batoto] - new Regex(@"(?:\[(?(?!\s).+?(?(?!\s).+?(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)", - MatchOptions, RegexTimeout), - // [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip - new Regex( - @"v\d+\.(?\d+(?:.\d+|-\d+)?)", - MatchOptions, RegexTimeout), - // Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz (Rare case, if causes issue remove) - new Regex( - @"^(?.*)(?: |_)#(?\d+)", - MatchOptions, RegexTimeout), - // Green Worldz - Chapter 027, Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 11-10 - new Regex( - @"^(?!Vol)(?.*)\s?(?\d+(?:\.?[\d-]+)?)", - MatchOptions, RegexTimeout), - // Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz, Hinowa ga CRUSH! 018.5 (2019) (Digital) (LuCaZ).cbz - new Regex( - @"^(?!Vol)(?.+?)(?\d+(?:\.\d+|-\d+)?)(?:\s\(\d{4}\))?(\b|_|-)", - MatchOptions, RegexTimeout), - // Tower Of God S01 014 (CBT) (digital).cbz - new Regex( - @"(?.*)\sS(?\d+)\s(?\d+(?:.\d+|-\d+)?)", - MatchOptions, RegexTimeout), - // Beelzebub_01_[Noodles].zip, Beelzebub_153b_RHS.zip - new Regex( - @"^((?!v|vo|vol|Volume).)*(\s|_)(?\.?\d+(?:.\d+|-\d+)?)(?b)?(\s|_|\[|\()", - MatchOptions, RegexTimeout), - // Yumekui-Merry_DKThias_Chapter21.zip - new Regex( - @"Chapter(?\d+(-\d+)?)", //(?:.\d+|-\d+)? 
- MatchOptions, RegexTimeout), - // [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar - new Regex( - @"(?.*)(\s|_)(vol\d+)?(\s|_)Chp\.? ?(?\d+)", - MatchOptions, RegexTimeout), - // Vol 1 Chapter 2 - new Regex( - @"(?((vol|volume|v))?(\s|_)?\.?\d+)(\s|_)(Chp|Chapter)\.?(\s|_)?(?\d+)", - MatchOptions, RegexTimeout), - // Chinese Chapter: 第n话 -> Chapter n, 【TFO汉化&Petit汉化】迷你偶像漫画第25话 - new Regex( - @"第(?\d+)话", - MatchOptions, RegexTimeout), - // Korean Chapter: 제n화 -> Chapter n, 가디언즈 오브 갤럭시 죽음의 보석.E0008.7화#44 - new Regex( - @"제?(?\d+\.?\d+)(화|장)", - MatchOptions, RegexTimeout), - // Korean Chapter: 第10話 -> Chapter n, [ハレム]ナナとカオル ~高校生のSMごっこ~ 第1話 - new Regex( - @"第?(?\d+(?:.\d+|-\d+)?)話", - MatchOptions, RegexTimeout), - }; - private static readonly Regex[] MangaEditionRegex = { - // Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz - new Regex( - @"(\b|_)(?Omnibus(( |_)?Edition)?)(\b|_)?", - MatchOptions, RegexTimeout), - // To Love Ru v01 Uncensored (Ch.001-007) - new Regex( - @"(\b|_)(?Uncensored)(\b|_)", - MatchOptions, RegexTimeout), - }; - - private static readonly Regex[] CleanupRegex = - { - // (), {}, [] - new Regex( - @"(?(\{\}|\[\]|\(\)))", - MatchOptions, RegexTimeout), - // (Complete) - new Regex( - @"(?(\{Complete\}|\[Complete\]|\(Complete\)))", - MatchOptions, RegexTimeout), - // Anything in parenthesis - new Regex( - @"\(.*\)", - MatchOptions, RegexTimeout), - }; - - private static readonly Regex[] MangaSpecialRegex = - { - // All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle. - new Regex( - @"(?Specials?|OneShot|One\-Shot|Omake|Extra(?:(\sChapter)?[^\S])|Art Collection|Side( |_)Stories|Bonus)", - MatchOptions, RegexTimeout), - }; - - private static readonly Regex[] ComicSpecialRegex = - { - // All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle. 
- new Regex( - @"(?Specials?|OneShot|One\-Shot|\d.+?(\W|_|-)Annual|Annual(\W|_|-)\d.+?|Extra(?:(\sChapter)?[^\S])|Book \d.+?|Compendium \d.+?|Omnibus \d.+?|[_\s\-]TPB[_\s\-]|FCBD \d.+?|Absolute \d.+?|Preview \d.+?|Art Collection|Side(\s|_)Stories|Bonus|Hors Série|(\W|_|-)HS(\W|_|-)|(\W|_|-)THS(\W|_|-))", - MatchOptions, RegexTimeout), - }; - - private static readonly Regex[] EuropeanComicRegex = - { - // All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle. - new Regex( - @"(?Bd(\s|_|-)Fr)", - MatchOptions, RegexTimeout), - }; - - // If SP\d+ is in the filename, we force treat it as a special regardless if volume or chapter might have been found. - private static readonly Regex SpecialMarkerRegex = new Regex( - @"(?SP\d+)", - MatchOptions, RegexTimeout - ); - - private static readonly Regex EmptySpaceRegex = new Regex( - @"(?!=.+)(\s{2,})(?!=.+)", - MatchOptions, RegexTimeout - ); - - private static readonly ImmutableArray FormatTagSpecialKeywords = ImmutableArray.Create( - "Special", "Reference", "Director's Cut", "Box Set", "Box-Set", "Annual", "Anthology", "Epilogue", - "One Shot", "One-Shot", "Prologue", "TPB", "Trade Paper Back", "Omnibus", "Compendium", "Absolute", "Graphic Novel", - "GN", "FCBD"); - - private static readonly char[] LeadingZeroesTrimChars = new[] { '0' }; - - public static MangaFormat ParseFormat(string filePath) - { - if (IsArchive(filePath)) return MangaFormat.Archive; - if (IsImage(filePath)) return MangaFormat.Image; - if (IsEpub(filePath)) return MangaFormat.Epub; - if (IsPdf(filePath)) return MangaFormat.Pdf; - return MangaFormat.Unknown; - } - - public static string ParseEdition(string filePath) - { - foreach (var regex in MangaEditionRegex) - { - var matches = regex.Matches(filePath); - foreach (var group in matches.Select(match => match.Groups["Edition"]) - .Where(group => group.Success && group != Match.Empty)) - { - return group.Value - .Replace("{", "").Replace("}", "") 
- .Replace("[", "").Replace("]", "") - .Replace("(", "").Replace(")", ""); - } - } - - return string.Empty; - } - - /// - /// If the file has SP marker. - /// - /// - /// - public static bool HasSpecialMarker(string filePath) - { - var matches = SpecialMarkerRegex.Matches(filePath); - return matches.Select(match => match.Groups["Special"]) - .Any(group => group.Success && group != Match.Empty); - } - - public static string ParseMangaSpecial(string filePath) - { - foreach (var regex in MangaSpecialRegex) - { - var matches = regex.Matches(filePath); - foreach (var group in matches.Select(match => match.Groups["Special"]) - .Where(group => group.Success && group != Match.Empty)) - { - return group.Value; - } - } - - return string.Empty; - } - - public static string ParseComicSpecial(string filePath) - { - foreach (var regex in ComicSpecialRegex) - { - var matches = regex.Matches(filePath); - foreach (var group in matches.Select(match => match.Groups["Special"]) - .Where(group => group.Success && group != Match.Empty)) - { - return group.Value; - } - } - - return string.Empty; - } - - public static string ParseSeries(string filename) - { - foreach (var regex in MangaSeriesRegex) - { - var matches = regex.Matches(filename); - foreach (var group in matches.Select(match => match.Groups["Series"]) - .Where(group => group.Success && group != Match.Empty)) - { - return CleanTitle(group.Value); - } - } - - return string.Empty; - } - public static string ParseComicSeries(string filename) - { - foreach (var regex in ComicSeriesRegex) - { - var matches = regex.Matches(filename); - foreach (var group in matches.Select(match => match.Groups["Series"]) - .Where(group => group.Success && group != Match.Empty)) - { - return CleanTitle(group.Value, true); - } - } - - return string.Empty; - } - - public static string ParseVolume(string filename) - { - foreach (var regex in MangaVolumeRegex) - { - var matches = regex.Matches(filename); - foreach (Match match in matches) - { - if 
(!match.Groups["Volume"].Success || match.Groups["Volume"] == Match.Empty) continue; - - var value = match.Groups["Volume"].Value; - var hasPart = match.Groups["Part"].Success; - return FormatValue(value, hasPart); - } - } - - return DefaultVolume; - } - - public static string ParseComicVolume(string filename) - { - foreach (var regex in ComicVolumeRegex) - { - var matches = regex.Matches(filename); - foreach (var group in matches.Select(match => match.Groups)) - { - if (!group["Volume"].Success || group["Volume"] == Match.Empty) continue; - - var value = group["Volume"].Value; - var hasPart = group["Part"].Success; - return FormatValue(value, hasPart); - } - } - - return DefaultVolume; - } - - private static string FormatValue(string value, bool hasPart) - { - if (!value.Contains('-')) - { - return RemoveLeadingZeroes(hasPart ? AddChapterPart(value) : value); - } - - var tokens = value.Split("-"); - var from = RemoveLeadingZeroes(tokens[0]); - if (tokens.Length != 2) return from; - - var to = RemoveLeadingZeroes(hasPart ? 
AddChapterPart(tokens[1]) : tokens[1]); - return $"{from}-{to}"; - } - - public static string ParseChapter(string filename) - { - foreach (var regex in MangaChapterRegex) - { - var matches = regex.Matches(filename); - foreach (Match match in matches) - { - if (!match.Groups["Chapter"].Success || match.Groups["Chapter"] == Match.Empty) continue; - - var value = match.Groups["Chapter"].Value; - var hasPart = match.Groups["Part"].Success; - - return FormatValue(value, hasPart); - } - } - - return DefaultChapter; - } - - private static string AddChapterPart(string value) - { - if (value.Contains('.')) - { - return value; - } - - return $"{value}.5"; - } - - public static string ParseComicChapter(string filename) - { - foreach (var regex in ComicChapterRegex) - { - var matches = regex.Matches(filename); - foreach (Match match in matches) - { - if (match.Groups["Chapter"].Success && match.Groups["Chapter"] != Match.Empty) - { - var value = match.Groups["Chapter"].Value; - var hasPart = match.Groups["Part"].Success; - return FormatValue(value, hasPart); - } - - } - } - - return DefaultChapter; - } - - private static string RemoveEditionTagHolders(string title) - { - foreach (var regex in CleanupRegex) - { - var matches = regex.Matches(title); - foreach (Match match in matches) - { - if (match.Success) - { - title = title.Replace(match.Value, string.Empty).Trim(); - } - } - } - - foreach (var regex in MangaEditionRegex) - { - var matches = regex.Matches(title); - foreach (Match match in matches) - { - if (match.Success) - { - title = title.Replace(match.Value, string.Empty).Trim(); - } - } - } - - return title; - } - - private static string RemoveMangaSpecialTags(string title) - { - foreach (var regex in MangaSpecialRegex) - { - var matches = regex.Matches(title); - foreach (var match in matches.Where(m => m.Success)) - { - title = title.Replace(match.Value, string.Empty).Trim(); - } - } - - return title; - } - - private static string RemoveEuropeanTags(string title) - { - 
foreach (var regex in EuropeanComicRegex) - { - var matches = regex.Matches(title); - foreach (var match in matches.Where(m => m.Success)) - { - title = title.Replace(match.Value, string.Empty).Trim(); - } - } - - return title; - } - - private static string RemoveComicSpecialTags(string title) - { - foreach (var regex in ComicSpecialRegex) - { - var matches = regex.Matches(title); - foreach (var match in matches.Where(m => m.Success)) - { - title = title.Replace(match.Value, string.Empty).Trim(); - } - } - - return title; - } - - - - /// - /// Translates _ -> spaces, trims front and back of string, removes release groups - /// - /// Hippos_the_Great [Digital], -> Hippos the Great - /// - /// - /// - /// - /// - public static string CleanTitle(string title, bool isComic = false) - { - title = RemoveReleaseGroup(title); - - title = RemoveEditionTagHolders(title); - - title = isComic ? RemoveComicSpecialTags(title) : RemoveMangaSpecialTags(title); - - if (isComic) - { - title = RemoveComicSpecialTags(title); - title = RemoveEuropeanTags(title); - } - else - { - title = RemoveMangaSpecialTags(title); - } - - - title = title.Replace("_", " ").Trim(); - if (title.EndsWith("-") || title.EndsWith(",")) - { - title = title.Substring(0, title.Length - 1); - } - - if (title.StartsWith("-") || title.StartsWith(",")) - { - title = title.Substring(1); - } - - title = EmptySpaceRegex.Replace(title, " "); - - return title.Trim(); - } - - private static string RemoveReleaseGroup(string title) - { - foreach (var regex in ReleaseGroupRegex) - { - var matches = regex.Matches(title); - foreach (var match in matches.Where(m => m.Success)) - { - title = title.Replace(match.Value, string.Empty); - } - } - - return title; - } - - - /// - /// Pads the start of a number string with 0's so ordering works fine if there are over 100 items. - /// Handles ranges (ie 4-8) -> (004-008). 
- /// - /// - /// A zero padded number - public static string PadZeros(string number) - { - if (!number.Contains('-')) return PerformPadding(number); - - var tokens = number.Split("-"); - return $"{PerformPadding(tokens[0])}-{PerformPadding(tokens[1])}"; - } - - private static string PerformPadding(string number) - { - var num = int.Parse(number); - return num switch - { - < 10 => "00" + num, - < 100 => "0" + num, - _ => number - }; - } - - public static string RemoveLeadingZeroes(string title) - { - var ret = title.TrimStart(LeadingZeroesTrimChars); - return string.IsNullOrEmpty(ret) ? "0" : ret; - } - - public static bool IsArchive(string filePath) - { - return ArchiveFileRegex.IsMatch(Path.GetExtension(filePath)); - } - public static bool IsComicInfoExtension(string filePath) - { - return ComicInfoArchiveRegex.IsMatch(Path.GetExtension(filePath)); - } - public static bool IsBook(string filePath) - { - return BookFileRegex.IsMatch(Path.GetExtension(filePath)); - } - - public static bool IsImage(string filePath) - { - return !filePath.StartsWith(".") && ImageRegex.IsMatch(Path.GetExtension(filePath)); - } - - public static bool IsXml(string filePath) - { - return XmlRegex.IsMatch(Path.GetExtension(filePath)); - } - - - public static float MinNumberFromRange(string range) - { - try - { - if (!Regex.IsMatch(range, @"^[\d-.]+$")) - { - return (float) 0.0; - } - - var tokens = range.Replace("_", string.Empty).Split("-"); - return tokens.Min(float.Parse); - } - catch + if (!Regex.IsMatch(range, @"^[\d\-.]+$")) { return (float) 0.0; } + + var tokens = range.Replace("_", string.Empty).Split("-"); + return tokens.Min(float.Parse); } - - public static float MaxNumberFromRange(string range) + catch { - try - { - if (!Regex.IsMatch(range, @"^[\d-.]+$")) - { - return (float) 0.0; - } - - var tokens = range.Replace("_", string.Empty).Split("-"); - return tokens.Max(float.Parse); - } - catch - { - return (float) 0.0; - } - } - - public static string Normalize(string name) - { - 
return NormalizeRegex.Replace(name, string.Empty).ToLower(); - } - - /// - /// Responsible for preparing special title for rendering to the UI. Replaces _ with ' ' and strips out SP\d+ - /// - /// - /// - public static string CleanSpecialTitle(string name) - { - if (string.IsNullOrEmpty(name)) return name; - var cleaned = SpecialTokenRegex.Replace(name.Replace('_', ' '), string.Empty).Trim(); - var lastIndex = cleaned.LastIndexOf('.'); - if (lastIndex > 0) - { - cleaned = cleaned.Substring(0, cleaned.LastIndexOf('.')).Trim(); - } - - return string.IsNullOrEmpty(cleaned) ? name : cleaned; - } - - - /// - /// Tests whether the file is a cover image such that: contains "cover", is named "folder", and is an image - /// - /// If the path has "backcover" in it, it will be ignored - /// Filename with extension - /// - public static bool IsCoverImage(string filename) - { - return IsImage(filename) && CoverImageRegex.IsMatch(filename); - } - - /// - /// Validates that a Path doesn't start with certain blacklisted folders, like __MACOSX, @Recently-Snapshot, etc and that if a full path, the filename - /// doesn't start with ._, which is a metadata file on MACOSX. - /// - /// - /// - public static bool HasBlacklistedFolderInPath(string path) - { - return path.Contains("__MACOSX") || path.StartsWith("@Recently-Snapshot") || path.StartsWith("@recycle") || path.StartsWith("._") || Path.GetFileName(path).StartsWith("._") || path.Contains(".qpkg"); - } - - - public static bool IsEpub(string filePath) - { - return Path.GetExtension(filePath).Equals(".epub", StringComparison.InvariantCultureIgnoreCase); - } - - public static bool IsPdf(string filePath) - { - return Path.GetExtension(filePath).Equals(".pdf", StringComparison.InvariantCultureIgnoreCase); - } - - /// - /// Cleans an author's name - /// - /// If the author is Last, First, this will not reverse - /// - /// - public static string CleanAuthor(string author) - { - return string.IsNullOrEmpty(author) ? 
string.Empty : author.Trim(); - } - - /// - /// Normalizes the slashes in a path to be - /// - /// /manga/1\1 -> /manga/1/1 - /// - /// - public static string NormalizePath(string path) - { - return path.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar) - .Replace(@"//", Path.AltDirectorySeparatorChar + string.Empty); - } - - /// - /// Checks against a set of strings to validate if a ComicInfo.Format should receive special treatment - /// - /// - /// - public static bool HasComicInfoSpecial(string comicInfoFormat) - { - return FormatTagSpecialKeywords.Contains(comicInfoFormat); + return (float) 0.0; } } + + public static float MaxNumberFromRange(string range) + { + try + { + if (!Regex.IsMatch(range, @"^[\d\-.]+$")) + { + return (float) 0.0; + } + + var tokens = range.Replace("_", string.Empty).Split("-"); + return tokens.Max(float.Parse); + } + catch + { + return (float) 0.0; + } + } + + public static string Normalize(string name) + { + return NormalizeRegex.Replace(name, string.Empty).ToLower(); + } + + /// + /// Responsible for preparing special title for rendering to the UI. Replaces _ with ' ' and strips out SP\d+ + /// + /// + /// + public static string CleanSpecialTitle(string name) + { + if (string.IsNullOrEmpty(name)) return name; + var cleaned = SpecialTokenRegex.Replace(name.Replace('_', ' '), string.Empty).Trim(); + var lastIndex = cleaned.LastIndexOf('.'); + if (lastIndex > 0) + { + cleaned = cleaned.Substring(0, cleaned.LastIndexOf('.')).Trim(); + } + + return string.IsNullOrEmpty(cleaned) ? 
name : cleaned; + } + + + /// + /// Tests whether the file is a cover image such that: contains "cover", is named "folder", and is an image + /// + /// If the path has "backcover" in it, it will be ignored + /// Filename with extension + /// + public static bool IsCoverImage(string filename) + { + return IsImage(filename) && CoverImageRegex.IsMatch(filename); + } + + /// + /// Validates that a Path doesn't start with certain blacklisted folders, like __MACOSX, @Recently-Snapshot, etc and that if a full path, the filename + /// doesn't start with ._, which is a metadata file on MACOSX. + /// + /// + /// + public static bool HasBlacklistedFolderInPath(string path) + { + return path.Contains("__MACOSX") || path.StartsWith("@Recently-Snapshot") || path.StartsWith("@recycle") || path.StartsWith("._") || Path.GetFileName(path).StartsWith("._") || path.Contains(".qpkg"); + } + + + public static bool IsEpub(string filePath) + { + return Path.GetExtension(filePath).Equals(".epub", StringComparison.InvariantCultureIgnoreCase); + } + + public static bool IsPdf(string filePath) + { + return Path.GetExtension(filePath).Equals(".pdf", StringComparison.InvariantCultureIgnoreCase); + } + + /// + /// Cleans an author's name + /// + /// If the author is Last, First, this will not reverse + /// + /// + public static string CleanAuthor(string author) + { + return string.IsNullOrEmpty(author) ? 
string.Empty : author.Trim(); + } + + /// + /// Normalizes the slashes in a path to be + /// + /// /manga/1\1 -> /manga/1/1 + /// + /// + public static string NormalizePath(string path) + { + return path.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar) + .Replace(@"//", Path.AltDirectorySeparatorChar + string.Empty); + } + + /// + /// Checks against a set of strings to validate if a ComicInfo.Format should receive special treatment + /// + /// + /// + public static bool HasComicInfoSpecial(string comicInfoFormat) + { + return FormatTagSpecialKeywords.Contains(comicInfoFormat); + } + + private static string ReplaceUnderscores(string name) => name?.Replace("_", " "); } diff --git a/API/Services/Tasks/Scanner/Parser/ParserInfo.cs b/API/Services/Tasks/Scanner/Parser/ParserInfo.cs index 4a0a3fdc6..1f0a9d692 100644 --- a/API/Services/Tasks/Scanner/Parser/ParserInfo.cs +++ b/API/Services/Tasks/Scanner/Parser/ParserInfo.cs @@ -2,100 +2,99 @@ using API.Entities.Enums; using API.Services.Tasks.Scanner.Parser; -namespace API.Parser +namespace API.Parser; + +/// +/// This represents all parsed information from a single file +/// +public class ParserInfo { /// - /// This represents all parsed information from a single file + /// Represents the parsed chapters from a file. By default, will be 0 which means nothing could be parsed. + /// The chapters can only be a single float or a range of float ie) 1-2. Mainly floats should be multiples of 0.5 representing specials /// - public class ParserInfo + public string Chapters { get; set; } = ""; + /// + /// Represents the parsed series from the file or folder + /// + public string Series { get; set; } = string.Empty; + /// + /// This can be filled in from ComicInfo.xml/Epub during scanning. Will update the SortName field on + /// + public string SeriesSort { get; set; } = string.Empty; + /// + /// This can be filled in from ComicInfo.xml/Epub during scanning. 
Will update the LocalizedName field on + /// + public string LocalizedSeries { get; set; } = string.Empty; + /// + /// Represents the parsed volumes from a file. By default, will be 0 which means that nothing could be parsed. + /// If Volumes is 0 and Chapters is 0, the file is a special. If Chapters is non-zero, then no volume could be parsed. + /// Beastars Vol 3-4 will map to "3-4" + /// The volumes can only be a single int or a range of ints ie) 1-2. Float based volumes are not supported. + /// + public string Volumes { get; set; } = ""; + /// + /// Filename of the underlying file + /// Beastars v01 (digital).cbz + /// + public string Filename { get; init; } = ""; + /// + /// Full filepath of the underlying file + /// C:/Manga/Beastars v01 (digital).cbz + /// + public string FullFilePath { get; set; } = ""; + + /// + /// that represents the type of the file + /// Mainly used to show in the UI and so caching service knows how to cache for reading. + /// + public MangaFormat Format { get; set; } = MangaFormat.Unknown; + + /// + /// This can potentially story things like "Omnibus, Color, Full Contact Edition, Extra, Final, etc" + /// + /// Not Used in Database + public string Edition { get; set; } = ""; + + /// + /// If the file contains no volume/chapter information or contains Special Keywords + /// + public bool IsSpecial { get; set; } + + /// + /// Used for specials or books, stores what the UI should show. + /// Manga does not use this field + /// + public string Title { get; set; } = string.Empty; + + /// + /// If the ParserInfo has the IsSpecial tag or both volumes and chapters are default aka 0 + /// + /// + public bool IsSpecialInfo() { - /// - /// Represents the parsed chapters from a file. By default, will be 0 which means nothing could be parsed. - /// The chapters can only be a single float or a range of float ie) 1-2. 
Mainly floats should be multiples of 0.5 representing specials - /// - public string Chapters { get; set; } = ""; - /// - /// Represents the parsed series from the file or folder - /// - public string Series { get; set; } = string.Empty; - /// - /// This can be filled in from ComicInfo.xml/Epub during scanning. Will update the SortName field on - /// - public string SeriesSort { get; set; } = string.Empty; - /// - /// This can be filled in from ComicInfo.xml/Epub during scanning. Will update the LocalizedName field on - /// - public string LocalizedSeries { get; set; } = string.Empty; - /// - /// Represents the parsed volumes from a file. By default, will be 0 which means that nothing could be parsed. - /// If Volumes is 0 and Chapters is 0, the file is a special. If Chapters is non-zero, then no volume could be parsed. - /// Beastars Vol 3-4 will map to "3-4" - /// The volumes can only be a single int or a range of ints ie) 1-2. Float based volumes are not supported. - /// - public string Volumes { get; set; } = ""; - /// - /// Filename of the underlying file - /// Beastars v01 (digital).cbz - /// - public string Filename { get; init; } = ""; - /// - /// Full filepath of the underlying file - /// C:/Manga/Beastars v01 (digital).cbz - /// - public string FullFilePath { get; set; } = ""; + return (IsSpecial || (Volumes == "0" && Chapters == "0")); + } - /// - /// that represents the type of the file - /// Mainly used to show in the UI and so caching service knows how to cache for reading. - /// - public MangaFormat Format { get; set; } = MangaFormat.Unknown; + /// + /// This will contain any EXTRA comicInfo information parsed from the epub or archive. If there is an archive with comicInfo.xml AND it contains + /// series, volume information, that will override what we parsed. 
+ /// + public ComicInfo ComicInfo { get; set; } - /// - /// This can potentially story things like "Omnibus, Color, Full Contact Edition, Extra, Final, etc" - /// - /// Not Used in Database - public string Edition { get; set; } = ""; - - /// - /// If the file contains no volume/chapter information or contains Special Keywords - /// - public bool IsSpecial { get; set; } - - /// - /// Used for specials or books, stores what the UI should show. - /// Manga does not use this field - /// - public string Title { get; set; } = string.Empty; - - /// - /// If the ParserInfo has the IsSpecial tag or both volumes and chapters are default aka 0 - /// - /// - public bool IsSpecialInfo() - { - return (IsSpecial || (Volumes == "0" && Chapters == "0")); - } - - /// - /// This will contain any EXTRA comicInfo information parsed from the epub or archive. If there is an archive with comicInfo.xml AND it contains - /// series, volume information, that will override what we parsed. - /// - public ComicInfo ComicInfo { get; set; } - - /// - /// Merges non empty/null properties from info2 into this entity. - /// - /// This does not merge ComicInfo as they should always be the same - /// - public void Merge(ParserInfo info2) - { - if (info2 == null) return; - Chapters = string.IsNullOrEmpty(Chapters) || Chapters == "0" ? info2.Chapters: Chapters; - Volumes = string.IsNullOrEmpty(Volumes) || Volumes == "0" ? info2.Volumes : Volumes; - Edition = string.IsNullOrEmpty(Edition) ? info2.Edition : Edition; - Title = string.IsNullOrEmpty(Title) ? info2.Title : Title; - Series = string.IsNullOrEmpty(Series) ? info2.Series : Series; - IsSpecial = IsSpecial || info2.IsSpecial; - } + /// + /// Merges non empty/null properties from info2 into this entity. + /// + /// This does not merge ComicInfo as they should always be the same + /// + public void Merge(ParserInfo info2) + { + if (info2 == null) return; + Chapters = string.IsNullOrEmpty(Chapters) || Chapters == "0" ? 
info2.Chapters: Chapters; + Volumes = string.IsNullOrEmpty(Volumes) || Volumes == "0" ? info2.Volumes : Volumes; + Edition = string.IsNullOrEmpty(Edition) ? info2.Edition : Edition; + Title = string.IsNullOrEmpty(Title) ? info2.Title : Title; + Series = string.IsNullOrEmpty(Series) ? info2.Series : Series; + IsSpecial = IsSpecial || info2.IsSpecial; } } diff --git a/API/Services/Tasks/Scanner/ProcessSeries.cs b/API/Services/Tasks/Scanner/ProcessSeries.cs index e8db2a97a..c61b72bdb 100644 --- a/API/Services/Tasks/Scanner/ProcessSeries.cs +++ b/API/Services/Tasks/Scanner/ProcessSeries.cs @@ -3,6 +3,7 @@ using System.Collections.Generic; using System.Collections.Immutable; using System.Diagnostics; using System.Linq; +using System.Threading; using System.Threading.Tasks; using API.Data; using API.Data.Metadata; @@ -14,6 +15,7 @@ using API.Parser; using API.Services.Tasks.Metadata; using API.SignalR; using Hangfire; +using Kavita.Common; using Microsoft.Extensions.Logging; namespace API.Services.Tasks.Scanner; @@ -48,8 +50,6 @@ public class ProcessSeries : IProcessSeries private IList _people; private IList _tags; - - public ProcessSeries(IUnitOfWork unitOfWork, ILogger logger, IEventHub eventHub, IDirectoryService directoryService, ICacheHelper cacheHelper, IReadingItemService readingItemService, IFileService fileService, IMetadataService metadataService, IWordCountAnalyzerService wordCountAnalyzerService) @@ -108,6 +108,7 @@ public class ProcessSeries : IProcessSeries { seriesAdded = true; series = DbFactory.Series(firstInfo.Series, firstInfo.LocalizedSeries); + _unitOfWork.SeriesRepository.Add(series); } if (series.LibraryId == 0) series.LibraryId = library.Id; @@ -116,7 +117,8 @@ public class ProcessSeries : IProcessSeries { _logger.LogInformation("[ScannerService] Processing series {SeriesName}", series.OriginalName); - var firstParsedInfo = parsedInfos[0]; + // parsedInfos[0] is not the first volume or chapter. 
We need to find it using a ComicInfo check (as it uses firstParsedInfo for series sort) + var firstParsedInfo = parsedInfos.FirstOrDefault(p => p.ComicInfo != null, firstInfo); UpdateVolumes(series, parsedInfos); series.Pages = series.Volumes.Sum(v => v.Pages); @@ -155,7 +157,6 @@ public class ProcessSeries : IProcessSeries await UpdateSeriesFolderPath(parsedInfos, library, series); series.LastFolderScanned = DateTime.Now; - _unitOfWork.SeriesRepository.Attach(series); if (_unitOfWork.HasChanges()) { @@ -166,7 +167,9 @@ public class ProcessSeries : IProcessSeries catch (Exception ex) { await _unitOfWork.RollbackAsync(); - _logger.LogCritical(ex, "[ScannerService] There was an issue writing to the for series {@SeriesName}", series); + _logger.LogCritical(ex, + "[ScannerService] There was an issue writing to the database for series {@SeriesName}", + series.Name); await _eventHub.SendMessageAsync(MessageFactory.Error, MessageFactory.ErrorEvent($"There was an issue writing to the DB for Series {series}", @@ -210,13 +213,13 @@ public class ProcessSeries : IProcessSeries if (!library.Folders.Select(f => f.Path).Contains(seriesDirs.Keys.First())) { series.FolderPath = Parser.Parser.NormalizePath(seriesDirs.Keys.First()); + _logger.LogDebug("Updating {Series} FolderPath to {FolderPath}", series.Name, series.FolderPath); } } } public void EnqueuePostSeriesProcessTasks(int libraryId, int seriesId, bool forceUpdate = false) { - //BackgroundJob.Enqueue(() => _metadataService.GenerateCoversForSeries(libraryId, seriesId, forceUpdate)); BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(libraryId, seriesId, forceUpdate)); } @@ -233,12 +236,9 @@ public class ProcessSeries : IProcessSeries var chapters = series.Volumes.SelectMany(volume => volume.Chapters).ToList(); // Update Metadata based on Chapter metadata - series.Metadata.ReleaseYear = chapters.Min(c => c.ReleaseDate.Year); - - if (series.Metadata.ReleaseYear < 1000) + if (!series.Metadata.ReleaseYearLocked) { - 
// Not a valid year, default to 0 - series.Metadata.ReleaseYear = 0; + series.Metadata.ReleaseYear = chapters.MinimumReleaseYear(); } // Set the AgeRating as highest in all the comicInfos @@ -438,7 +438,22 @@ public class ProcessSeries : IProcessSeries _logger.LogDebug("[ScannerService] Updating {DistinctVolumes} volumes on {SeriesName}", distinctVolumes.Count, series.Name); foreach (var volumeNumber in distinctVolumes) { - var volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber); + _logger.LogDebug("[ScannerService] Looking up volume for {VolumeNumber}", volumeNumber); + Volume volume; + try + { + volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber); + } + catch (Exception ex) + { + if (ex.Message.Equals("Sequence contains more than one matching element")) + { + _logger.LogCritical("[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name); + throw new KavitaException( + $"Kavita found corrupted volume entries on {series.Name}. Please delete the series from Kavita via UI and rescan"); + } + throw; + } if (volume == null) { volume = DbFactory.Volume(volumeNumber); @@ -457,7 +472,7 @@ public class ProcessSeries : IProcessSeries foreach (var chapter in volume.Chapters) { var firstFile = chapter.Files.MinBy(x => x.Chapter); - if (firstFile == null || _cacheHelper.HasFileNotChangedSinceCreationOrLastScan(chapter, false, firstFile)) continue; + if (firstFile == null || _cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, firstFile)) continue; try { var firstChapterInfo = infos.SingleOrDefault(i => i.FullFilePath.Equals(firstFile.FilePath)); @@ -479,10 +494,10 @@ public class ProcessSeries : IProcessSeries var deletedVolumes = series.Volumes.Except(nonDeletedVolumes); foreach (var volume in deletedVolumes) { - var file = volume.Chapters.FirstOrDefault()?.Files?.FirstOrDefault()?.FilePath ?? 
""; + var file = volume.Chapters.FirstOrDefault()?.Files?.FirstOrDefault()?.FilePath ?? string.Empty; if (!string.IsNullOrEmpty(file) && _directoryService.FileSystem.File.Exists(file)) { - _logger.LogError( + _logger.LogInformation( "[ScannerService] Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}", file); } @@ -493,7 +508,7 @@ public class ProcessSeries : IProcessSeries series.Volumes = nonDeletedVolumes; } - _logger.LogDebug("[ScannerService] Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}", + _logger.LogDebug("[ScannerService] Updated {SeriesName} volumes from count of {StartingVolumeCount} to {VolumeCount}", series.Name, startingVolumeCount, series.Volumes.Count); } @@ -582,7 +597,7 @@ public class ProcessSeries : IProcessSeries { var firstFile = chapter.Files.MinBy(x => x.Chapter); if (firstFile == null || - _cacheHelper.HasFileNotChangedSinceCreationOrLastScan(chapter, false, firstFile)) return; + _cacheHelper.IsFileUnmodifiedSinceCreationOrLastScan(chapter, false, firstFile)) return; var comicInfo = info; if (info == null) @@ -616,14 +631,7 @@ public class ProcessSeries : IProcessSeries } // This needs to check against both Number and Volume to calculate Count - if (!string.IsNullOrEmpty(comicInfo.Number) && float.Parse(comicInfo.Number) > 0) - { - chapter.Count = (int) Math.Floor(float.Parse(comicInfo.Number)); - } - if (!string.IsNullOrEmpty(comicInfo.Volume) && float.Parse(comicInfo.Volume) > 0) - { - chapter.Count = Math.Max(chapter.Count, (int) Math.Floor(float.Parse(comicInfo.Volume))); - } + chapter.Count = comicInfo.CalculatedCount(); void AddPerson(Person person) { @@ -632,13 +640,11 @@ public class ProcessSeries : IProcessSeries void AddGenre(Genre genre) { - //chapter.Genres.Add(genre); GenreHelper.AddGenreIfNotExists(chapter.Genres, genre); } void AddTag(Tag tag, bool added) { - //chapter.Tags.Add(tag); TagHelper.AddTagIfNotExists(chapter.Tags, tag); } @@ -647,7 +653,7 @@ 
public class ProcessSeries : IProcessSeries { var day = Math.Max(comicInfo.Day, 1); var month = Math.Max(comicInfo.Month, 1); - chapter.ReleaseDate = DateTime.Parse($"{month}/{day}/{comicInfo.Year}"); + chapter.ReleaseDate = new DateTime(comicInfo.Year, month, day); } var people = GetTagValues(comicInfo.Colorist); @@ -736,7 +742,6 @@ public class ProcessSeries : IProcessSeries /// private void UpdatePeople(IEnumerable names, PersonRole role, Action action) { - var allPeopleTypeRole = _people.Where(p => p.Role == role).ToList(); foreach (var name in names) diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index 662016415..18cb219e0 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -8,7 +8,7 @@ using System.Threading.Tasks; using API.Data; using API.Data.Repositories; using API.Entities; -using API.Extensions; +using API.Entities.Enums; using API.Helpers; using API.Parser; using API.Services.Tasks.Metadata; @@ -25,14 +25,15 @@ public interface IScannerService /// cover images if forceUpdate is true. 
/// /// Library to scan against + /// Don't perform optimization checks, defaults to false [Queue(TaskScheduler.ScanQueue)] [DisableConcurrentExecution(60 * 60 * 60)] - [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] + [AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)] Task ScanLibrary(int libraryId, bool forceUpdate = false); [Queue(TaskScheduler.ScanQueue)] [DisableConcurrentExecution(60 * 60 * 60)] - [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] + [AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)] Task ScanLibraries(); [Queue(TaskScheduler.ScanQueue)] @@ -96,18 +97,39 @@ public class ScannerService : IScannerService _wordCountAnalyzerService = wordCountAnalyzerService; } + /// + /// Given a generic folder path, will invoke a Series scan or Library scan. + /// + /// This will Schedule the job to run 1 minute in the future to allow for any close-by duplicate requests to be dropped + /// public async Task ScanFolder(string folder) { - var seriesId = await _unitOfWork.SeriesRepository.GetSeriesIdByFolder(folder); - if (seriesId > 0) + Series series = null; + try { - BackgroundJob.Enqueue(() => ScanSeries(seriesId, true)); + series = await _unitOfWork.SeriesRepository.GetSeriesByFolderPath(folder, SeriesIncludes.Library); + } + catch (InvalidOperationException ex) + { + if (ex.Message.Equals("Sequence contains more than one element.")) + { + _logger.LogCritical("[ScannerService] Multiple series map to this folder. Library scan will be used for ScanFolder"); + } + } + if (series != null && series.Library.Type != LibraryType.Book) + { + if (TaskScheduler.HasScanTaskRunningForSeries(series.Id)) + { + _logger.LogInformation("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this series. 
Dropping request", folder); + return; + } + BackgroundJob.Schedule(() => ScanSeries(series.Id, true), TimeSpan.FromMinutes(1)); return; } // This is basically rework of what's already done in Library Watcher but is needed if invoked via API var parentDirectory = _directoryService.GetParentDirectoryName(folder); - if (string.IsNullOrEmpty(parentDirectory)) return; // This should never happen as it's calculated before enqueing + if (string.IsNullOrEmpty(parentDirectory)) return; var libraries = (await _unitOfWork.LibraryRepository.GetLibraryDtosAsync()).ToList(); var libraryFolders = libraries.SelectMany(l => l.Folders); @@ -118,12 +140,17 @@ public class ScannerService : IScannerService var library = libraries.FirstOrDefault(l => l.Folders.Select(Scanner.Parser.Parser.NormalizePath).Contains(libraryFolder)); if (library != null) { - BackgroundJob.Enqueue(() => ScanLibrary(library.Id, false)); + if (TaskScheduler.HasScanTaskRunningForLibrary(library.Id)) + { + _logger.LogInformation("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this library. Dropping request", folder); + return; + } + BackgroundJob.Schedule(() => ScanLibrary(library.Id, false), TimeSpan.FromMinutes(1)); } } /// - /// + /// Scans just an existing Series for changes. If the series doesn't exist, will delete it. /// /// /// Not Used. Scan series will always force @@ -164,6 +191,7 @@ public class ScannerService : IScannerService await _eventHub.SendMessageAsync(MessageFactory.Error, MessageFactory.ErrorEvent($"{series.Name} scan aborted", "Files for series are not in a nested folder under library path. Correct this and rescan.")); return; } + } if (string.IsNullOrEmpty(folderPath)) @@ -173,14 +201,13 @@ public class ScannerService : IScannerService return; } + // If the series path doesn't exist anymore, it was either moved or renamed. 
We need to essentially delete it var parsedSeries = new Dictionary>(); - var processTasks = new List(); - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Started, series.Name)); await _processSeries.Prime(); - void TrackFiles(Tuple> parsedInfo) + async Task TrackFiles(Tuple> parsedInfo) { var parsedFiles = parsedInfo.Item2; if (parsedFiles.Count == 0) return; @@ -192,23 +219,24 @@ public class ScannerService : IScannerService Format = parsedFiles.First().Format }; - if (!foundParsedSeries.NormalizedName.Equals(series.NormalizedName)) + // For Scan Series, we need to filter out anything that isn't our Series + if (!foundParsedSeries.NormalizedName.Equals(series.NormalizedName) && !foundParsedSeries.NormalizedName.Equals(Scanner.Parser.Parser.Normalize(series.OriginalName))) { return; } - processTasks.Add(_processSeries.ProcessSeriesAsync(parsedFiles, library)); + await _processSeries.ProcessSeriesAsync(parsedFiles, library); parsedSeries.Add(foundParsedSeries, parsedFiles); } _logger.LogInformation("Beginning file scan on {SeriesName}", series.Name); - var scanElapsedTime = await ScanFiles(library, new []{folderPath}, false, TrackFiles, true); + var scanElapsedTime = await ScanFiles(library, new []{ folderPath }, false, TrackFiles, true); _logger.LogInformation("ScanFiles for {Series} took {Time}", series.Name, scanElapsedTime); - //await Task.WhenAll(processTasks); - await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, series.Name)); + + // Remove any parsedSeries keys that don't belong to our series. 
This can occur when users store 2 series in the same folder RemoveParsedInfosNotForSeries(parsedSeries, series); @@ -378,7 +406,7 @@ public class ScannerService : IScannerService [Queue(TaskScheduler.ScanQueue)] [DisableConcurrentExecution(60 * 60 * 60)] - [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] + [AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)] public async Task ScanLibraries() { _logger.LogInformation("Starting Scan of All Libraries"); @@ -396,9 +424,10 @@ public class ScannerService : IScannerService /// ie) all entities will be rechecked for new cover images and comicInfo.xml changes /// /// + /// Defaults to false [Queue(TaskScheduler.ScanQueue)] [DisableConcurrentExecution(60 * 60 * 60)] - [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] + [AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)] public async Task ScanLibrary(int libraryId, bool forceUpdate = false) { var sw = Stopwatch.StartNew(); @@ -423,12 +452,13 @@ public class ScannerService : IScannerService await _processSeries.Prime(); - var processTasks = new List(); - void TrackFiles(Tuple> parsedInfo) + var processTasks = new List>(); + + Task TrackFiles(Tuple> parsedInfo) { var skippedScan = parsedInfo.Item1; var parsedFiles = parsedInfo.Item2; - if (parsedFiles.Count == 0) return; + if (parsedFiles.Count == 0) return Task.CompletedTask; var foundParsedSeries = new ParsedSeries() { @@ -445,21 +475,23 @@ public class ScannerService : IScannerService NormalizedName = Scanner.Parser.Parser.Normalize(pf.Series), Format = pf.Format })); - return; + return Task.CompletedTask; } totalFiles += parsedFiles.Count; seenSeries.Add(foundParsedSeries); - processTasks.Add(_processSeries.ProcessSeriesAsync(parsedFiles, library)); + processTasks.Add(async () => await _processSeries.ProcessSeriesAsync(parsedFiles, library)); + return Task.CompletedTask; } + var scanElapsedTime = 
await ScanFiles(library, libraryFolderPaths, shouldUseLibraryScan, TrackFiles, forceUpdate); - var scanElapsedTime = await ScanFiles(library, libraryFolderPaths, shouldUseLibraryScan, TrackFiles); - - - await Task.WhenAll(processTasks); + foreach (var task in processTasks) + { + await task(); + } await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent(string.Empty, library.Name, ProgressEventType.Ended)); @@ -473,9 +505,6 @@ public class ScannerService : IScannerService library.LastScanned = time; - // Could I delete anything in a Library's Series where the LastScan date is before scanStart? - // NOTE: This implementation is expensive - var removedSeries = await _unitOfWork.SeriesRepository.RemoveSeriesNotInList(seenSeries, library.Id); _unitOfWork.LibraryRepository.Update(library); if (await _unitOfWork.CommitAsync()) @@ -484,7 +513,7 @@ public class ScannerService : IScannerService { _logger.LogInformation( "[ScannerService] Finished library scan of {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}. There were no changes", - totalFiles, seenSeries.Count, sw.ElapsedMilliseconds, library.Name); + seenSeries.Count, sw.ElapsedMilliseconds, library.Name); } else { @@ -493,10 +522,27 @@ public class ScannerService : IScannerService totalFiles, seenSeries.Count, sw.ElapsedMilliseconds, library.Name); } - foreach (var s in removedSeries) + try { - await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved, - MessageFactory.SeriesRemovedEvent(s.Id, s.Name, s.LibraryId), false); + // Could I delete anything in a Library's Series where the LastScan date is before scanStart? 
+ // NOTE: This implementation is expensive + _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan"); + var removedSeries = await _unitOfWork.SeriesRepository.RemoveSeriesNotInList(seenSeries, library.Id); + _logger.LogDebug("[ScannerService] Found {Count} series that needs to be removed: {SeriesList}", + removedSeries.Count, removedSeries.Select(s => s.Name)); + _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan - complete"); + + await _unitOfWork.CommitAsync(); + + foreach (var s in removedSeries) + { + await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved, + MessageFactory.SeriesRemovedEvent(s.Id, s.Name, s.LibraryId), false); + } + } + catch (Exception ex) + { + _logger.LogCritical(ex, "[ScannerService] There was an issue deleting series for cleanup. Please check logs and rescan"); } } else @@ -512,7 +558,7 @@ public class ScannerService : IScannerService } private async Task ScanFiles(Library library, IEnumerable dirs, - bool isLibraryScan, Action>> processSeriesInfos = null, bool forceChecks = false) + bool isLibraryScan, Func>, Task> processSeriesInfos = null, bool forceChecks = false) { var scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService, _eventHub); var scanWatch = Stopwatch.StartNew(); @@ -549,4 +595,5 @@ public class ScannerService : IScannerService { return existingSeries.Where(es => !ParserInfoHelpers.SeriesHasMatchingParserInfoFormat(es, parsedSeries)); } + } diff --git a/API/Services/Tasks/StatsService.cs b/API/Services/Tasks/StatsService.cs index a190a4113..0f1653a7c 100644 --- a/API/Services/Tasks/StatsService.cs +++ b/API/Services/Tasks/StatsService.cs @@ -1,4 +1,6 @@ using System; +using System.Collections.Generic; +using System.IO; using System.Linq; using System.Net.Http; using System.Runtime.InteropServices; @@ -21,6 +23,7 @@ public interface IStatsService { Task Send(); Task GetServerInfo(); + Task SendCancellation(); } public class 
StatsService : IStatsService { @@ -127,6 +130,11 @@ public class StatsService : IStatsService MaxSeriesInALibrary = await MaxSeriesInAnyLibrary(), MaxVolumesInASeries = await MaxVolumesInASeries(), MaxChaptersInASeries = await MaxChaptersInASeries(), + MangaReaderBackgroundColors = await AllMangaReaderBackgroundColors(), + MangaReaderPageSplittingModes = await AllMangaReaderPageSplitting(), + MangaReaderLayoutModes = await AllMangaReaderLayoutModes(), + FileFormats = AllFormats(), + UsingRestrictedProfiles = await GetUsingRestrictedProfiles(), }; var usersWithPref = (await _unitOfWork.UserRepository.GetAllUsersAsync(AppUserIncludes.UserPreferences)).ToList(); @@ -149,6 +157,39 @@ public class StatsService : IStatsService return serverInfo; } + public async Task SendCancellation() + { + _logger.LogInformation("Informing KavitaStats that this instance is no longer sending stats"); + var installId = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).InstallId; + + var responseContent = string.Empty; + + try + { + var response = await (ApiUrl + "/api/v2/stats/opt-out?installId=" + installId) + .WithHeader("Accept", "application/json") + .WithHeader("User-Agent", "Kavita") + .WithHeader("x-api-key", "MsnvA2DfQqxSK5jh") + .WithHeader("x-kavita-version", BuildInfo.Version) + .WithHeader("Content-Type", "application/json") + .WithTimeout(TimeSpan.FromSeconds(30)) + .PostAsync(); + + if (response.StatusCode != StatusCodes.Status200OK) + { + _logger.LogError("KavitaStats did not respond successfully. {Content}", response); + } + } + catch (HttpRequestException e) + { + _logger.LogError(e, "KavitaStats did not respond successfully. 
{Response}", responseContent); + } + catch (Exception e) + { + _logger.LogError(e, "An error happened during the request to KavitaStats"); + } + } + private Task GetIfUsingSeriesRelationship() { return _context.SeriesRelation.AnyAsync(); @@ -190,4 +231,40 @@ public class StatsService : IStatsService .SelectMany(v => v.Chapters) .Count()); } + + private async Task> AllMangaReaderBackgroundColors() + { + return await _context.AppUserPreferences.Select(p => p.BackgroundColor).Distinct().ToListAsync(); + } + + private async Task> AllMangaReaderPageSplitting() + { + return await _context.AppUserPreferences.Select(p => p.PageSplitOption).Distinct().ToListAsync(); + } + + private async Task> AllMangaReaderLayoutModes() + { + return await _context.AppUserPreferences.Select(p => p.LayoutMode).Distinct().ToListAsync(); + } + + private IEnumerable AllFormats() + { + var results = _context.MangaFile + .AsNoTracking() + .AsEnumerable() + .Select(m => new FileFormatDto() + { + Format = m.Format, + Extension = Path.GetExtension(m.FilePath)?.ToLowerInvariant() + }) + .DistinctBy(f => f.Extension) + .ToList(); + + return results; + } + + private Task GetUsingRestrictedProfiles() + { + return _context.Users.AnyAsync(u => u.AgeRestriction > AgeRating.NotApplicable); + } } diff --git a/API/Services/TokenService.cs b/API/Services/TokenService.cs index 2c8e9926e..927b15907 100644 --- a/API/Services/TokenService.cs +++ b/API/Services/TokenService.cs @@ -75,7 +75,7 @@ public class TokenService : ITokenService var username = tokenContent.Claims.FirstOrDefault(q => q.Type == JwtRegisteredClaimNames.NameId)?.Value; var user = await _userManager.FindByNameAsync(username); if (user == null) return null; // This forces a logout - var isValid = await _userManager.VerifyUserTokenAsync(user, TokenOptions.DefaultProvider, "RefreshToken", request.RefreshToken); + await _userManager.VerifyUserTokenAsync(user, TokenOptions.DefaultProvider, "RefreshToken", request.RefreshToken); await 
_userManager.UpdateSecurityStampAsync(user); diff --git a/API/SignalR/EventHub.cs b/API/SignalR/EventHub.cs index 3beba3d24..1f7790581 100644 --- a/API/SignalR/EventHub.cs +++ b/API/SignalR/EventHub.cs @@ -56,4 +56,5 @@ public class EventHub : IEventHub var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); await _messageHub.Clients.User(user.UserName).SendAsync(method, message); } + } diff --git a/API/SignalR/LogHub.cs b/API/SignalR/LogHub.cs new file mode 100644 index 000000000..15a30afdb --- /dev/null +++ b/API/SignalR/LogHub.cs @@ -0,0 +1,58 @@ +using System; +using System.Threading.Tasks; +using API.Extensions; +using API.SignalR.Presence; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.SignalR; + +namespace API.SignalR; + +public interface ILogHub : Serilog.Sinks.AspNetCore.SignalR.Interfaces.IHub +{ +} + +[Authorize] +public class LogHub : Hub +{ + private readonly IEventHub _eventHub; + private readonly IPresenceTracker _tracker; + + public LogHub(IEventHub eventHub, IPresenceTracker tracker) + { + _eventHub = eventHub; + _tracker = tracker; + } + + + public override async Task OnConnectedAsync() + { + await _tracker.UserConnected(Context.User.GetUsername(), Context.ConnectionId); + await base.OnConnectedAsync(); + } + + public override async Task OnDisconnectedAsync(Exception exception) + { + await _tracker.UserDisconnected(Context.User.GetUsername(), Context.ConnectionId); + await base.OnDisconnectedAsync(exception); + } + + public async Task SendLogAsString(string message) + { + await _eventHub.SendMessageAsync("LogString", new SignalRMessage() + { + Body = message, + EventType = "LogString", + Name = "LogString", + }, true); + } + + public async Task SendLogAsObject(object messageObject) + { + await _eventHub.SendMessageAsync("LogObject", new SignalRMessage() + { + Body = messageObject, + EventType = "LogString", + Name = "LogString", + }, true); + } +} diff --git a/API/SignalR/MessageFactory.cs 
b/API/SignalR/MessageFactory.cs index 74ee4cc0f..a702396d3 100644 --- a/API/SignalR/MessageFactory.cs +++ b/API/SignalR/MessageFactory.cs @@ -6,476 +6,492 @@ using API.DTOs.Update; using API.Entities; using API.Extensions; -namespace API.SignalR +namespace API.SignalR; + +public static class MessageFactoryEntityTypes { - public static class MessageFactoryEntityTypes + public const string Series = "series"; + public const string Volume = "volume"; + public const string Chapter = "chapter"; + public const string CollectionTag = "collection"; + public const string ReadingList = "readingList"; +} +public static class MessageFactory +{ + /// + /// An update is available for the Kavita instance + /// + public const string UpdateAvailable = "UpdateAvailable"; + /// + /// Used to tell when a scan series completes. This also informs UI to update series metadata + /// + public const string ScanSeries = "ScanSeries"; + /// + /// Event sent out during Refresh Metadata for progress tracking + /// + private const string CoverUpdateProgress = "CoverUpdateProgress"; + /// + /// Series is added to server + /// + public const string SeriesAdded = "SeriesAdded"; + /// + /// Series is removed from server + /// + public const string SeriesRemoved = "SeriesRemoved"; + /// + /// When a user is connects/disconnects from server + /// + public const string OnlineUsers = "OnlineUsers"; + /// + /// When a series is added to a collection + /// + public const string SeriesAddedToCollection = "SeriesAddedToCollection"; + /// + /// Event sent out during backing up the database + /// + private const string BackupDatabaseProgress = "BackupDatabaseProgress"; + /// + /// Event sent out during cleaning up temp and cache folders + /// + private const string CleanupProgress = "CleanupProgress"; + /// + /// Event sent out during downloading of files + /// + private const string DownloadProgress = "DownloadProgress"; + /// + /// A cover was updated + /// + public const string CoverUpdate = "CoverUpdate"; 
+ /// + /// A custom site theme was removed or added + /// + private const string SiteThemeProgress = "SiteThemeProgress"; + /// + /// A custom book theme was removed or added + /// + private const string BookThemeProgress = "BookThemeProgress"; + /// + /// A type of event that has progress (determinate or indeterminate). + /// The underlying event will have a name to give details on how to handle. + /// + /// This is not an Event Name, it is used as the method only + public const string NotificationProgress = "NotificationProgress"; + /// + /// Event sent out when Scan Loop is parsing a file + /// + private const string FileScanProgress = "FileScanProgress"; + /// + /// A generic error that can occur in background processing + /// + public const string Error = "Error"; + /// + /// When DB updates are occuring during a library/series scan + /// + private const string ScanProgress = "ScanProgress"; + /// + /// When a library is created/deleted in the Server + /// + public const string LibraryModified = "LibraryModified"; + /// + /// A user's progress was modified + /// + public const string UserProgressUpdate = "UserProgressUpdate"; + /// + /// A user's account or preferences were updated and UI needs to refresh to stay in sync + /// + public const string UserUpdate = "UserUpdate"; + /// + /// When bulk bookmarks are being converted + /// + private const string ConvertBookmarksProgress = "ConvertBookmarksProgress"; + /// + /// When files are being scanned to calculate word count + /// + private const string WordCountAnalyzerProgress = "WordCountAnalyzerProgress"; + /// + /// A generic message that can occur in background processing to inform user, but no direct action is needed + /// + public const string Info = "Info"; + /// + /// When files are being emailed to a device + /// + public const string SendingToDevice = "SendingToDevice"; + + + public static SignalRMessage ScanSeriesEvent(int libraryId, int seriesId, string seriesName) { - public const string Series = 
"series"; - public const string Volume = "volume"; - public const string Chapter = "chapter"; - public const string CollectionTag = "collection"; - public const string ReadingList = "readingList"; + return new SignalRMessage() + { + Name = ScanSeries, + EventType = ProgressEventType.Single, + Body = new + { + LibraryId = libraryId, + SeriesId = seriesId, + SeriesName = seriesName + } + }; } - public static class MessageFactory + + public static SignalRMessage SeriesAddedEvent(int seriesId, string seriesName, int libraryId) { - /// - /// An update is available for the Kavita instance - /// - public const string UpdateAvailable = "UpdateAvailable"; - /// - /// Used to tell when a scan series completes. This also informs UI to update series metadata - /// - public const string ScanSeries = "ScanSeries"; - /// - /// Event sent out during Refresh Metadata for progress tracking - /// - private const string CoverUpdateProgress = "CoverUpdateProgress"; - /// - /// Series is added to server - /// - public const string SeriesAdded = "SeriesAdded"; - /// - /// Series is removed from server - /// - public const string SeriesRemoved = "SeriesRemoved"; - /// - /// When a user is connects/disconnects from server - /// - public const string OnlineUsers = "OnlineUsers"; - /// - /// When a series is added to a collection - /// - public const string SeriesAddedToCollection = "SeriesAddedToCollection"; - /// - /// Event sent out during backing up the database - /// - private const string BackupDatabaseProgress = "BackupDatabaseProgress"; - /// - /// Event sent out during cleaning up temp and cache folders - /// - private const string CleanupProgress = "CleanupProgress"; - /// - /// Event sent out during downloading of files - /// - private const string DownloadProgress = "DownloadProgress"; - /// - /// A cover was updated - /// - public const string CoverUpdate = "CoverUpdate"; - /// - /// A custom site theme was removed or added - /// - private const string SiteThemeProgress = 
"SiteThemeProgress"; - /// - /// A custom book theme was removed or added - /// - private const string BookThemeProgress = "BookThemeProgress"; - /// - /// A type of event that has progress (determinate or indeterminate). - /// The underlying event will have a name to give details on how to handle. - /// - /// This is not an Event Name, it is used as the method only - public const string NotificationProgress = "NotificationProgress"; - /// - /// Event sent out when Scan Loop is parsing a file - /// - private const string FileScanProgress = "FileScanProgress"; - /// - /// A generic error that can occur in background processing - /// - public const string Error = "Error"; - /// - /// When DB updates are occuring during a library/series scan - /// - private const string ScanProgress = "ScanProgress"; - /// - /// When a library is created/deleted in the Server - /// - public const string LibraryModified = "LibraryModified"; - /// - /// A user's progress was modified - /// - public const string UserProgressUpdate = "UserProgressUpdate"; - /// - /// A user's account or preferences were updated and UI needs to refresh to stay in sync - /// - public const string UserUpdate = "UserUpdate"; - /// - /// When bulk bookmarks are being converted - /// - private const string ConvertBookmarksProgress = "ConvertBookmarksProgress"; - /// - /// When files are being scanned to calculate word count - /// - private const string WordCountAnalyzerProgress = "WordCountAnalyzerProgress"; - /// - /// A generic message that can occur in background processing to inform user, but no direct action is needed - /// - public const string Info = "Info"; - - - public static SignalRMessage ScanSeriesEvent(int libraryId, int seriesId, string seriesName) + return new SignalRMessage() { - return new SignalRMessage() + Name = SeriesAdded, + Body = new { - Name = ScanSeries, - EventType = ProgressEventType.Single, - Body = new - { - LibraryId = libraryId, - SeriesId = seriesId, - SeriesName = seriesName - 
} - }; - } + SeriesId = seriesId, + SeriesName = seriesName, + LibraryId = libraryId + } + }; + } - public static SignalRMessage SeriesAddedEvent(int seriesId, string seriesName, int libraryId) + public static SignalRMessage SeriesRemovedEvent(int seriesId, string seriesName, int libraryId) + { + return new SignalRMessage() { - return new SignalRMessage() + Name = SeriesRemoved, + Body = new { - Name = SeriesAdded, - Body = new - { - SeriesId = seriesId, - SeriesName = seriesName, - LibraryId = libraryId - } - }; - } + SeriesId = seriesId, + SeriesName = seriesName, + LibraryId = libraryId + } + }; + } - public static SignalRMessage SeriesRemovedEvent(int seriesId, string seriesName, int libraryId) + + public static SignalRMessage WordCountAnalyzerProgressEvent(int libraryId, float progress, string eventType, string subtitle = "") + { + return new SignalRMessage() { - return new SignalRMessage() + Name = WordCountAnalyzerProgress, + Title = "Analyzing Word count", + SubTitle = subtitle, + EventType = eventType, + Progress = ProgressType.Determinate, + Body = new { - Name = SeriesRemoved, - Body = new - { - SeriesId = seriesId, - SeriesName = seriesName, - LibraryId = libraryId - } - }; - } + LibraryId = libraryId, + Progress = progress, + EventTime = DateTime.Now + } + }; + } - - public static SignalRMessage WordCountAnalyzerProgressEvent(int libraryId, float progress, string eventType, string subtitle = "") + public static SignalRMessage CoverUpdateProgressEvent(int libraryId, float progress, string eventType, string subtitle = "") + { + return new SignalRMessage() { - return new SignalRMessage() + Name = CoverUpdateProgress, + Title = "Refreshing Covers", + SubTitle = subtitle, + EventType = eventType, + Progress = ProgressType.Determinate, + Body = new { - Name = WordCountAnalyzerProgress, - Title = "Analyzing Word count", - SubTitle = subtitle, - EventType = eventType, - Progress = ProgressType.Determinate, - Body = new - { - LibraryId = libraryId, - Progress = 
progress, - EventTime = DateTime.Now - } - }; - } + LibraryId = libraryId, + Progress = progress, + EventTime = DateTime.Now + } + }; + } - public static SignalRMessage CoverUpdateProgressEvent(int libraryId, float progress, string eventType, string subtitle = "") + public static SignalRMessage BackupDatabaseProgressEvent(float progress, string subtitle = "") + { + return new SignalRMessage() { - return new SignalRMessage() + Name = BackupDatabaseProgress, + Title = "Backing up Database", + SubTitle = subtitle, + EventType = progress switch { - Name = CoverUpdateProgress, - Title = "Refreshing Covers", - SubTitle = subtitle, - EventType = eventType, - Progress = ProgressType.Determinate, - Body = new - { - LibraryId = libraryId, - Progress = progress, - EventTime = DateTime.Now - } - }; - } - - public static SignalRMessage BackupDatabaseProgressEvent(float progress, string subtitle = "") + 0f => "started", + 1f => "ended", + _ => "updated" + }, + Progress = ProgressType.Determinate, + Body = new + { + Progress = progress + } + }; + } + public static SignalRMessage CleanupProgressEvent(float progress, string subtitle = "") + { + return new SignalRMessage() { - return new SignalRMessage() + Name = CleanupProgress, + Title = "Performing Cleanup", + SubTitle = subtitle, + EventType = progress switch { - Name = BackupDatabaseProgress, - Title = "Backing up Database", - SubTitle = subtitle, - EventType = progress switch - { - 0f => "started", - 1f => "ended", - _ => "updated" - }, - Progress = ProgressType.Determinate, - Body = new - { - Progress = progress - } - }; - } - public static SignalRMessage CleanupProgressEvent(float progress, string subtitle = "") + 0f => "started", + 1f => "ended", + _ => "updated" + }, + Progress = ProgressType.Determinate, + Body = new + { + Progress = progress + } + }; + } + + + public static SignalRMessage UpdateVersionEvent(UpdateNotificationDto update) + { + return new SignalRMessage { - return new SignalRMessage() - { - Name = 
CleanupProgress, - Title = "Performing Cleanup", - SubTitle = subtitle, - EventType = progress switch - { - 0f => "started", - 1f => "ended", - _ => "updated" - }, - Progress = ProgressType.Determinate, - Body = new - { - Progress = progress - } - }; - } + Name = UpdateAvailable, + Title = "Update Available", + SubTitle = update.UpdateTitle, + EventType = ProgressEventType.Single, + Progress = ProgressType.None, + Body = update + }; + } - - public static SignalRMessage UpdateVersionEvent(UpdateNotificationDto update) + public static SignalRMessage SendingToDeviceEvent(string subtitle, string eventType) + { + return new SignalRMessage { - return new SignalRMessage - { - Name = UpdateAvailable, - Title = "Update Available", - SubTitle = update.UpdateTitle, - EventType = ProgressEventType.Single, - Progress = ProgressType.None, - Body = update - }; - } + Name = SendingToDevice, + Title = "Sending files to Device", + SubTitle = subtitle, + EventType = eventType, + Progress = ProgressType.Indeterminate, + Body = new { } + }; + } - public static SignalRMessage SeriesAddedToCollectionEvent(int tagId, int seriesId) + public static SignalRMessage SeriesAddedToCollectionEvent(int tagId, int seriesId) + { + return new SignalRMessage { - return new SignalRMessage + Name = SeriesAddedToCollection, + Progress = ProgressType.None, + EventType = ProgressEventType.Single, + Body = new { - Name = SeriesAddedToCollection, - Progress = ProgressType.None, - EventType = ProgressEventType.Single, - Body = new - { - TagId = tagId, - SeriesId = seriesId - } - }; - } + TagId = tagId, + SeriesId = seriesId + } + }; + } - public static SignalRMessage ErrorEvent(string title, string subtitle) + public static SignalRMessage ErrorEvent(string title, string subtitle) + { + return new SignalRMessage { - return new SignalRMessage + Name = Error, + Title = title, + SubTitle = subtitle, + Progress = ProgressType.None, + EventType = ProgressEventType.Single, + Body = new { - Name = Error, Title = 
title, SubTitle = subtitle, - Progress = ProgressType.None, - EventType = ProgressEventType.Single, - Body = new - { - Title = title, - SubTitle = subtitle, - } - }; - } + } + }; + } - public static SignalRMessage InfoEvent(string title, string subtitle) + public static SignalRMessage InfoEvent(string title, string subtitle) + { + return new SignalRMessage { - return new SignalRMessage + Name = Info, + Title = title, + SubTitle = subtitle, + Progress = ProgressType.None, + EventType = ProgressEventType.Single, + Body = new { - Name = Info, Title = title, SubTitle = subtitle, - Progress = ProgressType.None, - EventType = ProgressEventType.Single, - Body = new - { - Title = title, - SubTitle = subtitle, - } - }; - } + } + }; + } - public static SignalRMessage LibraryModifiedEvent(int libraryId, string action) + public static SignalRMessage LibraryModifiedEvent(int libraryId, string action) + { + return new SignalRMessage { - return new SignalRMessage + Name = LibraryModified, + Title = "Library modified", + Progress = ProgressType.None, + EventType = ProgressEventType.Single, + Body = new { - Name = LibraryModified, - Title = "Library modified", - Progress = ProgressType.None, - EventType = ProgressEventType.Single, - Body = new - { - LibrayId = libraryId, - Action = action, - } - }; - } + LibrayId = libraryId, + Action = action, + } + }; + } - public static SignalRMessage DownloadProgressEvent(string username, string downloadName, float progress, string eventType = "updated") + public static SignalRMessage DownloadProgressEvent(string username, string downloadName, float progress, string eventType = "updated") + { + return new SignalRMessage() { - return new SignalRMessage() + Name = DownloadProgress, + Title = $"Downloading {downloadName}", + SubTitle = $"Preparing {username.SentenceCase()} the download of {downloadName}", + EventType = eventType, + Progress = ProgressType.Determinate, + Body = new { - Name = DownloadProgress, - Title = $"Downloading 
{downloadName}", - SubTitle = $"Preparing {username.SentenceCase()} the download of {downloadName}", - EventType = eventType, - Progress = ProgressType.Determinate, - Body = new - { - UserName = username, - DownloadName = downloadName, - Progress = progress - } - }; - } + UserName = username, + DownloadName = downloadName, + Progress = progress + } + }; + } - /// - /// Represents a file being scanned by Kavita for processing and grouping - /// - /// Does not have a progress as it's unknown how many files there are. Instead sends -1 to represent indeterminate - /// - /// - /// - /// - public static SignalRMessage FileScanProgressEvent(string folderPath, string libraryName, string eventType) + /// + /// Represents a file being scanned by Kavita for processing and grouping + /// + /// Does not have a progress as it's unknown how many files there are. Instead sends -1 to represent indeterminate + /// + /// + /// + /// + public static SignalRMessage FileScanProgressEvent(string folderPath, string libraryName, string eventType) + { + return new SignalRMessage() { - return new SignalRMessage() + Name = FileScanProgress, + Title = $"Scanning {libraryName}", + SubTitle = folderPath, + EventType = eventType, + Progress = ProgressType.Indeterminate, + Body = new { - Name = FileScanProgress, Title = $"Scanning {libraryName}", - SubTitle = folderPath, - EventType = eventType, - Progress = ProgressType.Indeterminate, - Body = new - { - Title = $"Scanning {libraryName}", - Subtitle = folderPath, - Filename = folderPath, - EventTime = DateTime.Now, - } - }; - } + Subtitle = folderPath, + Filename = folderPath, + EventTime = DateTime.Now, + } + }; + } - /// - /// This informs the UI with details about what is being processed by the Scanner - /// - /// - /// - /// - /// - public static SignalRMessage LibraryScanProgressEvent(string libraryName, string eventType, string seriesName = "") + /// + /// This informs the UI with details about what is being processed by the Scanner + /// + 
/// + /// + /// + /// + public static SignalRMessage LibraryScanProgressEvent(string libraryName, string eventType, string seriesName = "") + { + return new SignalRMessage() { - return new SignalRMessage() - { - Name = ScanProgress, - Title = $"Processing {seriesName}", - SubTitle = seriesName, - EventType = eventType, - Progress = ProgressType.Indeterminate, - Body = null - }; - } + Name = ScanProgress, + Title = $"Processing {seriesName}", + SubTitle = seriesName, + EventType = eventType, + Progress = ProgressType.Indeterminate, + Body = null + }; + } - public static SignalRMessage CoverUpdateEvent(int id, string entityType) + public static SignalRMessage CoverUpdateEvent(int id, string entityType) + { + return new SignalRMessage() { - return new SignalRMessage() + Name = CoverUpdate, + Title = "Updating Cover", + Progress = ProgressType.None, + Body = new { - Name = CoverUpdate, - Title = "Updating Cover", - Progress = ProgressType.None, - Body = new - { - Id = id, - EntityType = entityType, - } - }; - } + Id = id, + EntityType = entityType, + } + }; + } - public static SignalRMessage UserProgressUpdateEvent(int userId, string username, int seriesId, int volumeId, int chapterId, int pagesRead) + public static SignalRMessage UserProgressUpdateEvent(int userId, string username, int seriesId, int volumeId, int chapterId, int pagesRead) + { + return new SignalRMessage() { - return new SignalRMessage() + Name = UserProgressUpdate, + Title = "Updating User Progress", + Progress = ProgressType.None, + Body = new { - Name = UserProgressUpdate, - Title = "Updating User Progress", - Progress = ProgressType.None, - Body = new - { - UserId = userId, - Username = username, - SeriesId = seriesId, - VolumeId = volumeId, - ChapterId = chapterId, - PagesRead = pagesRead, - } - }; - } + UserId = userId, + Username = username, + SeriesId = seriesId, + VolumeId = volumeId, + ChapterId = chapterId, + PagesRead = pagesRead, + } + }; + } - public static SignalRMessage 
SiteThemeProgressEvent(string subtitle, string themeName, string eventType) + public static SignalRMessage SiteThemeProgressEvent(string subtitle, string themeName, string eventType) + { + return new SignalRMessage() { - return new SignalRMessage() + Name = SiteThemeProgress, + Title = "Scanning Site Theme", + SubTitle = subtitle, + EventType = eventType, + Progress = ProgressType.Indeterminate, + Body = new { - Name = SiteThemeProgress, - Title = "Scanning Site Theme", - SubTitle = subtitle, - EventType = eventType, - Progress = ProgressType.Indeterminate, - Body = new - { - ThemeName = themeName, - } - }; - } + ThemeName = themeName, + } + }; + } - public static SignalRMessage BookThemeProgressEvent(string subtitle, string themeName, string eventType) + public static SignalRMessage BookThemeProgressEvent(string subtitle, string themeName, string eventType) + { + return new SignalRMessage() { - return new SignalRMessage() + Name = BookThemeProgress, + Title = "Scanning Book Theme", + SubTitle = subtitle, + EventType = eventType, + Progress = ProgressType.Indeterminate, + Body = new { - Name = BookThemeProgress, - Title = "Scanning Book Theme", - SubTitle = subtitle, - EventType = eventType, - Progress = ProgressType.Indeterminate, - Body = new - { - ThemeName = themeName, - } - }; - } + ThemeName = themeName, + } + }; + } - public static SignalRMessage UserUpdateEvent(int userId, string userName) + public static SignalRMessage UserUpdateEvent(int userId, string userName) + { + return new SignalRMessage() { - return new SignalRMessage() + Name = UserUpdate, + Title = "User Update", + Progress = ProgressType.None, + Body = new { - Name = UserUpdate, - Title = "User Update", - Progress = ProgressType.None, - Body = new - { - UserId = userId, - UserName = userName - } - }; - } + UserId = userId, + UserName = userName + } + }; + } - public static SignalRMessage ConvertBookmarksProgressEvent(float progress, string eventType) + public static SignalRMessage 
ConvertBookmarksProgressEvent(float progress, string eventType) + { + return new SignalRMessage() { - return new SignalRMessage() + Name = ConvertBookmarksProgress, + Title = "Converting Bookmarks to WebP", + SubTitle = string.Empty, + EventType = eventType, + Progress = ProgressType.Determinate, + Body = new { - Name = ConvertBookmarksProgress, - Title = "Converting Bookmarks to WebP", - SubTitle = string.Empty, - EventType = eventType, - Progress = ProgressType.Determinate, - Body = new - { - Progress = progress, - EventTime = DateTime.Now - } - }; - } + Progress = progress, + EventTime = DateTime.Now + } + }; } } diff --git a/API/SignalR/MessageHub.cs b/API/SignalR/MessageHub.cs index dd2e2b768..e56dfeaa0 100644 --- a/API/SignalR/MessageHub.cs +++ b/API/SignalR/MessageHub.cs @@ -1,47 +1,45 @@ using System; -using System.Collections.Generic; using System.Threading.Tasks; -using API.Data; using API.Extensions; using API.SignalR.Presence; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.SignalR; -namespace API.SignalR +namespace API.SignalR; + +/// +/// Generic hub for sending messages to UI +/// +[Authorize] +public class MessageHub : Hub { - /// - /// Generic hub for sending messages to UI - /// - [Authorize] - public class MessageHub : Hub + private readonly IPresenceTracker _tracker; + + public MessageHub(IPresenceTracker tracker) { - private readonly IPresenceTracker _tracker; + _tracker = tracker; + } - public MessageHub(IPresenceTracker tracker) - { - _tracker = tracker; - } + public override async Task OnConnectedAsync() + { + await _tracker.UserConnected(Context.User.GetUsername(), Context.ConnectionId); - public override async Task OnConnectedAsync() - { - await _tracker.UserConnected(Context.User.GetUsername(), Context.ConnectionId); - - var currentUsers = await PresenceTracker.GetOnlineUsers(); - await Clients.All.SendAsync(MessageFactory.OnlineUsers, currentUsers); + var currentUsers = await PresenceTracker.GetOnlineUsers(); + await 
Clients.All.SendAsync(MessageFactory.OnlineUsers, currentUsers); - await base.OnConnectedAsync(); - } + await base.OnConnectedAsync(); + } - public override async Task OnDisconnectedAsync(Exception exception) - { - await _tracker.UserDisconnected(Context.User.GetUsername(), Context.ConnectionId); + public override async Task OnDisconnectedAsync(Exception exception) + { + await _tracker.UserDisconnected(Context.User.GetUsername(), Context.ConnectionId); - var currentUsers = await PresenceTracker.GetOnlineUsers(); - await Clients.All.SendAsync(MessageFactory.OnlineUsers, currentUsers); + var currentUsers = await PresenceTracker.GetOnlineUsers(); + await Clients.All.SendAsync(MessageFactory.OnlineUsers, currentUsers); - await base.OnDisconnectedAsync(exception); - } + await base.OnDisconnectedAsync(exception); } } + diff --git a/API/SignalR/Presence/PresenceTracker.cs b/API/SignalR/Presence/PresenceTracker.cs index 40cec42d0..5cf847c6e 100644 --- a/API/SignalR/Presence/PresenceTracker.cs +++ b/API/SignalR/Presence/PresenceTracker.cs @@ -4,112 +4,110 @@ using System.Linq; using System.Threading.Tasks; using API.Data; -namespace API.SignalR.Presence +namespace API.SignalR.Presence; + +public interface IPresenceTracker { - public interface IPresenceTracker - { - Task UserConnected(string username, string connectionId); - Task UserDisconnected(string username, string connectionId); - Task GetOnlineAdmins(); - Task> GetConnectionsForUser(string username); + Task UserConnected(string username, string connectionId); + Task UserDisconnected(string username, string connectionId); + Task GetOnlineAdmins(); + Task> GetConnectionsForUser(string username); +} + +internal class ConnectionDetail +{ + public List ConnectionIds { get; set; } + public bool IsAdmin { get; set; } +} + +// TODO: This can respond to UserRoleUpdate events to handle online users +/// +/// This is a singleton service for tracking what users have a SignalR connection and their difference connectionIds +/// 
+public class PresenceTracker : IPresenceTracker +{ + private readonly IUnitOfWork _unitOfWork; + private static readonly Dictionary OnlineUsers = new Dictionary(); + + public PresenceTracker(IUnitOfWork unitOfWork) + { + _unitOfWork = unitOfWork; } - internal class ConnectionDetail + public async Task UserConnected(string username, string connectionId) { - public List ConnectionIds { get; set; } - public bool IsAdmin { get; set; } + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(username); + if (user == null) return; + var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user); + lock (OnlineUsers) + { + if (OnlineUsers.ContainsKey(username)) + { + OnlineUsers[username].ConnectionIds.Add(connectionId); + } + else + { + OnlineUsers.Add(username, new ConnectionDetail() + { + ConnectionIds = new List() {connectionId}, + IsAdmin = isAdmin + }); + } + } + + // Update the last active for the user + user.LastActive = DateTime.Now; + await _unitOfWork.CommitAsync(); } - // TODO: This can respond to UserRoleUpdate events to handle online users - /// - /// This is a singleton service for tracking what users have a SignalR connection and their difference connectionIds - /// - public class PresenceTracker : IPresenceTracker + public Task UserDisconnected(string username, string connectionId) { - private readonly IUnitOfWork _unitOfWork; - private static readonly Dictionary OnlineUsers = new Dictionary(); - - public PresenceTracker(IUnitOfWork unitOfWork) + lock (OnlineUsers) { - _unitOfWork = unitOfWork; - } + if (!OnlineUsers.ContainsKey(username)) return Task.CompletedTask; - public async Task UserConnected(string username, string connectionId) - { - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(username); - if (user == null) return; - var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user); - lock (OnlineUsers) + OnlineUsers[username].ConnectionIds.Remove(connectionId); + + if 
(OnlineUsers[username].ConnectionIds.Count == 0) { - if (OnlineUsers.ContainsKey(username)) - { - OnlineUsers[username].ConnectionIds.Add(connectionId); - } - else - { - OnlineUsers.Add(username, new ConnectionDetail() - { - ConnectionIds = new List() {connectionId}, - IsAdmin = isAdmin - }); - } + OnlineUsers.Remove(username); } - - // Update the last active for the user - user.LastActive = DateTime.Now; - await _unitOfWork.CommitAsync(); } + return Task.CompletedTask; + } - public Task UserDisconnected(string username, string connectionId) + public static Task GetOnlineUsers() + { + string[] onlineUsers; + lock (OnlineUsers) { - lock (OnlineUsers) - { - if (!OnlineUsers.ContainsKey(username)) return Task.CompletedTask; - - OnlineUsers[username].ConnectionIds.Remove(connectionId); - - if (OnlineUsers[username].ConnectionIds.Count == 0) - { - OnlineUsers.Remove(username); - } - } - return Task.CompletedTask; + onlineUsers = OnlineUsers.OrderBy(k => k.Key).Select(k => k.Key).ToArray(); } - public static Task GetOnlineUsers() + return Task.FromResult(onlineUsers); + } + + public Task GetOnlineAdmins() + { + string[] onlineUsers; + lock (OnlineUsers) { - string[] onlineUsers; - lock (OnlineUsers) - { - onlineUsers = OnlineUsers.OrderBy(k => k.Key).Select(k => k.Key).ToArray(); - } - - return Task.FromResult(onlineUsers); + onlineUsers = OnlineUsers.Where(pair => pair.Value.IsAdmin).OrderBy(k => k.Key).Select(k => k.Key).ToArray(); } - public Task GetOnlineAdmins() + + return Task.FromResult(onlineUsers); + } + + public Task> GetConnectionsForUser(string username) + { + List connectionIds; + lock (OnlineUsers) { - // TODO: This might end in stale data, we want to get the online users, query against DB to check if they are admins then return - string[] onlineUsers; - lock (OnlineUsers) - { - onlineUsers = OnlineUsers.Where(pair => pair.Value.IsAdmin).OrderBy(k => k.Key).Select(k => k.Key).ToArray(); - } - - - return Task.FromResult(onlineUsers); + connectionIds = 
OnlineUsers.GetValueOrDefault(username)?.ConnectionIds; } - public Task> GetConnectionsForUser(string username) - { - List connectionIds; - lock (OnlineUsers) - { - connectionIds = OnlineUsers.GetValueOrDefault(username)?.ConnectionIds; - } - - return Task.FromResult(connectionIds ?? new List()); - } + return Task.FromResult(connectionIds ?? new List()); } } diff --git a/API/SignalR/SignalRMessage.cs b/API/SignalR/SignalRMessage.cs index d9564d027..6c8afe844 100644 --- a/API/SignalR/SignalRMessage.cs +++ b/API/SignalR/SignalRMessage.cs @@ -1,39 +1,38 @@ using System; -namespace API.SignalR +namespace API.SignalR; + +/// +/// Payload for SignalR messages to Frontend +/// +public class SignalRMessage { /// - /// Payload for SignalR messages to Frontend + /// Body of the event type /// - public class SignalRMessage - { - /// - /// Body of the event type - /// - public object Body { get; set; } - public string Name { get; set; } - /// - /// User friendly Title of the Event - /// - /// Scanning Manga - public string Title { get; set; } = string.Empty; - /// - /// User friendly subtitle. Should have extra info - /// - /// C:/manga/Accel World V01.cbz - public string SubTitle { get; set; } = string.Empty; - /// - /// Represents what this represents. started | updated | ended | single - /// - /// - public string EventType { get; set; } = ProgressEventType.Updated; - /// - /// How should progress be represented. If Determinate, the Body MUST have a Progress float on it. - /// - public string Progress { get; set; } = ProgressType.None; - /// - /// When event took place - /// - public readonly DateTime EventTime = DateTime.Now; - } + public object Body { get; set; } + public string Name { get; set; } + /// + /// User friendly Title of the Event + /// + /// Scanning Manga + public string Title { get; set; } = string.Empty; + /// + /// User friendly subtitle. 
Should have extra info + /// + /// C:/manga/Accel World V01.cbz + public string SubTitle { get; set; } = string.Empty; + /// + /// Represents what this represents. started | updated | ended | single + /// + /// + public string EventType { get; set; } = ProgressEventType.Updated; + /// + /// How should progress be represented. If Determinate, the Body MUST have a Progress float on it. + /// + public string Progress { get; set; } = ProgressType.None; + /// + /// When event took place + /// + public readonly DateTime EventTime = DateTime.Now; } diff --git a/API/Startup.cs b/API/Startup.cs index 6297453a0..00351a3fa 100644 --- a/API/Startup.cs +++ b/API/Startup.cs @@ -5,11 +5,11 @@ using System.Linq; using System.Net; using System.Net.Sockets; using System.Threading.Tasks; -using API.Constants; using API.Data; using API.Entities; using API.Entities.Enums; using API.Extensions; +using API.Logging; using API.Middleware; using API.Services; using API.Services.HostedServices; @@ -22,7 +22,6 @@ using Kavita.Common; using Kavita.Common.EnvironmentInfo; using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Hosting; -using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Http.Features; using Microsoft.AspNetCore.HttpOverrides; using Microsoft.AspNetCore.Identity; @@ -35,164 +34,190 @@ using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; using Microsoft.Net.Http.Headers; using Microsoft.OpenApi.Models; +using Serilog; using TaskScheduler = API.Services.TaskScheduler; -namespace API +namespace API; + +public class Startup { - public class Startup + private readonly IConfiguration _config; + private readonly IWebHostEnvironment _env; + + public Startup(IConfiguration config, IWebHostEnvironment env) { - private readonly IConfiguration _config; - private readonly IWebHostEnvironment _env; + _config = config; + _env = env; + } - public Startup(IConfiguration config, IWebHostEnvironment env) - { - _config = config; - _env = env; - } + // This method 
gets called by the runtime. Use this method to add services to the container. + public void ConfigureServices(IServiceCollection services) + { + services.AddApplicationServices(_config, _env); - // This method gets called by the runtime. Use this method to add services to the container. - public void ConfigureServices(IServiceCollection services) + services.AddControllers(options => { - services.AddApplicationServices(_config, _env); - services.AddControllers(options => + options.CacheProfiles.Add("Images", + new CacheProfile() + { + Duration = 60, + Location = ResponseCacheLocation.None, + NoStore = false + }); + options.CacheProfiles.Add("Hour", + new CacheProfile() + { + Duration = 60 * 60, + Location = ResponseCacheLocation.None, + NoStore = false + }); + options.CacheProfiles.Add("10Minute", + new CacheProfile() + { + Duration = 60 * 10, + Location = ResponseCacheLocation.None, + NoStore = false + }); + options.CacheProfiles.Add("5Minute", + new CacheProfile() + { + Duration = 60 * 5, + Location = ResponseCacheLocation.None, + }); + // Instant is a very quick cache, because we can't bust based on the query params, but rather body + options.CacheProfiles.Add("Instant", + new CacheProfile() + { + Duration = 30, + Location = ResponseCacheLocation.None, + }); + }); + services.Configure(options => + { + options.ForwardedHeaders = ForwardedHeaders.All; + foreach(var proxy in _config.GetSection("KnownProxies").AsEnumerable().Where(c => c.Value != null)) { + options.KnownProxies.Add(IPAddress.Parse(proxy.Value)); + } + }); + services.AddCors(); + services.AddIdentityServices(_config); + services.AddSwaggerGen(c => + { + c.SwaggerDoc("v1", new OpenApiInfo() { - options.CacheProfiles.Add("Images", - new CacheProfile() - { - Duration = 60, - Location = ResponseCacheLocation.None, - NoStore = false - }); - options.CacheProfiles.Add("Hour", - new CacheProfile() - { - Duration = 60 * 10, - Location = ResponseCacheLocation.None, - NoStore = false - }); + Description = 
"Kavita provides a set of APIs that are authenticated by JWT. JWT token can be copied from local storage.", + Title = "Kavita API", + Version = "v1", }); - services.Configure(options => - { - options.ForwardedHeaders = ForwardedHeaders.All; - foreach(var proxy in _config.GetSection("KnownProxies").AsEnumerable().Where(c => c.Value != null)) { - options.KnownProxies.Add(IPAddress.Parse(proxy.Value)); + + + var filePath = Path.Combine(AppContext.BaseDirectory, "API.xml"); + c.IncludeXmlComments(filePath, true); + c.AddSecurityDefinition("Bearer", new OpenApiSecurityScheme { + In = ParameterLocation.Header, + Description = "Please insert JWT with Bearer into field", + Name = "Authorization", + Type = SecuritySchemeType.ApiKey + }); + c.AddSecurityRequirement(new OpenApiSecurityRequirement { + { + new OpenApiSecurityScheme + { + Reference = new OpenApiReference + { + Type = ReferenceType.SecurityScheme, + Id = "Bearer" + } + }, + Array.Empty() } }); - services.AddCors(); - services.AddIdentityServices(_config); - services.AddSwaggerGen(c => + + c.AddServer(new OpenApiServer() { - c.SwaggerDoc("v1", new OpenApiInfo() - { - Description = "Kavita provides a set of APIs that are authenticated by JWT. 
JWT token can be copied from local storage.", - Title = "Kavita API", - Version = "v1", - }); - - - var filePath = Path.Combine(AppContext.BaseDirectory, "API.xml"); - c.IncludeXmlComments(filePath, true); - c.AddSecurityDefinition("Bearer", new OpenApiSecurityScheme { - In = ParameterLocation.Header, - Description = "Please insert JWT with Bearer into field", - Name = "Authorization", - Type = SecuritySchemeType.ApiKey - }); - c.AddSecurityRequirement(new OpenApiSecurityRequirement { - { - new OpenApiSecurityScheme - { - Reference = new OpenApiReference - { - Type = ReferenceType.SecurityScheme, - Id = "Bearer" - } - }, - Array.Empty() - } - }); - - c.AddServer(new OpenApiServer() - { - Description = "Custom Url", - Url = "/" - }); - - c.AddServer(new OpenApiServer() - { - Description = "Local Server", - Url = "http://localhost:5000/", - }); - - c.AddServer(new OpenApiServer() - { - Url = "https://demo.kavitareader.com/", - Description = "Kavita Demo" - }); - - c.AddServer(new OpenApiServer() - { - Url = "http://" + GetLocalIpAddress() + ":5000/", - Description = "Local IP" - }); - - }); - services.AddResponseCompression(options => - { - options.Providers.Add(); - options.Providers.Add(); - options.MimeTypes = - ResponseCompressionDefaults.MimeTypes.Concat( - new[] { "image/jpeg", "image/jpg" }); - options.EnableForHttps = true; - }); - services.Configure(options => - { - options.Level = CompressionLevel.Fastest; + Description = "Custom Url", + Url = "/" }); - services.AddResponseCaching(); - - services.AddHangfire(configuration => configuration - .UseSimpleAssemblyNameTypeSerializer() - .UseRecommendedSerializerSettings() - .UseMemoryStorage()); // UseSQLiteStorage - SQLite has some issues around resuming jobs when aborted - - // Add the processing server as IHostedService - services.AddHangfireServer(options => + c.AddServer(new OpenApiServer() { - options.Queues = new[] {TaskScheduler.ScanQueue, TaskScheduler.DefaultQueue}; + Description = "Local Server", + Url 
= "http://localhost:5000/", }); - // Add IHostedService for startup tasks - // Any services that should be bootstrapped go here - services.AddHostedService(); - } - // This method gets called by the runtime. Use this method to configure the HTTP request pipeline. - public void Configure(IApplicationBuilder app, IBackgroundJobClient backgroundJobs, IWebHostEnvironment env, - IHostApplicationLifetime applicationLifetime, IServiceProvider serviceProvider, ICacheService cacheService, - IDirectoryService directoryService, IUnitOfWork unitOfWork, IBackupService backupService, IImageService imageService) + c.AddServer(new OpenApiServer() + { + Url = "https://demo.kavitareader.com/", + Description = "Kavita Demo" + }); + + c.AddServer(new OpenApiServer() + { + Url = "http://" + GetLocalIpAddress() + ":5000/", + Description = "Local IP" + }); + + }); + services.AddResponseCompression(options => { + options.Providers.Add(); + options.Providers.Add(); + options.MimeTypes = + ResponseCompressionDefaults.MimeTypes.Concat( + new[] { "image/jpeg", "image/jpg" }); + options.EnableForHttps = true; + }); + services.Configure(options => + { + options.Level = CompressionLevel.Fastest; + }); - // Apply Migrations - try - { - Task.Run(async () => + services.AddResponseCaching(); + + services.AddHangfire(configuration => configuration + .UseSimpleAssemblyNameTypeSerializer() + .UseRecommendedSerializerSettings() + .UseSQLiteStorage("config/Hangfire.db")); // UseSQLiteStorage - SQLite has some issues around resuming jobs when aborted + + // Add the processing server as IHostedService + services.AddHangfireServer(options => + { + options.Queues = new[] {TaskScheduler.ScanQueue, TaskScheduler.DefaultQueue}; + }); + // Add IHostedService for startup tasks + // Any services that should be bootstrapped go here + services.AddHostedService(); + } + + // This method gets called by the runtime. Use this method to configure the HTTP request pipeline. 
+ public void Configure(IApplicationBuilder app, IBackgroundJobClient backgroundJobs, IWebHostEnvironment env, + IHostApplicationLifetime applicationLifetime, IServiceProvider serviceProvider, ICacheService cacheService, + IDirectoryService directoryService, IUnitOfWork unitOfWork, IBackupService backupService, IImageService imageService) + { + + // Apply Migrations + try + { + Task.Run(async () => { // Apply all migrations on startup var logger = serviceProvider.GetRequiredService>(); var userManager = serviceProvider.GetRequiredService>(); var themeService = serviceProvider.GetRequiredService(); var dataContext = serviceProvider.GetRequiredService(); + var readingListService = serviceProvider.GetRequiredService(); // Only run this if we are upgrading await MigrateChangePasswordRoles.Migrate(unitOfWork, userManager); - await MigrateRemoveExtraThemes.Migrate(unitOfWork, themeService); - // Only needed for v0.5.5.x and v0.5.6 - await MigrateNormalizedLocalizedName.Migrate(unitOfWork, dataContext, logger); + // only needed for v0.5.4 and v0.6.0 + await MigrateNormalizedEverything.Migrate(unitOfWork, dataContext, logger); + + // v0.6.0 + await MigrateChangeRestrictionRoles.Migrate(unitOfWork, userManager, logger); + await MigrateReadingListAgeRating.Migrate(unitOfWork, dataContext, readingListService, logger); // Update the version in the DB after all migrations are run var installVersion = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallVersion); @@ -201,134 +226,132 @@ namespace API await unitOfWork.CommitAsync(); }).GetAwaiter() - .GetResult(); - } - catch (Exception ex) - { - var logger = serviceProvider.GetRequiredService>(); - logger.LogCritical(ex, "An error occurred during migration"); - } - - - - app.UseMiddleware(); - - Task.Run(async () => - { - var allowSwaggerUi = (await unitOfWork.SettingsRepository.GetSettingsDtoAsync()) - .EnableSwaggerUi; - - if (env.IsDevelopment() || allowSwaggerUi) - { - app.UseSwagger(); - 
app.UseSwaggerUI(c => - { - c.SwaggerEndpoint("/swagger/v1/swagger.json", "Kavita API " + BuildInfo.Version); - }); - } - }); - - if (env.IsDevelopment()) - { - app.UseHangfireDashboard(); - } - - app.UseResponseCompression(); - - app.UseForwardedHeaders(); - - app.UseRouting(); - - // Ordering is important. Cors, authentication, authorization - if (env.IsDevelopment()) - { - app.UseCors(policy => policy - .AllowAnyHeader() - .AllowAnyMethod() - .AllowCredentials() // For SignalR token query param - .WithOrigins("http://localhost:4200", $"http://{GetLocalIpAddress()}:4200") - .WithExposedHeaders("Content-Disposition", "Pagination")); - } - - app.UseResponseCaching(); - - app.UseAuthentication(); - - app.UseAuthorization(); - - app.UseDefaultFiles(); - - app.UseStaticFiles(new StaticFileOptions - { - ContentTypeProvider = new FileExtensionContentTypeProvider(), - HttpsCompression = HttpsCompressionMode.Compress, - OnPrepareResponse = ctx => - { - const int durationInSeconds = 60 * 60 * 24; - ctx.Context.Response.Headers[HeaderNames.CacheControl] = "public,max-age=" + durationInSeconds; - } - }); - - app.Use(async (context, next) => - { - // Note: I removed this as I caught Chrome caching api responses when it shouldn't have - // context.Response.GetTypedHeaders().CacheControl = - // new CacheControlHeaderValue() - // { - // Public = false, - // MaxAge = TimeSpan.FromSeconds(10), - // }; - context.Response.Headers[HeaderNames.Vary] = - new[] { "Accept-Encoding" }; - - // Don't let the site be iframed outside the same origin (clickjacking) - context.Response.Headers.XFrameOptions = "SAMEORIGIN"; - - // Setup CSP to ensure we load assets only from these origins - context.Response.Headers.Add("Content-Security-Policy", "frame-ancestors 'none';"); - - await next(); - }); - - app.UseEndpoints(endpoints => - { - endpoints.MapControllers(); - endpoints.MapHub("hubs/messages"); - endpoints.MapHangfireDashboard(); - endpoints.MapFallbackToController("Index", "Fallback"); - 
}); - - applicationLifetime.ApplicationStopping.Register(OnShutdown); - applicationLifetime.ApplicationStarted.Register(() => - { - try - { - var logger = serviceProvider.GetRequiredService>(); - logger.LogInformation("Kavita - v{Version}", BuildInfo.Version); - } - catch (Exception) - { - /* Swallow Exception */ - } - Console.WriteLine($"Kavita - v{BuildInfo.Version}"); - }); + .GetResult(); } - - private static void OnShutdown() + catch (Exception ex) { - Console.WriteLine("Server is shutting down. Please allow a few seconds to stop any background jobs..."); - TaskScheduler.Client.Dispose(); - System.Threading.Thread.Sleep(1000); - Console.WriteLine("You may now close the application window."); + var logger = serviceProvider.GetRequiredService>(); + logger.LogCritical(ex, "An error occurred during migration"); } - private static string GetLocalIpAddress() + + + app.UseMiddleware(); + + Task.Run(async () => { - using var socket = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, 0); - socket.Connect("8.8.8.8", 65530); - if (socket.LocalEndPoint is IPEndPoint endPoint) return endPoint.Address.ToString(); - throw new KavitaException("No network adapters with an IPv4 address in the system!"); + var allowSwaggerUi = (await unitOfWork.SettingsRepository.GetSettingsDtoAsync()) + .EnableSwaggerUi; + + if (env.IsDevelopment() || allowSwaggerUi) + { + app.UseSwagger(); + app.UseSwaggerUI(c => + { + c.SwaggerEndpoint("/swagger/v1/swagger.json", "Kavita API " + BuildInfo.Version); + }); + } + }); + + if (env.IsDevelopment()) + { + app.UseHangfireDashboard(); } + app.UseResponseCompression(); + + app.UseForwardedHeaders(); + + app.UseRouting(); + + // Ordering is important. 
Cors, authentication, authorization + if (env.IsDevelopment()) + { + app.UseCors(policy => policy + .AllowAnyHeader() + .AllowAnyMethod() + .AllowCredentials() // For SignalR token query param + .WithOrigins("http://localhost:4200", $"http://{GetLocalIpAddress()}:4200") + .WithExposedHeaders("Content-Disposition", "Pagination")); + } + + app.UseResponseCaching(); + + app.UseAuthentication(); + + app.UseAuthorization(); + + app.UseDefaultFiles(); + + app.UseStaticFiles(new StaticFileOptions + { + ContentTypeProvider = new FileExtensionContentTypeProvider(), + HttpsCompression = HttpsCompressionMode.Compress, + OnPrepareResponse = ctx => + { + ctx.Context.Response.Headers[HeaderNames.CacheControl] = "public,max-age=" + TimeSpan.FromHours(24); + } + }); + + app.UseSerilogRequestLogging(opts + => + { + opts.EnrichDiagnosticContext = LogEnricher.EnrichFromRequest; + }); + + app.Use(async (context, next) => + { + context.Response.Headers[HeaderNames.Vary] = + new[] { "Accept-Encoding" }; + + // Don't let the site be iframed outside the same origin (clickjacking) + context.Response.Headers.XFrameOptions = "SAMEORIGIN"; + + // Setup CSP to ensure we load assets only from these origins + context.Response.Headers.Add("Content-Security-Policy", "frame-ancestors 'none';"); + + await next(); + }); + + app.UseEndpoints(endpoints => + { + endpoints.MapControllers(); + endpoints.MapHub("hubs/messages"); + endpoints.MapHub("hubs/logs"); + endpoints.MapHangfireDashboard(); + endpoints.MapFallbackToController("Index", "Fallback"); + }); + + applicationLifetime.ApplicationStopping.Register(OnShutdown); + applicationLifetime.ApplicationStarted.Register(() => + { + try + { + var logger = serviceProvider.GetRequiredService>(); + logger.LogInformation("Kavita - v{Version}", BuildInfo.Version); + } + catch (Exception) + { + /* Swallow Exception */ + } + Console.WriteLine($"Kavita - v{BuildInfo.Version}"); + }); } + + private static void OnShutdown() + { + Console.WriteLine("Server is 
shutting down. Please allow a few seconds to stop any background jobs..."); + TaskScheduler.Client.Dispose(); + System.Threading.Thread.Sleep(1000); + Console.WriteLine("You may now close the application window."); + } + + private static string GetLocalIpAddress() + { + using var socket = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, 0); + socket.Connect("8.8.8.8", 65530); + if (socket.LocalEndPoint is IPEndPoint endPoint) return endPoint.Address.ToString(); + throw new KavitaException("No network adapters with an IPv4 address in the system!"); + } + } diff --git a/API/config/appsettings.Development.json b/API/config/appsettings.Development.json index bd19064c4..2bb2debc0 100644 --- a/API/config/appsettings.Development.json +++ b/API/config/appsettings.Development.json @@ -1,22 +1,4 @@ { - "ConnectionStrings": { - "DefaultConnection": "Data source=config//kavita.db" - }, "TokenKey": "super secret unguessable key", - "Logging": { - "LogLevel": { - "Default": "Debug", - "Microsoft": "Error", - "Microsoft.Hosting.Lifetime": "Error", - "Hangfire": "Error", - "Microsoft.AspNetCore.Hosting.Internal.WebHost": "Error" - }, - "File": { - "Path": "config//logs/kavita.log", - "Append": "True", - "FileSizeLimitBytes": 26214400, - "MaxRollingFiles": 1 - } - }, "Port": 5000 } diff --git a/API/config/appsettings.json b/API/config/appsettings.json index 19637b881..be6c0b319 100644 --- a/API/config/appsettings.json +++ b/API/config/appsettings.json @@ -1,22 +1,4 @@ { - "ConnectionStrings": { - "DefaultConnection": "Data source=config/kavita.db" - }, "TokenKey": "super secret unguessable key", - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft": "Error", - "Microsoft.Hosting.Lifetime": "Error", - "Hangfire": "Error", - "Microsoft.AspNetCore.Hosting.Internal.WebHost": "Error" - }, - "File": { - "Path": "config/logs/kavita.log", - "Append": "True", - "FileSizeLimitBytes": 10485760, - "MaxRollingFiles": 1 - } - }, "Port": 5000 } diff --git a/Dockerfile 
b/Dockerfile index c8e090534..c7757581c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,7 +29,7 @@ EXPOSE 5000 WORKDIR /kavita -HEALTHCHECK --interval=30s --timeout=15s --start-period=30s --retries=3 CMD curl --fail http://localhost:5000 || exit 1 +HEALTHCHECK --interval=30s --timeout=15s --start-period=30s --retries=3 CMD curl --fail http://localhost:5000/api/health || exit 1 ENTRYPOINT [ "/bin/bash" ] CMD ["/entrypoint.sh"] diff --git a/Kavita.Common/Configuration.cs b/Kavita.Common/Configuration.cs index 55aa99598..0302372d6 100644 --- a/Kavita.Common/Configuration.cs +++ b/Kavita.Common/Configuration.cs @@ -5,348 +5,139 @@ using System.Text.Json; using Kavita.Common.EnvironmentInfo; using Microsoft.Extensions.Hosting; -namespace Kavita.Common +namespace Kavita.Common; + +public static class Configuration { - public static class Configuration + public static readonly string AppSettingsFilename = Path.Join("config", GetAppSettingFilename()); + + public static int Port { - public static readonly string AppSettingsFilename = Path.Join("config", GetAppSettingFilename()); + get => GetPort(GetAppSettingFilename()); + set => SetPort(GetAppSettingFilename(), value); + } - public static string Branch + public static string JwtToken + { + get => GetJwtToken(GetAppSettingFilename()); + set => SetJwtToken(GetAppSettingFilename(), value); + } + + private static string GetAppSettingFilename() + { + if (!string.IsNullOrEmpty(AppSettingsFilename)) { - get => GetBranch(GetAppSettingFilename()); - set => SetBranch(GetAppSettingFilename(), value); + return AppSettingsFilename; } - public static int Port - { - get => GetPort(GetAppSettingFilename()); - set => SetPort(GetAppSettingFilename(), value); - } + var environment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT"); + var isDevelopment = environment == Environments.Development; + return "appsettings" + (isDevelopment ? 
".Development" : string.Empty) + ".json"; + } - public static string JwtToken - { - get => GetJwtToken(GetAppSettingFilename()); - set => SetJwtToken(GetAppSettingFilename(), value); - } + #region JWT Token - public static string LogLevel + private static string GetJwtToken(string filePath) + { + try { - get => GetLogLevel(GetAppSettingFilename()); - set => SetLogLevel(GetAppSettingFilename(), value); - } + var json = File.ReadAllText(filePath); + var jsonObj = JsonSerializer.Deserialize(json); + const string key = "TokenKey"; - public static string LogPath - { - get => GetLoggingFile(GetAppSettingFilename()); - set => SetLoggingFile(GetAppSettingFilename(), value); - } - - public static string DatabasePath - { - get => GetDatabasePath(GetAppSettingFilename()); - set => SetDatabasePath(GetAppSettingFilename(), value); - } - - private static string GetAppSettingFilename() - { - if (!string.IsNullOrEmpty(AppSettingsFilename)) + if (jsonObj.TryGetProperty(key, out JsonElement tokenElement)) { - return AppSettingsFilename; - } - - var environment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT"); - var isDevelopment = environment == Environments.Development; - return "appsettings" + (isDevelopment ? 
".Development" : string.Empty) + ".json"; - } - - #region JWT Token - - private static string GetJwtToken(string filePath) - { - try - { - var json = File.ReadAllText(filePath); - var jsonObj = JsonSerializer.Deserialize(json); - const string key = "TokenKey"; - - if (jsonObj.TryGetProperty(key, out JsonElement tokenElement)) - { - return tokenElement.GetString(); - } - - return string.Empty; - } - catch (Exception ex) - { - Console.WriteLine("Error reading app settings: " + ex.Message); + return tokenElement.GetString(); } return string.Empty; } - - private static void SetJwtToken(string filePath, string token) + catch (Exception ex) { - try - { - var currentToken = GetJwtToken(filePath); - var json = File.ReadAllText(filePath) - .Replace("\"TokenKey\": \"" + currentToken, "\"TokenKey\": \"" + token); - File.WriteAllText(filePath, json); - } - catch (Exception) - { - /* Swallow exception */ - } + Console.WriteLine("Error reading app settings: " + ex.Message); } - public static bool CheckIfJwtTokenSet() - { - try - { - return GetJwtToken(GetAppSettingFilename()) != "super secret unguessable key"; - } - catch (Exception ex) - { - Console.WriteLine("Error writing app settings: " + ex.Message); - } + return string.Empty; + } - return false; + private static void SetJwtToken(string filePath, string token) + { + try + { + var currentToken = GetJwtToken(filePath); + var json = File.ReadAllText(filePath) + .Replace("\"TokenKey\": \"" + currentToken, "\"TokenKey\": \"" + token); + File.WriteAllText(filePath, json); + } + catch (Exception) + { + /* Swallow exception */ + } + } + + public static bool CheckIfJwtTokenSet() + { + try + { + return GetJwtToken(GetAppSettingFilename()) != "super secret unguessable key"; + } + catch (Exception ex) + { + Console.WriteLine("Error writing app settings: " + ex.Message); } - #endregion + return false; + } - #region Port + #endregion - private static void SetPort(string filePath, int port) + #region Port + + private static void 
SetPort(string filePath, int port) + { + if (new OsInfo(Array.Empty()).IsDocker) { - if (new OsInfo(Array.Empty()).IsDocker) - { - return; - } - - try - { - var currentPort = GetPort(filePath); - var json = File.ReadAllText(filePath).Replace("\"Port\": " + currentPort, "\"Port\": " + port); - File.WriteAllText(filePath, json); - } - catch (Exception) - { - /* Swallow Exception */ - } + return; } - private static int GetPort(string filePath) + try { - const int defaultPort = 5000; - if (new OsInfo(Array.Empty()).IsDocker) - { - return defaultPort; - } - - try - { - var json = File.ReadAllText(filePath); - var jsonObj = JsonSerializer.Deserialize(json); - const string key = "Port"; - - if (jsonObj.TryGetProperty(key, out JsonElement tokenElement)) - { - return tokenElement.GetInt32(); - } - } - catch (Exception ex) - { - Console.WriteLine("Error writing app settings: " + ex.Message); - } + var currentPort = GetPort(filePath); + var json = File.ReadAllText(filePath).Replace("\"Port\": " + currentPort, "\"Port\": " + port); + File.WriteAllText(filePath, json); + } + catch (Exception) + { + /* Swallow Exception */ + } + } + private static int GetPort(string filePath) + { + const int defaultPort = 5000; + if (new OsInfo(Array.Empty()).IsDocker) + { return defaultPort; } - #endregion - - #region LogLevel - - private static void SetLogLevel(string filePath, string logLevel) + try { - try + var json = File.ReadAllText(filePath); + var jsonObj = JsonSerializer.Deserialize(json); + const string key = "Port"; + + if (jsonObj.TryGetProperty(key, out JsonElement tokenElement)) { - var currentLevel = GetLogLevel(filePath); - var json = File.ReadAllText(filePath) - .Replace($"\"Default\": \"{currentLevel}\"", $"\"Default\": \"{logLevel}\""); - File.WriteAllText(filePath, json); - } - catch (Exception) - { - /* Swallow Exception */ + return tokenElement.GetInt32(); } } - - private static string GetLogLevel(string filePath) + catch (Exception ex) { - try - { - var json = 
File.ReadAllText(filePath); - var jsonObj = JsonSerializer.Deserialize(json); - - if (jsonObj.TryGetProperty("Logging", out JsonElement tokenElement)) - { - foreach (var property in tokenElement.EnumerateObject()) - { - if (!property.Name.Equals("LogLevel")) continue; - foreach (var logProperty in property.Value.EnumerateObject().Where(logProperty => logProperty.Name.Equals("Default"))) - { - return logProperty.Value.GetString(); - } - } - } - } - catch (Exception ex) - { - Console.WriteLine("Error writing app settings: " + ex.Message); - } - - return "Information"; + Console.WriteLine("Error writing app settings: " + ex.Message); } - #endregion - - private static string GetBranch(string filePath) - { - const string defaultBranch = "main"; - - try - { - var json = File.ReadAllText(filePath); - var jsonObj = JsonSerializer.Deserialize(json); - const string key = "Branch"; - - if (jsonObj.TryGetProperty(key, out JsonElement tokenElement)) - { - return tokenElement.GetString(); - } - } - catch (Exception ex) - { - Console.WriteLine("Error reading app settings: " + ex.Message); - } - - return defaultBranch; - } - - private static void SetBranch(string filePath, string updatedBranch) - { - try - { - var currentBranch = GetBranch(filePath); - var json = File.ReadAllText(filePath) - .Replace("\"Branch\": " + currentBranch, "\"Branch\": " + updatedBranch); - File.WriteAllText(filePath, json); - } - catch (Exception) - { - /* Swallow Exception */ - } - } - - private static string GetLoggingFile(string filePath) - { - const string defaultFile = "config/logs/kavita.log"; - - try - { - var json = File.ReadAllText(filePath); - var jsonObj = JsonSerializer.Deserialize(json); - - if (jsonObj.TryGetProperty("Logging", out JsonElement tokenElement)) - { - foreach (var property in tokenElement.EnumerateObject()) - { - if (!property.Name.Equals("File")) continue; - foreach (var logProperty in property.Value.EnumerateObject()) - { - if (logProperty.Name.Equals("Path")) - { - return 
logProperty.Value.GetString(); - } - } - } - } - } - catch (Exception ex) - { - Console.WriteLine("Error writing app settings: " + ex.Message); - } - - return defaultFile; - } - - /// - /// This should NEVER be called except by - /// - /// - /// - private static void SetLoggingFile(string filePath, string directory) - { - try - { - var currentFile = GetLoggingFile(filePath); - var json = File.ReadAllText(filePath) - .Replace("\"Path\": \"" + currentFile + "\"", "\"Path\": \"" + directory + "\""); - File.WriteAllText(filePath, json); - } - catch (Exception ex) - { - /* Swallow Exception */ - Console.WriteLine(ex); - } - } - - private static string GetDatabasePath(string filePath) - { - const string defaultFile = "config/kavita.db"; - - try - { - var json = File.ReadAllText(filePath); - var jsonObj = JsonSerializer.Deserialize(json); - - if (jsonObj.TryGetProperty("ConnectionStrings", out JsonElement tokenElement)) - { - foreach (var property in tokenElement.EnumerateObject()) - { - if (!property.Name.Equals("DefaultConnection")) continue; - return property.Value.GetString(); - } - } - } - catch (Exception ex) - { - Console.WriteLine("Error writing app settings: " + ex.Message); - } - - return defaultFile; - } - - /// - /// This should NEVER be called except by MigrateConfigFiles - /// - /// - /// - private static void SetDatabasePath(string filePath, string updatedPath) - { - try - { - var existingString = GetDatabasePath(filePath); - var json = File.ReadAllText(filePath) - .Replace(existingString, - "Data source=" + updatedPath); - File.WriteAllText(filePath, json); - } - catch (Exception) - { - /* Swallow Exception */ - } - } + return defaultPort; } + + #endregion } diff --git a/Kavita.Common/EnvironmentInfo/BuildInfo.cs b/Kavita.Common/EnvironmentInfo/BuildInfo.cs index 116c07866..84f5f387c 100644 --- a/Kavita.Common/EnvironmentInfo/BuildInfo.cs +++ b/Kavita.Common/EnvironmentInfo/BuildInfo.cs @@ -1,33 +1,11 @@ using System; -using System.Linq; using 
System.Reflection; namespace Kavita.Common.EnvironmentInfo; public static class BuildInfo { - static BuildInfo() - { - var assembly = Assembly.GetExecutingAssembly(); - - Version = assembly.GetName().Version; - - var attributes = assembly.GetCustomAttributes(true); - - Branch = "unknown"; - - var config = attributes.OfType().FirstOrDefault(); - if (config != null) - { - Branch = config.Configuration; // NOTE: This is not helpful, better to have main/develop branch - } - - Release = $"{Version}-{Branch}"; - } - + public static readonly Version Version = Assembly.GetExecutingAssembly().GetName().Version; public static string AppName { get; } = "Kavita"; - public static Version Version { get; } - public static string Branch { get; } - public static string Release { get; } } diff --git a/Kavita.Common/EnvironmentInfo/IOsInfo.cs b/Kavita.Common/EnvironmentInfo/IOsInfo.cs index cb5a85e09..e3453c3d6 100644 --- a/Kavita.Common/EnvironmentInfo/IOsInfo.cs +++ b/Kavita.Common/EnvironmentInfo/IOsInfo.cs @@ -4,157 +4,156 @@ using System.Diagnostics; using System.IO; using System.Linq; -namespace Kavita.Common.EnvironmentInfo +namespace Kavita.Common.EnvironmentInfo; + +public class OsInfo : IOsInfo { - public class OsInfo : IOsInfo + public static Os Os { get; } + + public static bool IsNotWindows => !IsWindows; + public static bool IsLinux => Os is Os.Linux or Os.LinuxMusl or Os.Bsd; + public static bool IsOsx => Os == Os.Osx; + public static bool IsWindows => Os == Os.Windows; + + // this needs to not be static so we can mock it + public bool IsDocker { get; } + + public string Version { get; } + public string Name { get; } + public string FullName { get; } + + static OsInfo() { - public static Os Os { get; } + var platform = Environment.OSVersion.Platform; - public static bool IsNotWindows => !IsWindows; - public static bool IsLinux => Os == Os.Linux || Os == Os.LinuxMusl || Os == Os.Bsd; - public static bool IsOsx => Os == Os.Osx; - public static bool IsWindows => Os == 
Os.Windows; - - // this needs to not be static so we can mock it - public bool IsDocker { get; } - - public string Version { get; } - public string Name { get; } - public string FullName { get; } - - static OsInfo() + switch (platform) { - var platform = Environment.OSVersion.Platform; - - switch (platform) + case PlatformID.Win32NT: { - case PlatformID.Win32NT: - { - Os = Os.Windows; - break; - } - - case PlatformID.MacOSX: - case PlatformID.Unix: - { - Os = GetPosixFlavour(); - break; - } + Os = Os.Windows; + break; } + case PlatformID.MacOSX: + case PlatformID.Unix: + { + Os = GetPosixFlavour(); + break; + } } - public OsInfo(IEnumerable versionAdapters) + } + + public OsInfo(IEnumerable versionAdapters) + { + OsVersionModel osInfo = null; + + foreach (var osVersionAdapter in versionAdapters.Where(c => c.Enabled)) { - OsVersionModel osInfo = null; - - foreach (var osVersionAdapter in versionAdapters.Where(c => c.Enabled)) + try { - try - { - osInfo = osVersionAdapter.Read(); - } - catch (Exception e) - { - Console.WriteLine("Couldn't get OS Version info: " + e.Message); - } - - if (osInfo != null) - { - break; - } + osInfo = osVersionAdapter.Read(); + } + catch (Exception e) + { + Console.WriteLine("Couldn't get OS Version info: " + e.Message); } if (osInfo != null) { - Name = osInfo.Name; - Version = osInfo.Version; - FullName = osInfo.FullName; - } - else - { - Name = Os.ToString(); - FullName = Name; - } - - if (IsLinux && File.Exists("/proc/1/cgroup") && File.ReadAllText("/proc/1/cgroup").Contains("/docker/")) - { - IsDocker = true; + break; } } - public OsInfo() + if (osInfo != null) + { + Name = osInfo.Name; + Version = osInfo.Version; + FullName = osInfo.FullName; + } + else { Name = Os.ToString(); FullName = Name; - - if (IsLinux && File.Exists("/proc/1/cgroup") && File.ReadAllText("/proc/1/cgroup").Contains("/docker/")) - { - IsDocker = true; - } } - private static Os GetPosixFlavour() + if (IsLinux && File.Exists("/proc/1/cgroup") && 
File.ReadAllText("/proc/1/cgroup").Contains("/docker/")) { - var output = RunAndCapture("uname", "-s"); + IsDocker = true; + } + } - if (output.StartsWith("Darwin")) - { - return Os.Osx; - } - else if (output.Contains("BSD")) - { - return Os.Bsd; - } - else - { + public OsInfo() + { + Name = Os.ToString(); + FullName = Name; + + if (IsLinux && File.Exists("/proc/1/cgroup") && File.ReadAllText("/proc/1/cgroup").Contains("/docker/")) + { + IsDocker = true; + } + } + + private static Os GetPosixFlavour() + { + var output = RunAndCapture("uname", "-s"); + + if (output.StartsWith("Darwin")) + { + return Os.Osx; + } + else if (output.Contains("BSD")) + { + return Os.Bsd; + } + else + { #if ISMUSL return Os.LinuxMusl; #else - return Os.Linux; + return Os.Linux; #endif - } } + } - private static string RunAndCapture(string filename, string args) + private static string RunAndCapture(string filename, string args) + { + var p = new Process { - var p = new Process + StartInfo = { - StartInfo = - { - FileName = filename, - Arguments = args, - UseShellExecute = false, - CreateNoWindow = true, - RedirectStandardOutput = true - } - }; + FileName = filename, + Arguments = args, + UseShellExecute = false, + CreateNoWindow = true, + RedirectStandardOutput = true + } + }; - p.Start(); + p.Start(); - // To avoid deadlocks, always read the output stream first and then wait. - var output = p.StandardOutput.ReadToEnd(); - p.WaitForExit(1000); + // To avoid deadlocks, always read the output stream first and then wait. 
+ var output = p.StandardOutput.ReadToEnd(); + p.WaitForExit(1000); - return output; - } - } - - public interface IOsInfo - { - string Version { get; } - string Name { get; } - string FullName { get; } - - bool IsDocker { get; } - } - - public enum Os - { - Windows, - Linux, - Osx, - LinuxMusl, - Bsd + return output; } } + +public interface IOsInfo +{ + string Version { get; } + string Name { get; } + string FullName { get; } + + bool IsDocker { get; } +} + +public enum Os +{ + Windows, + Linux, + Osx, + LinuxMusl, + Bsd +} diff --git a/Kavita.Common/EnvironmentInfo/IOsVersionAdapter.cs b/Kavita.Common/EnvironmentInfo/IOsVersionAdapter.cs index fbf4403d3..827a44ac8 100644 --- a/Kavita.Common/EnvironmentInfo/IOsVersionAdapter.cs +++ b/Kavita.Common/EnvironmentInfo/IOsVersionAdapter.cs @@ -1,8 +1,7 @@ -namespace Kavita.Common.EnvironmentInfo +namespace Kavita.Common.EnvironmentInfo; + +public interface IOsVersionAdapter { - public interface IOsVersionAdapter - { - bool Enabled { get; } - OsVersionModel Read(); - } -} \ No newline at end of file + bool Enabled { get; } + OsVersionModel Read(); +} diff --git a/Kavita.Common/EnvironmentInfo/OsVersionModel.cs b/Kavita.Common/EnvironmentInfo/OsVersionModel.cs index 9e91daa18..a365526ff 100644 --- a/Kavita.Common/EnvironmentInfo/OsVersionModel.cs +++ b/Kavita.Common/EnvironmentInfo/OsVersionModel.cs @@ -1,27 +1,26 @@ -namespace Kavita.Common.EnvironmentInfo +namespace Kavita.Common.EnvironmentInfo; + +public class OsVersionModel { - public class OsVersionModel + public OsVersionModel(string name, string version, string fullName = null) { - public OsVersionModel(string name, string version, string fullName = null) + Name = Trim(name); + Version = Trim(version); + + if (string.IsNullOrWhiteSpace(fullName)) { - Name = Trim(name); - Version = Trim(version); - - if (string.IsNullOrWhiteSpace(fullName)) - { - fullName = $"{Name} {Version}"; - } - - FullName = Trim(fullName); + fullName = $"{Name} {Version}"; } - private static 
string Trim(string source) - { - return source.Trim().Trim('"', '\''); - } - - public string Name { get; } - public string FullName { get; } - public string Version { get; } + FullName = Trim(fullName); } -} \ No newline at end of file + + private static string Trim(string source) + { + return source.Trim().Trim('"', '\''); + } + + public string Name { get; } + public string FullName { get; } + public string Version { get; } +} diff --git a/Kavita.Common/Extensions/EnumExtensions.cs b/Kavita.Common/Extensions/EnumExtensions.cs index d35aeb805..e672d8050 100644 --- a/Kavita.Common/Extensions/EnumExtensions.cs +++ b/Kavita.Common/Extensions/EnumExtensions.cs @@ -1,21 +1,20 @@ using System.ComponentModel; -namespace Kavita.Common.Extensions +namespace Kavita.Common.Extensions; + +public static class EnumExtensions { - public static class EnumExtensions - { public static string ToDescription(this TEnum value) where TEnum : struct { - var fi = value.GetType().GetField(value.ToString() ?? string.Empty); + var fi = value.GetType().GetField(value.ToString() ?? string.Empty); - if (fi == null) - { - return value.ToString(); - } + if (fi == null) + { + return value.ToString(); + } - var attributes = (DescriptionAttribute[])fi.GetCustomAttributes(typeof(DescriptionAttribute), false); + var attributes = (DescriptionAttribute[])fi.GetCustomAttributes(typeof(DescriptionAttribute), false); - return attributes is {Length: > 0} ? attributes[0].Description : value.ToString(); + return attributes is {Length: > 0} ? 
attributes[0].Description : value.ToString(); } - } } diff --git a/Kavita.Common/Extensions/PathExtensions.cs b/Kavita.Common/Extensions/PathExtensions.cs index 5ebb96673..904589630 100644 --- a/Kavita.Common/Extensions/PathExtensions.cs +++ b/Kavita.Common/Extensions/PathExtensions.cs @@ -1,12 +1,11 @@ using System.IO; -namespace Kavita.Common.Extensions +namespace Kavita.Common.Extensions; + +public static class PathExtensions { - public static class PathExtensions - { public static string GetParentDirectory(string filePath) { - return Path.GetDirectoryName(filePath); + return Path.GetDirectoryName(filePath); } - } } diff --git a/Kavita.Common/HashUtil.cs b/Kavita.Common/HashUtil.cs index f959f0af4..8b808b9c1 100644 --- a/Kavita.Common/HashUtil.cs +++ b/Kavita.Common/HashUtil.cs @@ -1,56 +1,55 @@ using System; using System.Text; -namespace Kavita.Common +namespace Kavita.Common; + +public static class HashUtil { - public static class HashUtil + private static string CalculateCrc(string input) { - private static string CalculateCrc(string input) + uint mCrc = 0xffffffff; + byte[] bytes = Encoding.UTF8.GetBytes(input); + foreach (byte myByte in bytes) { - uint mCrc = 0xffffffff; - byte[] bytes = Encoding.UTF8.GetBytes(input); - foreach (byte myByte in bytes) + mCrc ^= (uint)myByte << 24; + for (var i = 0; i < 8; i++) { - mCrc ^= (uint)myByte << 24; - for (var i = 0; i < 8; i++) + if ((Convert.ToUInt32(mCrc) & 0x80000000) == 0x80000000) { - if ((Convert.ToUInt32(mCrc) & 0x80000000) == 0x80000000) - { - mCrc = (mCrc << 1) ^ 0x04C11DB7; - } - else - { - mCrc <<= 1; - } + mCrc = (mCrc << 1) ^ 0x04C11DB7; + } + else + { + mCrc <<= 1; } } - - return $"{mCrc:x8}"; } - /// - /// Calculates a unique, Anonymous Token that will represent this unique Kavita installation. - /// - /// - public static string AnonymousToken() + return $"{mCrc:x8}"; + } + + /// + /// Calculates a unique, Anonymous Token that will represent this unique Kavita installation. 
+ /// + /// + public static string AnonymousToken() + { + var seed = $"{Environment.ProcessorCount}_{Environment.OSVersion.Platform}_{Configuration.JwtToken}_{Environment.UserName}"; + return CalculateCrc(seed); + } + + /// + /// Generates a unique API key to this server instance + /// + /// + public static string ApiKey() + { + var id = Guid.NewGuid(); + if (id.Equals(Guid.Empty)) { - var seed = $"{Environment.ProcessorCount}_{Environment.OSVersion.Platform}_{Configuration.JwtToken}_{Environment.UserName}"; - return CalculateCrc(seed); + id = Guid.NewGuid(); } - /// - /// Generates a unique API key to this server instance - /// - /// - public static string ApiKey() - { - var id = Guid.NewGuid(); - if (id.Equals(Guid.Empty)) - { - id = Guid.NewGuid(); - } - - return id.ToString(); - } + return id.ToString(); } } diff --git a/Kavita.Common/Kavita.Common.csproj b/Kavita.Common/Kavita.Common.csproj index db5f58646..a46dfdf4b 100644 --- a/Kavita.Common/Kavita.Common.csproj +++ b/Kavita.Common/Kavita.Common.csproj @@ -4,8 +4,9 @@ net6.0 kavitareader.com Kavita - 0.5.6.0 + 0.6.0.0 en + true @@ -13,7 +14,7 @@ - + all runtime; build; native; contentfiles; analyzers; buildtransitive diff --git a/Kavita.Common/KavitaException.cs b/Kavita.Common/KavitaException.cs index f7942c8b1..b624e0111 100644 --- a/Kavita.Common/KavitaException.cs +++ b/Kavita.Common/KavitaException.cs @@ -1,25 +1,24 @@ using System; using System.Runtime.Serialization; -namespace Kavita.Common +namespace Kavita.Common; + +/// +/// These are used for errors to send to the UI that should not be reported to Sentry +/// +[Serializable] +public class KavitaException : Exception { - /// - /// These are used for errors to send to the UI that should not be reported to Sentry - /// - [Serializable] - public class KavitaException : Exception - { - public KavitaException() - { } + public KavitaException() + { } - public KavitaException(string message) : base(message) - { } + public KavitaException(string message) : 
base(message) + { } - public KavitaException(string message, Exception inner) - : base(message, inner) { } + public KavitaException(string message, Exception inner) + : base(message, inner) { } - protected KavitaException(SerializationInfo info, StreamingContext context) - : base(info, context) - { } - } + protected KavitaException(SerializationInfo info, StreamingContext context) + : base(info, context) + { } } diff --git a/Kavita.sln.DotSettings b/Kavita.sln.DotSettings index fb5e739fb..55f8e0090 100644 --- a/Kavita.sln.DotSettings +++ b/Kavita.sln.DotSettings @@ -2,6 +2,8 @@ ExplicitlyExcluded True True + True + True True True True \ No newline at end of file diff --git a/TestData b/TestData new file mode 160000 index 000000000..4f5750025 --- /dev/null +++ b/TestData @@ -0,0 +1 @@ +Subproject commit 4f5750025a1c0b48cd72eaa6f1b61642c41f147f diff --git a/UI/Web/package-lock.json b/UI/Web/package-lock.json index 0c437f9ba..1e6b9b82b 100644 --- a/UI/Web/package-lock.json +++ b/UI/Web/package-lock.json @@ -2637,9 +2637,9 @@ } }, "@angular/cdk": { - "version": "13.2.2", - "resolved": "https://registry.npmjs.org/@angular/cdk/-/cdk-13.2.2.tgz", - "integrity": "sha512-cT5DIaz+NI9IGb3X61Wh26+L6zdRcOXT1BP37iRbK2Qa2qM8/0VNeK6hrBBIblyoHKR/WUmRlS8XYf6mmArpZw==", + "version": "13.3.9", + "resolved": "https://registry.npmjs.org/@angular/cdk/-/cdk-13.3.9.tgz", + "integrity": "sha512-XCuCbeuxWFyo3EYrgEYx7eHzwl76vaWcxtWXl00ka8d+WAOtMQ6Tf1D98ybYT5uwF9889fFpXAPw98mVnlo3MA==", "requires": { "parse5": "^5.0.0", "tslib": "^2.3.0" @@ -12650,9 +12650,9 @@ } }, "ngx-extended-pdf-viewer": { - "version": "14.5.3", - "resolved": "https://registry.npmjs.org/ngx-extended-pdf-viewer/-/ngx-extended-pdf-viewer-14.5.3.tgz", - "integrity": "sha512-9pqnbonKcu/6SIwPe3yCfHzsO1fgO7qIwETHD7UuS2kAG5GM7VkEwrqMoF7qsZ0Lq/rkqFBcGsS4GYW5JK+oEQ==", + "version": "15.0.2", + "resolved": "https://registry.npmjs.org/ngx-extended-pdf-viewer/-/ngx-extended-pdf-viewer-15.0.2.tgz", + "integrity": 
"sha512-3cuJ87hqod8b/DiIjLNCYxLZYkfi+bm0PsjMFw4GnGfjKB7QJv0p/+KvrCdD68k18Aim5Sd5BMZhF2pHelp1mw==", "requires": { "lodash.deburr": "^4.1.0", "tslib": "^2.3.0" @@ -12666,14 +12666,6 @@ "tslib": "^2.3.0" } }, - "ngx-infinite-scroll": { - "version": "13.0.2", - "resolved": "https://registry.npmjs.org/ngx-infinite-scroll/-/ngx-infinite-scroll-13.0.2.tgz", - "integrity": "sha512-RSezL0DUxo1B57SyRMOSt3a/5lLXJs6P8lavtxOh10uhX+hn662cMYHUO7LiU2a/vJxef2R020s4jkUqhnXTcg==", - "requires": { - "tslib": "^2.3.0" - } - }, "ngx-toastr": { "version": "14.2.1", "resolved": "https://registry.npmjs.org/ngx-toastr/-/ngx-toastr-14.2.1.tgz", diff --git a/UI/Web/package.json b/UI/Web/package.json index 53a664d47..c78cdc29e 100644 --- a/UI/Web/package.json +++ b/UI/Web/package.json @@ -5,7 +5,7 @@ "ng": "ng", "start": "ng serve", "build": "ng build", - "prod": "ng build --configuration production", + "prod": "ng build --configuration production --aot --output-hashing=all", "explore": "ng build --stats-json && webpack-bundle-analyzer dist/stats.json", "test": "jest", "test:watch": "jest --watch", @@ -39,9 +39,8 @@ "lazysizes": "^5.3.2", "ng-circle-progress": "^1.6.0", "ngx-color-picker": "^12.0.0", - "ngx-extended-pdf-viewer": "^14.5.2", + "ngx-extended-pdf-viewer": "^15.0.0", "ngx-file-drop": "^14.0.1", - "ngx-infinite-scroll": "^13.0.2", "ngx-toastr": "^14.2.1", "requires": "^1.0.2", "rxjs": "~7.5.4", diff --git a/UI/Web/src/app/_models/age-restriction.ts b/UI/Web/src/app/_models/age-restriction.ts new file mode 100644 index 000000000..e5be030b1 --- /dev/null +++ b/UI/Web/src/app/_models/age-restriction.ts @@ -0,0 +1,6 @@ +import { AgeRating } from "./metadata/age-rating"; + +export interface AgeRestriction { + ageRating: AgeRating; + includeUnknowns: boolean; +} \ No newline at end of file diff --git a/UI/Web/src/app/_models/device/device-platform.ts b/UI/Web/src/app/_models/device/device-platform.ts new file mode 100644 index 000000000..3c88a2160 --- /dev/null +++ 
b/UI/Web/src/app/_models/device/device-platform.ts @@ -0,0 +1,8 @@ +export enum DevicePlatform { + Custom = 0, + PocketBook = 1, + Kindle = 2, + Kobo = 3 +} + +export const devicePlatforms = [DevicePlatform.Custom, DevicePlatform.Kindle, DevicePlatform.Kobo, DevicePlatform.PocketBook]; \ No newline at end of file diff --git a/UI/Web/src/app/_models/device/device.ts b/UI/Web/src/app/_models/device/device.ts new file mode 100644 index 000000000..435be4937 --- /dev/null +++ b/UI/Web/src/app/_models/device/device.ts @@ -0,0 +1,9 @@ +import { DevicePlatform } from "./device-platform"; + +export interface Device { + id: number; + name: string; + platform: DevicePlatform; + emailAddress: string; + lastUsed: string; +} \ No newline at end of file diff --git a/UI/Web/src/app/_models/email/update-email-response.ts b/UI/Web/src/app/_models/email/update-email-response.ts new file mode 100644 index 000000000..eaaf64580 --- /dev/null +++ b/UI/Web/src/app/_models/email/update-email-response.ts @@ -0,0 +1,10 @@ +export interface UpdateEmailResponse { + /** + * Did the user not have an existing email + */ + hadNoExistingEmail: boolean; + /** + * Was an email sent (ie is this server accessible) + */ + emailSent: boolean; +} \ No newline at end of file diff --git a/UI/Web/src/app/_models/member.ts b/UI/Web/src/app/_models/member.ts index 874dba535..adfbd9d93 100644 --- a/UI/Web/src/app/_models/member.ts +++ b/UI/Web/src/app/_models/member.ts @@ -1,3 +1,4 @@ +import { AgeRestriction } from './age-restriction'; import { Library } from './library'; export interface Member { @@ -6,7 +7,7 @@ export interface Member { email: string; lastActive: string; // datetime created: string; // datetime - //isAdmin: boolean; roles: string[]; libraries: Library[]; + ageRestriction: AgeRestriction; } \ No newline at end of file diff --git a/UI/Web/src/app/_models/metadata/age-rating.ts b/UI/Web/src/app/_models/metadata/age-rating.ts index d44a8e250..cbb2e86a5 100644 --- 
a/UI/Web/src/app/_models/metadata/age-rating.ts +++ b/UI/Web/src/app/_models/metadata/age-rating.ts @@ -1,4 +1,8 @@ export enum AgeRating { + /** + * This is not a valid state for Series/Chapters, but used for Restricted Profiles + */ + NotApplicable = -1, Unknown = 0, AdultsOnly = 1, EarlyChildhood = 2, diff --git a/UI/Web/src/app/_models/preferences/preferences.ts b/UI/Web/src/app/_models/preferences/preferences.ts index 1fd31856d..aeb92b3bf 100644 --- a/UI/Web/src/app/_models/preferences/preferences.ts +++ b/UI/Web/src/app/_models/preferences/preferences.ts @@ -35,6 +35,7 @@ export interface Preferences { globalPageLayoutMode: PageLayoutMode; blurUnreadSummaries: boolean; promptForDownloadSize: boolean; + noTransitions: boolean; } export const readingDirections = [{text: 'Left to Right', value: ReadingDirection.LeftToRight}, {text: 'Right to Left', value: ReadingDirection.RightToLeft}]; diff --git a/UI/Web/src/app/_models/series-detail/related-series.ts b/UI/Web/src/app/_models/series-detail/related-series.ts index 02d394382..f0cfc230b 100644 --- a/UI/Web/src/app/_models/series-detail/related-series.ts +++ b/UI/Web/src/app/_models/series-detail/related-series.ts @@ -14,4 +14,5 @@ export interface RelatedSeries { alternativeVersions: Array; doujinshis: Array; parent: Array; -} \ No newline at end of file + editions: Array; +} diff --git a/UI/Web/src/app/_models/series-detail/relation-kind.ts b/UI/Web/src/app/_models/series-detail/relation-kind.ts index 203401af3..77470041c 100644 --- a/UI/Web/src/app/_models/series-detail/relation-kind.ts +++ b/UI/Web/src/app/_models/series-detail/relation-kind.ts @@ -13,7 +13,8 @@ export enum RelationKind { /** * This is UI only. 
Backend will generate Parent series for everything but Prequel/Sequel */ - Parent = 12 + Parent = 12, + Edition = 13 } export const RelationKinds = [ @@ -26,6 +27,7 @@ export const RelationKinds = [ {text: 'Side Story', value: RelationKind.SideStory}, {text: 'Character', value: RelationKind.Character}, {text: 'Contains', value: RelationKind.Contains}, + {text: 'Edition', value: RelationKind.Edition}, {text: 'Doujinshi', value: RelationKind.Doujinshi}, {text: 'Other', value: RelationKind.Other}, -]; \ No newline at end of file +]; diff --git a/UI/Web/src/app/_models/series-filter.ts b/UI/Web/src/app/_models/series-filter.ts index e346ccd4f..439d7f508 100644 --- a/UI/Web/src/app/_models/series-filter.ts +++ b/UI/Web/src/app/_models/series-filter.ts @@ -6,6 +6,11 @@ export interface FilterItem { selected: boolean; } +export interface Range { + min: T; + max: T; +} + export interface SeriesFilter { formats: Array; libraries: Array, @@ -30,6 +35,7 @@ export interface SeriesFilter { languages: Array; publicationStatus: Array; seriesNameQuery: string; + releaseYearRange: Range | null; } export interface SortOptions { @@ -42,7 +48,8 @@ export enum SortField { Created = 2, LastModified = 3, LastChapterAdded = 4, - TimeToRead = 5 + TimeToRead = 5, + ReleaseYear = 6, } export interface ReadStatus { diff --git a/UI/Web/src/app/_models/user.ts b/UI/Web/src/app/_models/user.ts index 626e56a5f..8aa1467bc 100644 --- a/UI/Web/src/app/_models/user.ts +++ b/UI/Web/src/app/_models/user.ts @@ -1,3 +1,4 @@ +import { AgeRestriction } from './age-restriction'; import { Preferences } from './preferences/preferences'; // This interface is only used for login and storing/retreiving JWT from local storage @@ -8,4 +9,6 @@ export interface User { roles: string[]; preferences: Preferences; apiKey: string; + email: string; + ageRestriction: AgeRestriction; } \ No newline at end of file diff --git a/UI/Web/src/app/_services/account.service.ts b/UI/Web/src/app/_services/account.service.ts index 
3675e818e..9da026262 100644 --- a/UI/Web/src/app/_services/account.service.ts +++ b/UI/Web/src/app/_services/account.service.ts @@ -1,6 +1,6 @@ import { HttpClient } from '@angular/common/http'; import { Injectable, OnDestroy } from '@angular/core'; -import { Observable, of, ReplaySubject, Subject } from 'rxjs'; +import { of, ReplaySubject, Subject } from 'rxjs'; import { filter, map, switchMap, takeUntil } from 'rxjs/operators'; import { environment } from 'src/environments/environment'; import { Preferences } from '../_models/preferences/preferences'; @@ -10,6 +10,17 @@ import { EVENTS, MessageHubService } from './message-hub.service'; import { ThemeService } from './theme.service'; import { InviteUserResponse } from '../_models/invite-user-response'; import { UserUpdateEvent } from '../_models/events/user-update-event'; +import { UpdateEmailResponse } from '../_models/email/update-email-response'; +import { AgeRating } from '../_models/metadata/age-rating'; +import { AgeRestriction } from '../_models/age-restriction'; + +export enum Role { + Admin = 'Admin', + ChangePassword = 'Change Password', + Bookmark = 'Bookmark', + Download = 'Download', + ChangeRestriction = 'Change Restriction' +} @Injectable({ providedIn: 'root' @@ -47,22 +58,30 @@ export class AccountService implements OnDestroy { } hasAdminRole(user: User) { - return user && user.roles.includes('Admin'); + return user && user.roles.includes(Role.Admin); } hasChangePasswordRole(user: User) { - return user && user.roles.includes('Change Password'); + return user && user.roles.includes(Role.ChangePassword); + } + + hasChangeAgeRestrictionRole(user: User) { + return user && user.roles.includes(Role.ChangeRestriction); } hasDownloadRole(user: User) { - return user && user.roles.includes('Download'); + return user && user.roles.includes(Role.Download); + } + + hasBookmarkRole(user: User) { + return user && user.roles.includes(Role.Bookmark); } getRoles() { return this.httpClient.get(this.baseUrl + 
'account/roles'); } - login(model: {username: string, password: string}): Observable { + login(model: {username: string, password: string}) { return this.httpClient.post(this.baseUrl + 'account/login', model).pipe( map((response: User) => { const user = response; @@ -127,6 +146,10 @@ export class AccountService implements OnDestroy { ); } + isEmailConfirmed() { + return this.httpClient.get(this.baseUrl + 'account/email-confirmed'); + } + migrateUser(model: {email: string, username: string, password: string, sendEmail: boolean}) { return this.httpClient.post(this.baseUrl + 'account/migrate-email', model, {responseType: 'text' as 'json'}); } @@ -139,7 +162,7 @@ export class AccountService implements OnDestroy { return this.httpClient.post(this.baseUrl + 'account/resend-confirmation-email?userId=' + userId, {}, {responseType: 'text' as 'json'}); } - inviteUser(model: {email: string, roles: Array, libraries: Array}) { + inviteUser(model: {email: string, roles: Array, libraries: Array, ageRestriction: AgeRestriction}) { return this.httpClient.post(this.baseUrl + 'account/invite', model); } @@ -147,6 +170,10 @@ export class AccountService implements OnDestroy { return this.httpClient.post(this.baseUrl + 'account/confirm-email', model); } + confirmEmailUpdate(model: {email: string, token: string}) { + return this.httpClient.post(this.baseUrl + 'account/confirm-email-update', model); + } + /** * Given a user id, returns a full url for setting up the user account * @param userId @@ -165,17 +192,25 @@ export class AccountService implements OnDestroy { } confirmResetPasswordEmail(model: {email: string, token: string, password: string}) { - return this.httpClient.post(this.baseUrl + 'account/confirm-password-reset', model, {responseType: 'json' as 'text'}); + return this.httpClient.post(this.baseUrl + 'account/confirm-password-reset', model, {responseType: 'text' as 'json'}); } resetPassword(username: string, password: string, oldPassword: string) { return 
this.httpClient.post(this.baseUrl + 'account/reset-password', {username, password, oldPassword}, {responseType: 'json' as 'text'}); } - update(model: {email: string, roles: Array, libraries: Array, userId: number}) { + update(model: {email: string, roles: Array, libraries: Array, userId: number, ageRestriction: AgeRestriction}) { return this.httpClient.post(this.baseUrl + 'account/update', model); } + updateEmail(email: string) { + return this.httpClient.post(this.baseUrl + 'account/update/email', {email}); + } + + updateAgeRestriction(ageRating: AgeRating, includeUnknowns: boolean) { + return this.httpClient.post(this.baseUrl + 'account/update/age-restriction', {ageRating, includeUnknowns}); + } + /** * This will get latest preferences for a user and cache them into user store * @returns diff --git a/UI/Web/src/app/_services/action-factory.service.ts b/UI/Web/src/app/_services/action-factory.service.ts index 6b38dbaa4..cd72f2fce 100644 --- a/UI/Web/src/app/_services/action-factory.service.ts +++ b/UI/Web/src/app/_services/action-factory.service.ts @@ -1,14 +1,18 @@ import { Injectable } from '@angular/core'; +import { map, Observable, shareReplay } from 'rxjs'; import { Chapter } from '../_models/chapter'; import { CollectionTag } from '../_models/collection-tag'; +import { Device } from '../_models/device/device'; import { Library } from '../_models/library'; import { MangaFormat } from '../_models/manga-format'; import { ReadingList } from '../_models/reading-list'; import { Series } from '../_models/series'; import { Volume } from '../_models/volume'; import { AccountService } from './account.service'; +import { DeviceService } from './device.service'; export enum Action { + Submenu = -1, /** * Mark entity as read */ @@ -77,20 +81,37 @@ export enum Action { * Remove from user's Want to Read List */ RemoveFromWantToReadList = 16, + /** + * Send to a device + */ + SendTo = 17, } export interface ActionItem { title: string; action: Action; - callback: (action: 
Action, data: T) => void; + callback: (action: ActionItem, data: T) => void; requiresAdmin: boolean; + children: Array>; + /** + * An optional class which applies to an item. ie) danger on a delete action + */ + class?: string; + /** + * Indicates that there exists a separate list will be loaded from an API. + * Rule: If using this, only one child should exist in children with the Action for dynamicList. + */ + dynamicList?: Observable<{title: string, data: any}[]> | undefined; + /** + * Extra data that needs to be sent back from the card item. Used mainly for dynamicList. This will be the item from dyanamicList return + */ + _extra?: {title: string, data: any}; } @Injectable({ - providedIn: 'root' + providedIn: 'root', }) export class ActionFactoryService { - libraryActions: Array> = []; seriesActions: Array> = []; @@ -108,8 +129,8 @@ export class ActionFactoryService { isAdmin = false; hasDownloadRole = false; - constructor(private accountService: AccountService) { - this.accountService.currentUser$.subscribe(user => { + constructor(private accountService: AccountService, private deviceService: DeviceService) { + this.accountService.currentUser$.subscribe((user) => { if (user) { this.isAdmin = this.accountService.hasAdminRole(user); this.hasDownloadRole = this.accountService.hasDownloadRole(user); @@ -119,243 +140,352 @@ export class ActionFactoryService { } this._resetActions(); - - if (this.isAdmin) { - this.collectionTagActions.push({ - action: Action.Edit, - title: 'Edit', - callback: this.dummyCallback, - requiresAdmin: true - }); - - this.seriesActions.push({ - action: Action.Scan, - title: 'Scan Series', - callback: this.dummyCallback, - requiresAdmin: true - }); - - this.seriesActions.push({ - action: Action.RefreshMetadata, - title: 'Refresh Covers', - callback: this.dummyCallback, - requiresAdmin: true - }); - - this.seriesActions.push({ - action: Action.AnalyzeFiles, - title: 'Analyze Files', - callback: this.dummyCallback, - requiresAdmin: true - }); 
- - this.seriesActions.push({ - action: Action.Delete, - title: 'Delete', - callback: this.dummyCallback, - requiresAdmin: true - }); - - this.seriesActions.push({ - action: Action.AddToCollection, - title: 'Add to Collection', - callback: this.dummyCallback, - requiresAdmin: true - }); - - this.seriesActions.push({ - action: Action.Edit, - title: 'Edit', - callback: this.dummyCallback, - requiresAdmin: true - }); - - this.libraryActions.push({ - action: Action.Scan, - title: 'Scan Library', - callback: this.dummyCallback, - requiresAdmin: true - }); - - this.libraryActions.push({ - action: Action.RefreshMetadata, - title: 'Refresh Covers', - callback: this.dummyCallback, - requiresAdmin: true - }); - - this.libraryActions.push({ - action: Action.AnalyzeFiles, - title: 'Analyze Files', - callback: this.dummyCallback, - requiresAdmin: true - }); - - this.chapterActions.push({ - action: Action.Edit, - title: 'Details', - callback: this.dummyCallback, - requiresAdmin: false - }); - } - - if (this.hasDownloadRole || this.isAdmin) { - this.volumeActions.push({ - action: Action.Download, - title: 'Download', - callback: this.dummyCallback, - requiresAdmin: false - }); - - this.chapterActions.push({ - action: Action.Download, - title: 'Download', - callback: this.dummyCallback, - requiresAdmin: false - }); - } }); } - getLibraryActions(callback: (action: Action, library: Library) => void) { - const actions = this.libraryActions.map(a => {return {...a}}); - actions.forEach(action => action.callback = callback); + getLibraryActions(callback: (action: ActionItem, library: Library) => void) { + return this.applyCallbackToList(this.libraryActions, callback); + } + + getSeriesActions(callback: (action: ActionItem, series: Series) => void) { + return this.applyCallbackToList(this.seriesActions, callback); + } + + getVolumeActions(callback: (action: ActionItem, volume: Volume) => void) { + return this.applyCallbackToList(this.volumeActions, callback); + } + + 
getChapterActions(callback: (action: ActionItem, chapter: Chapter) => void) { + return this.applyCallbackToList(this.chapterActions, callback); + } + + getCollectionTagActions(callback: (action: ActionItem, collectionTag: CollectionTag) => void) { + return this.applyCallbackToList(this.collectionTagActions, callback); + } + + getReadingListActions(callback: (action: ActionItem, readingList: ReadingList) => void) { + return this.applyCallbackToList(this.readingListActions, callback); + } + + getBookmarkActions(callback: (action: ActionItem, series: Series) => void) { + return this.applyCallbackToList(this.bookmarkActions, callback); + } + + dummyCallback(action: ActionItem, data: any) {} + + filterSendToAction(actions: Array>, chapter: Chapter) { + if (chapter.files.filter(f => f.format === MangaFormat.EPUB || f.format === MangaFormat.PDF).length !== chapter.files.length) { + // Remove Send To as it doesn't apply + return actions.filter(item => item.title !== 'Send To'); + } return actions; } - getSeriesActions(callback: (action: Action, series: Series) => void) { - const actions = this.seriesActions.map(a => {return {...a}}); - actions.forEach(action => action.callback = callback); - return actions; - } + private _resetActions() { + this.libraryActions = [ + { + action: Action.Scan, + title: 'Scan Library', + callback: this.dummyCallback, + requiresAdmin: false, + children: [], + }, + { + action: Action.Submenu, + title: 'Others', + callback: this.dummyCallback, + requiresAdmin: true, + children: [ + { + action: Action.RefreshMetadata, + title: 'Refresh Covers', + callback: this.dummyCallback, + requiresAdmin: true, + children: [], + }, + { + action: Action.AnalyzeFiles, + title: 'Analyze Files', + callback: this.dummyCallback, + requiresAdmin: true, + children: [], + }, + ], + }, + ]; - getVolumeActions(callback: (action: Action, volume: Volume) => void) { - const actions = this.volumeActions.map(a => {return {...a}}); - actions.forEach(action => action.callback = 
callback); - return actions; - } + this.collectionTagActions = [ + { + action: Action.Edit, + title: 'Edit', + callback: this.dummyCallback, + requiresAdmin: true, + children: [], + }, + ]; - getChapterActions(callback: (action: Action, chapter: Chapter) => void) { - const actions = this.chapterActions.map(a => {return {...a}}); - actions.forEach(action => action.callback = callback); - return actions; - } - - getCollectionTagActions(callback: (action: Action, collectionTag: CollectionTag) => void) { - const actions = this.collectionTagActions.map(a => {return {...a}}); - actions.forEach(action => action.callback = callback); - return actions; - } - - getReadingListActions(callback: (action: Action, readingList: ReadingList) => void) { - const actions = this.readingListActions.map(a => {return {...a}}); - actions.forEach(action => action.callback = callback); - return actions; - } - - getBookmarkActions(callback: (action: Action, series: Series) => void) { - const actions = this.bookmarkActions.map(a => {return {...a}}); - actions.forEach(action => action.callback = callback); - return actions; - } - - dummyCallback(action: Action, data: any) {} - - _resetActions() { - this.libraryActions = []; - - this.collectionTagActions = []; - this.seriesActions = [ { action: Action.MarkAsRead, title: 'Mark as Read', callback: this.dummyCallback, - requiresAdmin: false + requiresAdmin: false, + children: [], }, { action: Action.MarkAsUnread, title: 'Mark as Unread', callback: this.dummyCallback, - requiresAdmin: false - }, - { - action: Action.AddToReadingList, - title: 'Add to Reading List', - callback: this.dummyCallback, - requiresAdmin: false + requiresAdmin: false, + children: [], }, { - action: Action.AddToWantToReadList, - title: 'Add to Want To Read', + action: Action.Scan, + title: 'Scan Series', callback: this.dummyCallback, - requiresAdmin: false + requiresAdmin: true, + children: [], }, { - action: Action.RemoveFromWantToReadList, - title: 'Remove from Want To 
Read', + action: Action.Submenu, + title: 'Add to', callback: this.dummyCallback, - requiresAdmin: false - } + requiresAdmin: false, + children: [ + { + action: Action.AddToWantToReadList, + title: 'Add to Want To Read', + callback: this.dummyCallback, + requiresAdmin: false, + children: [], + }, + { + action: Action.RemoveFromWantToReadList, + title: 'Remove from Want To Read', + callback: this.dummyCallback, + requiresAdmin: false, + children: [], + }, + { + action: Action.AddToReadingList, + title: 'Add to Reading List', + callback: this.dummyCallback, + requiresAdmin: false, + children: [], + }, + { + action: Action.AddToCollection, + title: 'Add to Collection', + callback: this.dummyCallback, + requiresAdmin: true, + children: [], + }, + ], + }, + { + action: Action.Submenu, + title: 'Send To', + callback: this.dummyCallback, + requiresAdmin: false, + children: [ + { + action: Action.SendTo, + title: '', + callback: this.dummyCallback, + requiresAdmin: false, + dynamicList: this.deviceService.devices$.pipe(map((devices: Array) => devices.map(d => { + return {'title': d.name, 'data': d}; + }), shareReplay())), + children: [] + } + ], + }, + { + action: Action.Submenu, + title: 'Others', + callback: this.dummyCallback, + requiresAdmin: true, + children: [ + { + action: Action.RefreshMetadata, + title: 'Refresh Covers', + callback: this.dummyCallback, + requiresAdmin: true, + children: [], + }, + { + action: Action.AnalyzeFiles, + title: 'Analyze Files', + callback: this.dummyCallback, + requiresAdmin: true, + children: [], + }, + { + action: Action.Delete, + title: 'Delete', + callback: this.dummyCallback, + requiresAdmin: true, + class: 'danger', + children: [], + }, + ], + }, + { + action: Action.Download, + title: 'Download', + callback: this.dummyCallback, + requiresAdmin: false, + children: [], + }, + { + action: Action.Edit, + title: 'Edit', + callback: this.dummyCallback, + requiresAdmin: true, + children: [], + }, ]; this.volumeActions = [ + { + action: 
Action.IncognitoRead, + title: 'Read Incognito', + callback: this.dummyCallback, + requiresAdmin: false, + children: [], + }, { action: Action.MarkAsRead, title: 'Mark as Read', callback: this.dummyCallback, - requiresAdmin: false + requiresAdmin: false, + children: [], }, { action: Action.MarkAsUnread, title: 'Mark as Unread', callback: this.dummyCallback, - requiresAdmin: false + requiresAdmin: false, + children: [], + }, + { + action: Action.Submenu, + title: 'Add to', + callback: this.dummyCallback, + requiresAdmin: false, + children: [ + { + action: Action.AddToReadingList, + title: 'Add to Reading List', + callback: this.dummyCallback, + requiresAdmin: false, + children: [], + } + ] + }, + { + action: Action.Submenu, + title: 'Send To', + callback: this.dummyCallback, + requiresAdmin: false, + children: [ + { + action: Action.SendTo, + title: '', + callback: this.dummyCallback, + requiresAdmin: false, + dynamicList: this.deviceService.devices$.pipe(map((devices: Array) => devices.map(d => { + return {'title': d.name, 'data': d}; + }), shareReplay())), + children: [] + } + ], }, { - action: Action.AddToReadingList, - title: 'Add to Reading List', + action: Action.Download, + title: 'Download', callback: this.dummyCallback, - requiresAdmin: false - }, - { - action: Action.IncognitoRead, - title: 'Read Incognito', - callback: this.dummyCallback, - requiresAdmin: false + requiresAdmin: false, + children: [], }, { action: Action.Edit, title: 'Details', callback: this.dummyCallback, - requiresAdmin: false - } + requiresAdmin: false, + children: [], + }, ]; this.chapterActions = [ + { + action: Action.IncognitoRead, + title: 'Read Incognito', + callback: this.dummyCallback, + requiresAdmin: false, + children: [], + }, { action: Action.MarkAsRead, title: 'Mark as Read', callback: this.dummyCallback, - requiresAdmin: false + requiresAdmin: false, + children: [], }, { action: Action.MarkAsUnread, title: 'Mark as Unread', callback: this.dummyCallback, - requiresAdmin: 
false + requiresAdmin: false, + children: [], + }, + { + action: Action.Submenu, + title: 'Add to', + callback: this.dummyCallback, + requiresAdmin: false, + children: [ + { + action: Action.AddToReadingList, + title: 'Add to Reading List', + callback: this.dummyCallback, + requiresAdmin: false, + children: [], + } + ] + }, + { + action: Action.Submenu, + title: 'Send To', + callback: this.dummyCallback, + requiresAdmin: false, + children: [ + { + action: Action.SendTo, + title: '', + callback: this.dummyCallback, + requiresAdmin: false, + dynamicList: this.deviceService.devices$.pipe(map((devices: Array) => devices.map(d => { + return {'title': d.name, 'data': d}; + }), shareReplay())), + children: [] + } + ], + }, + // RBS will handle rendering this, so non-admins with download are appicable + { + action: Action.Download, + title: 'Download', + callback: this.dummyCallback, + requiresAdmin: false, + children: [], }, { - action: Action.IncognitoRead, - title: 'Read Incognito', + action: Action.Edit, + title: 'Details', callback: this.dummyCallback, - requiresAdmin: false - }, - { - action: Action.AddToReadingList, - title: 'Add to Reading List', - callback: this.dummyCallback, - requiresAdmin: false + requiresAdmin: false, + children: [], }, ]; @@ -364,13 +494,16 @@ export class ActionFactoryService { action: Action.Edit, title: 'Edit', callback: this.dummyCallback, - requiresAdmin: false + requiresAdmin: false, + children: [], }, { action: Action.Delete, title: 'Delete', callback: this.dummyCallback, - requiresAdmin: false + requiresAdmin: false, + class: 'danger', + children: [], }, ]; @@ -379,20 +512,59 @@ export class ActionFactoryService { action: Action.ViewSeries, title: 'View Series', callback: this.dummyCallback, - requiresAdmin: false + requiresAdmin: false, + children: [], }, { action: Action.DownloadBookmark, title: 'Download', callback: this.dummyCallback, - requiresAdmin: false + requiresAdmin: false, + children: [], }, { action: Action.Delete, 
title: 'Clear', callback: this.dummyCallback, - requiresAdmin: false + class: 'danger', + requiresAdmin: false, + children: [], }, - ] + ]; } + + private applyCallback(action: ActionItem, callback: (action: ActionItem, data: any) => void) { + action.callback = callback; + + if (action.children === null || action.children?.length === 0) return; + + action.children?.forEach((childAction) => { + this.applyCallback(childAction, callback); + }); + } + + public applyCallbackToList(list: Array>, callback: (action: ActionItem, data: any) => void): Array> { + const actions = list.map((a) => { + return { ...a }; + }); + actions.forEach((action) => this.applyCallback(action, callback)); + return actions; + } + + // Checks the whole tree for the action and returns true if it exists + public hasAction(actions: Array>, action: Action) { + var actionFound = false; + + if (actions.length === 0) return actionFound; + + for (let i = 0; i < actions.length; i++) + { + if (actions[i].action === action) return true; + if (this.hasAction(actions[i].children, action)) return true; + } + + + return actionFound; + } + } diff --git a/UI/Web/src/app/_services/action.service.ts b/UI/Web/src/app/_services/action.service.ts index ba905174c..489c5c30d 100644 --- a/UI/Web/src/app/_services/action.service.ts +++ b/UI/Web/src/app/_services/action.service.ts @@ -8,10 +8,12 @@ import { AddToListModalComponent, ADD_FLOW } from '../reading-list/_modals/add-t import { EditReadingListModalComponent } from '../reading-list/_modals/edit-reading-list-modal/edit-reading-list-modal.component'; import { ConfirmService } from '../shared/confirm.service'; import { Chapter } from '../_models/chapter'; +import { Device } from '../_models/device/device'; import { Library } from '../_models/library'; import { ReadingList } from '../_models/reading-list'; import { Series } from '../_models/series'; import { Volume } from '../_models/volume'; +import { DeviceService } from './device.service'; import { LibraryService } 
from './library.service'; import { MemberService } from './member.service'; import { ReaderService } from './reader.service'; @@ -39,7 +41,7 @@ export class ActionService implements OnDestroy { constructor(private libraryService: LibraryService, private seriesService: SeriesService, private readerService: ReaderService, private toastr: ToastrService, private modalService: NgbModal, - private confirmService: ConfirmService, private memberService: MemberService) { } + private confirmService: ConfirmService, private memberService: MemberService, private deviceSerivce: DeviceService) { } ngOnDestroy() { this.onDestroy.next(); @@ -552,6 +554,15 @@ export class ActionService implements OnDestroy { }); } + sendToDevice(chapterIds: Array, device: Device, callback?: VoidActionCallback) { + this.deviceSerivce.sendTo(chapterIds, device.id).subscribe(() => { + this.toastr.success('File emailed to ' + device.name); + if (callback) { + callback(); + } + }); + } + private async promptIfForce(extraContent: string = '') { // Prompt user if we should do a force or not const config = this.confirmService.defaultConfirm; diff --git a/UI/Web/src/app/_services/device.service.ts b/UI/Web/src/app/_services/device.service.ts new file mode 100644 index 000000000..52e1c5aad --- /dev/null +++ b/UI/Web/src/app/_services/device.service.ts @@ -0,0 +1,57 @@ +import { HttpClient } from '@angular/common/http'; +import { Injectable } from '@angular/core'; +import { ReplaySubject, shareReplay, tap } from 'rxjs'; +import { environment } from 'src/environments/environment'; +import { Device } from '../_models/device/device'; +import { DevicePlatform } from '../_models/device/device-platform'; +import { AccountService } from './account.service'; + +@Injectable({ + providedIn: 'root' +}) +export class DeviceService { + + baseUrl = environment.apiUrl; + + private devicesSource: ReplaySubject = new ReplaySubject(1); + public devices$ = this.devicesSource.asObservable().pipe(shareReplay()); + + + 
constructor(private httpClient: HttpClient, private accountService: AccountService) { + // Ensure we are authenticated before we make an authenticated api call. + this.accountService.currentUser$.subscribe(user => { + if (!user) { + this.devicesSource.next([]); + return; + } + + this.httpClient.get(this.baseUrl + 'device', {}).subscribe(data => { + this.devicesSource.next(data); + }); + }); + } + + createDevice(name: string, platform: DevicePlatform, emailAddress: string) { + return this.httpClient.post(this.baseUrl + 'device/create', {name, platform, emailAddress}, {responseType: 'text' as 'json'}); + } + + updateDevice(id: number, name: string, platform: DevicePlatform, emailAddress: string) { + return this.httpClient.post(this.baseUrl + 'device/update', {id, name, platform, emailAddress}, {responseType: 'text' as 'json'}); + } + + deleteDevice(id: number) { + return this.httpClient.delete(this.baseUrl + 'device?deviceId=' + id); + } + + getDevices() { + return this.httpClient.get(this.baseUrl + 'device', {}).pipe(tap(data => { + this.devicesSource.next(data); + })); + } + + sendTo(chapterIds: Array, deviceId: number) { + return this.httpClient.post(this.baseUrl + 'device/send-to', {deviceId, chapterIds}, {responseType: 'text' as 'json'}); + } + + +} diff --git a/UI/Web/src/app/_services/library.service.ts b/UI/Web/src/app/_services/library.service.ts index 5aac12cfd..403fd409e 100644 --- a/UI/Web/src/app/_services/library.service.ts +++ b/UI/Web/src/app/_services/library.service.ts @@ -1,11 +1,10 @@ import { HttpClient } from '@angular/common/http'; import { Injectable } from '@angular/core'; import { of } from 'rxjs'; -import { map, take } from 'rxjs/operators'; +import { map } from 'rxjs/operators'; import { environment } from 'src/environments/environment'; import { JumpKey } from '../_models/jumpbar/jump-key'; import { Library, LibraryType } from '../_models/library'; -import { SearchResultGroup } from '../_models/search/search-result-group'; import { 
DirectoryDto } from '../_models/system/directory-dto'; @@ -68,10 +67,6 @@ export class LibraryService { return this.httpClient.get(this.baseUrl + 'library'); } - getLibrariesForMember() { - return this.httpClient.get(this.baseUrl + 'library/libraries'); - } - updateLibrariesForMember(username: string, selectedLibraries: Library[]) { return this.httpClient.post(this.baseUrl + 'library/grant-access', {username, selectedLibraries}); } @@ -113,12 +108,4 @@ export class LibraryService { return this.libraryTypes[libraryId]; })); } - - search(term: string) { - if (term === '') { - return of(new SearchResultGroup()); - } - return this.httpClient.get(this.baseUrl + 'library/search?queryString=' + encodeURIComponent(term)); - } - } diff --git a/UI/Web/src/app/_services/message-hub.service.ts b/UI/Web/src/app/_services/message-hub.service.ts index 961afd6cb..d454f3866 100644 --- a/UI/Web/src/app/_services/message-hub.service.ts +++ b/UI/Web/src/app/_services/message-hub.service.ts @@ -51,31 +51,35 @@ export enum EVENTS { /** * A subtype of NotificationProgress that represents a file being processed for cover image extraction */ - CoverUpdateProgress = 'CoverUpdateProgress', + CoverUpdateProgress = 'CoverUpdateProgress', /** * A library is created or removed from the instance */ - LibraryModified = 'LibraryModified', + LibraryModified = 'LibraryModified', /** * A user updates an entities read progress */ - UserProgressUpdate = 'UserProgressUpdate', + UserProgressUpdate = 'UserProgressUpdate', /** * A user updates account or preferences */ - UserUpdate = 'UserUpdate', + UserUpdate = 'UserUpdate', /** * When bulk bookmarks are being converted */ - ConvertBookmarksProgress = 'ConvertBookmarksProgress', + ConvertBookmarksProgress = 'ConvertBookmarksProgress', /** * When files are being scanned to calculate word count */ - WordCountAnalyzerProgress = 'WordCountAnalyzerProgress', + WordCountAnalyzerProgress = 'WordCountAnalyzerProgress', /** * When the user needs to be informed, but 
it's not a big deal */ - Info = 'Info', + Info = 'Info', + /** + * A user is sending files to their device + */ + SendingToDevice = 'SendingToDevice', } export interface Message { @@ -142,6 +146,12 @@ export class MessageHubService { this.onlineUsersSource.next(usernames); }); + this.hubConnection.on("LogObject", resp => { + console.log(resp); + }); + this.hubConnection.on("LogString", resp => { + console.log(resp); + }); this.hubConnection.on(EVENTS.ScanSeries, resp => { this.messagesSource.next({ @@ -255,6 +265,13 @@ export class MessageHubService { payload: resp.body }); }); + + this.hubConnection.on(EVENTS.SendingToDevice, resp => { + this.messagesSource.next({ + event: EVENTS.SendingToDevice, + payload: resp.body + }); + }); } stopHubConnection() { diff --git a/UI/Web/src/app/_services/metadata.service.ts b/UI/Web/src/app/_services/metadata.service.ts index a8b1e9b3e..c2ec18320 100644 --- a/UI/Web/src/app/_services/metadata.service.ts +++ b/UI/Web/src/app/_services/metadata.service.ts @@ -86,8 +86,6 @@ export class MetadataService { return of(this.validLanguages); } return this.httpClient.get>(this.baseUrl + 'metadata/all-languages').pipe(map(l => this.validLanguages = l)); - - //return this.httpClient.get>(this.baseUrl + 'metadata/all-languages').pipe(); } getAllPeople(libraries?: Array) { diff --git a/UI/Web/src/app/_services/reader.service.ts b/UI/Web/src/app/_services/reader.service.ts index 516d04c9e..ddabc0ab8 100644 --- a/UI/Web/src/app/_services/reader.service.ts +++ b/UI/Web/src/app/_services/reader.service.ts @@ -1,4 +1,4 @@ -import { HttpClient } from '@angular/common/http'; +import { HttpClient, HttpParams } from '@angular/common/http'; import { Injectable } from '@angular/core'; import { Location } from '@angular/common'; import { Router } from '@angular/router'; @@ -10,6 +10,9 @@ import { MangaFormat } from '../_models/manga-format'; import { BookmarkInfo } from '../_models/manga-reader/bookmark-info'; import { PageBookmark } from 
'../_models/page-bookmark'; import { ProgressBookmark } from '../_models/progress-bookmark'; +import { SeriesFilter } from '../_models/series-filter'; +import { UtilityService } from '../shared/_services/utility.service'; +import { FilterUtilitiesService } from '../shared/_services/filter-utilities.service'; export const CHAPTER_ID_DOESNT_EXIST = -1; export const CHAPTER_ID_NOT_FETCHED = -2; @@ -24,7 +27,9 @@ export class ReaderService { // Override background color for reader and restore it onDestroy private originalBodyColor!: string; - constructor(private httpClient: HttpClient, private router: Router, private location: Location) { } + constructor(private httpClient: HttpClient, private router: Router, + private location: Location, private utilityService: UtilityService, + private filterUtilitySerivce: FilterUtilitiesService) { } getNavigationArray(libraryId: number, seriesId: number, chapterId: number, format: MangaFormat) { if (format === undefined) format = MangaFormat.ARCHIVE; @@ -50,20 +55,24 @@ export class ReaderService { return this.httpClient.post(this.baseUrl + 'reader/unbookmark', {seriesId, volumeId, chapterId, page}); } - getAllBookmarks() { - return this.httpClient.get(this.baseUrl + 'reader/get-all-bookmarks'); + getAllBookmarks(filter: SeriesFilter | undefined) { + let params = new HttpParams(); + params = this.utilityService.addPaginationIfExists(params, undefined, undefined); + const data = this.filterUtilitySerivce.createSeriesFilter(filter); + + return this.httpClient.post(this.baseUrl + 'reader/all-bookmarks', data); } getBookmarks(chapterId: number) { - return this.httpClient.get(this.baseUrl + 'reader/get-bookmarks?chapterId=' + chapterId); + return this.httpClient.get(this.baseUrl + 'reader/chapter-bookmarks?chapterId=' + chapterId); } getBookmarksForVolume(volumeId: number) { - return this.httpClient.get(this.baseUrl + 'reader/get-volume-bookmarks?volumeId=' + volumeId); + return this.httpClient.get(this.baseUrl + 
'reader/volume-bookmarks?volumeId=' + volumeId); } getBookmarksForSeries(seriesId: number) { - return this.httpClient.get(this.baseUrl + 'reader/get-series-bookmarks?seriesId=' + seriesId); + return this.httpClient.get(this.baseUrl + 'reader/series-bookmarks?seriesId=' + seriesId); } clearBookmarks(seriesId: number) { diff --git a/UI/Web/src/app/_services/search.service.ts b/UI/Web/src/app/_services/search.service.ts new file mode 100644 index 000000000..fa989fa35 --- /dev/null +++ b/UI/Web/src/app/_services/search.service.ts @@ -0,0 +1,31 @@ +import { HttpClient } from '@angular/common/http'; +import { Injectable } from '@angular/core'; +import { of } from 'rxjs'; +import { environment } from 'src/environments/environment'; +import { SearchResultGroup } from '../_models/search/search-result-group'; +import { Series } from '../_models/series'; + +@Injectable({ + providedIn: 'root' +}) +export class SearchService { + + baseUrl = environment.apiUrl; + + constructor(private httpClient: HttpClient) { } + + search(term: string) { + if (term === '') { + return of(new SearchResultGroup()); + } + return this.httpClient.get(this.baseUrl + 'search/search?queryString=' + encodeURIComponent(term)); + } + + getSeriesForMangaFile(mangaFileId: number) { + return this.httpClient.get(this.baseUrl + 'search/series-for-mangafile?mangaFileId=' + mangaFileId); + } + + getSeriesForChapter(chapterId: number) { + return this.httpClient.get(this.baseUrl + 'search/series-for-chapter?chapterId=' + chapterId); + } +} diff --git a/UI/Web/src/app/_services/series.service.ts b/UI/Web/src/app/_services/series.service.ts index cc9c4ef60..3332fc771 100644 --- a/UI/Web/src/app/_services/series.service.ts +++ b/UI/Web/src/app/_services/series.service.ts @@ -3,6 +3,7 @@ import { Injectable } from '@angular/core'; import { Observable, of } from 'rxjs'; import { map } from 'rxjs/operators'; import { environment } from 'src/environments/environment'; +import { FilterUtilitiesService } from 
'../shared/_services/filter-utilities.service'; import { UtilityService } from '../shared/_services/utility.service'; import { Chapter } from '../_models/chapter'; import { ChapterMetadata } from '../_models/chapter-metadata'; @@ -26,12 +27,13 @@ export class SeriesService { paginatedResults: PaginatedResult = new PaginatedResult(); paginatedSeriesForTagsResults: PaginatedResult = new PaginatedResult(); - constructor(private httpClient: HttpClient, private imageService: ImageService, private utilityService: UtilityService) { } + constructor(private httpClient: HttpClient, private imageService: ImageService, + private utilityService: UtilityService, private filterUtilitySerivce: FilterUtilitiesService) { } getAllSeries(pageNum?: number, itemsPerPage?: number, filter?: SeriesFilter) { let params = new HttpParams(); params = this.utilityService.addPaginationIfExists(params, pageNum, itemsPerPage); - const data = this.createSeriesFilter(filter); + const data = this.filterUtilitySerivce.createSeriesFilter(filter); return this.httpClient.post>(this.baseUrl + 'series/all', data, {observe: 'response', params}).pipe( map((response: any) => { @@ -43,7 +45,7 @@ export class SeriesService { getSeriesForLibrary(libraryId: number, pageNum?: number, itemsPerPage?: number, filter?: SeriesFilter) { let params = new HttpParams(); params = this.utilityService.addPaginationIfExists(params, pageNum, itemsPerPage); - const data = this.createSeriesFilter(filter); + const data = this.filterUtilitySerivce.createSeriesFilter(filter); return this.httpClient.post>(this.baseUrl + 'series?libraryId=' + libraryId, data, {observe: 'response', params}).pipe( map((response: any) => { @@ -76,14 +78,6 @@ export class SeriesService { return this.httpClient.get(this.baseUrl + 'series/chapter-metadata?chapterId=' + chapterId); } - getSeriesForMangaFile(mangaFileId: number) { - return this.httpClient.get(this.baseUrl + 'series/series-for-mangafile?mangaFileId=' + mangaFileId); - } - - 
getSeriesForChapter(chapterId: number) { - return this.httpClient.get(this.baseUrl + 'series/series-for-chapter?chapterId=' + chapterId); - } - delete(seriesId: number) { return this.httpClient.delete(this.baseUrl + 'series/' + seriesId); } @@ -109,7 +103,7 @@ export class SeriesService { } getRecentlyAdded(libraryId: number = 0, pageNum?: number, itemsPerPage?: number, filter?: SeriesFilter) { - const data = this.createSeriesFilter(filter); + const data = this.filterUtilitySerivce.createSeriesFilter(filter); let params = new HttpParams(); params = this.utilityService.addPaginationIfExists(params, pageNum, itemsPerPage); @@ -125,7 +119,7 @@ export class SeriesService { } getWantToRead(pageNum?: number, itemsPerPage?: number, filter?: SeriesFilter): Observable> { - const data = this.createSeriesFilter(filter); + const data = this.filterUtilitySerivce.createSeriesFilter(filter); let params = new HttpParams(); params = this.utilityService.addPaginationIfExists(params, pageNum, itemsPerPage); @@ -137,7 +131,7 @@ export class SeriesService { } getOnDeck(libraryId: number = 0, pageNum?: number, itemsPerPage?: number, filter?: SeriesFilter) { - const data = this.createSeriesFilter(filter); + const data = this.filterUtilitySerivce.createSeriesFilter(filter); let params = new HttpParams(); params = this.utilityService.addPaginationIfExists(params, pageNum, itemsPerPage); @@ -192,53 +186,16 @@ export class SeriesService { return this.httpClient.get(this.baseUrl + 'series/all-related?seriesId=' + seriesId); } - updateRelationships(seriesId: number, adaptations: Array, characters: Array, - contains: Array, others: Array, prequels: Array, + updateRelationships(seriesId: number, adaptations: Array, characters: Array, + contains: Array, others: Array, prequels: Array, sequels: Array, sideStories: Array, spinOffs: Array, - alternativeSettings: Array, alternativeVersions: Array, doujinshis: Array) { - return this.httpClient.post(this.baseUrl + 'series/update-related?seriesId=' + 
seriesId, + alternativeSettings: Array, alternativeVersions: Array, doujinshis: Array, editions: Array) { + return this.httpClient.post(this.baseUrl + 'series/update-related?seriesId=' + seriesId, {seriesId, adaptations, characters, sequels, prequels, contains, others, sideStories, spinOffs, - alternativeSettings, alternativeVersions, doujinshis}); + alternativeSettings, alternativeVersions, doujinshis, editions}); } getSeriesDetail(seriesId: number) { return this.httpClient.get(this.baseUrl + 'series/series-detail?seriesId=' + seriesId); } - - - - createSeriesFilter(filter?: SeriesFilter) { - if (filter !== undefined) return filter; - const data: SeriesFilter = { - formats: [], - libraries: [], - genres: [], - writers: [], - artists: [], - penciller: [], - inker: [], - colorist: [], - letterer: [], - coverArtist: [], - editor: [], - publisher: [], - character: [], - translators: [], - collectionTags: [], - rating: 0, - readStatus: { - read: true, - inProgress: true, - notRead: true - }, - sortOptions: null, - ageRating: [], - tags: [], - languages: [], - publicationStatus: [], - seriesNameQuery: '', - }; - - return data; - } } diff --git a/UI/Web/src/app/_services/server.service.ts b/UI/Web/src/app/_services/server.service.ts index 0d51a9120..bd6e649c1 100644 --- a/UI/Web/src/app/_services/server.service.ts +++ b/UI/Web/src/app/_services/server.service.ts @@ -26,6 +26,10 @@ export class ServerService { return this.httpClient.post(this.baseUrl + 'server/clear-cache', {}); } + cleanupWantToRead() { + return this.httpClient.post(this.baseUrl + 'server/cleanup-want-to-read', {}); + } + backupDatabase() { return this.httpClient.post(this.baseUrl + 'server/backup-db', {}); } @@ -42,7 +46,7 @@ export class ServerService { return this.httpClient.get(this.baseUrl + 'server/accessible'); } - getReoccuringJobs() { + getRecurringJobs() { return this.httpClient.get(this.baseUrl + 'server/jobs'); } diff --git a/UI/Web/src/app/_services/theme.service.ts 
b/UI/Web/src/app/_services/theme.service.ts index 18e3764b9..8414150cc 100644 --- a/UI/Web/src/app/_services/theme.service.ts +++ b/UI/Web/src/app/_services/theme.service.ts @@ -3,11 +3,12 @@ import { HttpClient } from '@angular/common/http'; import { Inject, Injectable, OnDestroy, Renderer2, RendererFactory2, SecurityContext } from '@angular/core'; import { DomSanitizer } from '@angular/platform-browser'; import { ToastrService } from 'ngx-toastr'; -import { map, ReplaySubject, Subject, takeUntil, take } from 'rxjs'; +import { map, ReplaySubject, Subject, takeUntil, take, distinctUntilChanged, Observable } from 'rxjs'; import { environment } from 'src/environments/environment'; import { ConfirmService } from '../shared/confirm.service'; import { NotificationProgressEvent } from '../_models/events/notification-progress-event'; import { SiteTheme, ThemeProvider } from '../_models/preferences/site-theme'; +import { AccountService } from './account.service'; import { EVENTS, MessageHubService } from './message-hub.service'; @@ -24,7 +25,7 @@ export class ThemeService implements OnDestroy { private themesSource = new ReplaySubject(1); public themes$ = this.themesSource.asObservable(); - + /** * Maintain a cache of themes. SignalR will inform us if we need to refresh cache */ @@ -77,7 +78,7 @@ export class ThemeService implements OnDestroy { this.themeCache = themes; this.themesSource.next(themes); this.currentTheme$.pipe(take(1)).subscribe(theme => { - if (!themes.includes(theme)) { + if (themes.filter(t => t.id === theme.id).length === 0) { this.setTheme(this.defaultTheme); this.toastr.info('The active theme no longer exists. 
Please refresh the page.'); } diff --git a/UI/Web/src/app/admin/_models/server-settings.ts b/UI/Web/src/app/admin/_models/server-settings.ts index 72438a431..f7e05f895 100644 --- a/UI/Web/src/app/admin/_models/server-settings.ts +++ b/UI/Web/src/app/admin/_models/server-settings.ts @@ -12,5 +12,6 @@ export interface ServerSettings { convertBookmarkToWebP: boolean; enableSwaggerUi: boolean; totalBackups: number; + totalLogs: number; enableFolderWatching: boolean; } diff --git a/UI/Web/src/app/admin/admin.module.ts b/UI/Web/src/app/admin/admin.module.ts index dd20d02ca..2bee7e133 100644 --- a/UI/Web/src/app/admin/admin.module.ts +++ b/UI/Web/src/app/admin/admin.module.ts @@ -23,6 +23,8 @@ import { SidenavModule } from '../sidenav/sidenav.module'; import { ManageMediaSettingsComponent } from './manage-media-settings/manage-media-settings.component'; import { ManageEmailSettingsComponent } from './manage-email-settings/manage-email-settings.component'; import { ManageTasksSettingsComponent } from './manage-tasks-settings/manage-tasks-settings.component'; +import { ManageLogsComponent } from './manage-logs/manage-logs.component'; +import { VirtualScrollerModule } from '@iharbeck/ngx-virtual-scroller'; @@ -45,6 +47,7 @@ import { ManageTasksSettingsComponent } from './manage-tasks-settings/manage-tas ManageMediaSettingsComponent, ManageEmailSettingsComponent, ManageTasksSettingsComponent, + ManageLogsComponent, ], imports: [ CommonModule, @@ -59,6 +62,7 @@ import { ManageTasksSettingsComponent } from './manage-tasks-settings/manage-tas PipeModule, SidenavModule, UserSettingsModule, // API-key componet + VirtualScrollerModule ], providers: [] }) diff --git a/UI/Web/src/app/admin/dashboard/dashboard.component.html b/UI/Web/src/app/admin/dashboard/dashboard.component.html index 6dbcd7cf6..1b8fc85a8 100644 --- a/UI/Web/src/app/admin/dashboard/dashboard.component.html +++ b/UI/Web/src/app/admin/dashboard/dashboard.component.html @@ -23,6 +23,9 @@ + + + diff --git 
a/UI/Web/src/app/admin/dashboard/dashboard.component.ts b/UI/Web/src/app/admin/dashboard/dashboard.component.ts index 097a3674b..c50c9344b 100644 --- a/UI/Web/src/app/admin/dashboard/dashboard.component.ts +++ b/UI/Web/src/app/admin/dashboard/dashboard.component.ts @@ -13,7 +13,8 @@ enum TabID { Libraries = 'libraries', System = 'system', Plugins = 'plugins', - Tasks = 'tasks' + Tasks = 'tasks', + Logs = 'logs' } @Component({ @@ -27,6 +28,7 @@ export class DashboardComponent implements OnInit { {title: 'General', fragment: TabID.General}, {title: 'Users', fragment: TabID.Users}, {title: 'Libraries', fragment: TabID.Libraries}, + //{title: 'Logs', fragment: TabID.Logs}, {title: 'Media', fragment: TabID.Media}, {title: 'Email', fragment: TabID.Email}, //{title: 'Plugins', fragment: TabID.Plugins}, diff --git a/UI/Web/src/app/admin/edit-user/edit-user.component.html b/UI/Web/src/app/admin/edit-user/edit-user.component.html index 4ab5c3d0f..4a5e8a600 100644 --- a/UI/Web/src/app/admin/edit-user/edit-user.component.html +++ b/UI/Web/src/app/admin/edit-user/edit-user.component.html @@ -1,58 +1,66 @@ - -