diff --git a/.gitignore b/.gitignore index a6096f916..319923287 100644 --- a/.gitignore +++ b/.gitignore @@ -519,5 +519,5 @@ API/config/stats/* API/config/stats/app_stats.json API/config/pre-metadata/ API/config/post-metadata/ - +API.Tests/TestResults/ UI/Web/.vscode/settings.json diff --git a/API.Benchmark/ParseScannedFilesBenchmarks.cs b/API.Benchmark/ParseScannedFilesBenchmarks.cs index bf8f65c24..a180d566f 100644 --- a/API.Benchmark/ParseScannedFilesBenchmarks.cs +++ b/API.Benchmark/ParseScannedFilesBenchmarks.cs @@ -1,7 +1,6 @@ using System.IO; using System.IO.Abstractions; using API.Entities.Enums; -using API.Interfaces.Services; using API.Parser; using API.Services; using API.Services.Tasks.Scanner; @@ -25,9 +24,11 @@ namespace API.Benchmark public ParseScannedFilesBenchmarks() { - IBookService bookService = new BookService(_bookLogger); - _parseScannedFiles = new ParseScannedFiles(bookService, _logger, _archiveService, - new DirectoryService(Substitute.For>(), new FileSystem())); + var directoryService = new DirectoryService(Substitute.For>(), new FileSystem()); + _parseScannedFiles = new ParseScannedFiles( + Substitute.For(), + directoryService, + new ReadingItemService(_archiveService, new BookService(_bookLogger, directoryService, new ImageService(Substitute.For>(), directoryService)), Substitute.For(), directoryService)); } // [Benchmark] diff --git a/API.Tests/Extensions/ParserInfoListExtensionsTests.cs b/API.Tests/Extensions/ParserInfoListExtensionsTests.cs index f6119cc69..e7c8e9994 100644 --- a/API.Tests/Extensions/ParserInfoListExtensionsTests.cs +++ b/API.Tests/Extensions/ParserInfoListExtensionsTests.cs @@ -1,15 +1,27 @@ using System.Collections.Generic; +using System.IO.Abstractions.TestingHelpers; using System.Linq; using API.Entities.Enums; using API.Extensions; using API.Parser; +using API.Services; using API.Tests.Helpers; +using Microsoft.Extensions.Logging; +using NSubstitute; using Xunit; namespace API.Tests.Extensions { public class ParserInfoListExtensions { + private readonly DefaultParser _defaultParser; + public ParserInfoListExtensions() + { + _defaultParser = + new DefaultParser(new DirectoryService(Substitute.For>(), + new MockFileSystem())); + } + [Theory] [InlineData(new[] {"1", "1", "3-5", "5", "8", "0", "0"}, new[] {"1", "3-5", "5", "8", "0"})] public void DistinctVolumesTest(string[] volumeNumbers, string[] expectedNumbers) @@ -17,7 +29,7 @@ namespace API.Tests.Extensions var infos = volumeNumbers.Select(n => new ParserInfo() {Volumes = n}).ToList(); Assert.Equal(expectedNumbers, infos.DistinctVolumes()); } - + [Theory] [InlineData(new[] {@"Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)] [InlineData(new[] {@"Cynthia The Mission - c000-006 (v06-07) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)] @@ -27,7 +39,7 @@ namespace API.Tests.Extensions var infos = new List(); foreach (var filename in inputInfos) { - infos.Add(API.Parser.Parser.Parse( + infos.Add(_defaultParser.Parse( filename, string.Empty)); } @@ -38,4 +50,4 @@ namespace API.Tests.Extensions Assert.Equal(expectedHasInfo, infos.HasInfo(chapter)); } } -} \ No newline at end of file +} diff --git a/API.Tests/Parser/ComicParserTests.cs b/API.Tests/Parser/ComicParserTests.cs index 832db8ea3..792e5d733 100644 --- a/API.Tests/Parser/ComicParserTests.cs +++ b/API.Tests/Parser/ComicParserTests.cs @@ 
-1,6 +1,10 @@ using System.Collections.Generic; +using System.IO.Abstractions.TestingHelpers; using API.Entities.Enums; using API.Parser; +using API.Services; +using Microsoft.Extensions.Logging; +using NSubstitute; using Xunit; using Xunit.Abstractions; @@ -9,10 +13,14 @@ namespace API.Tests.Parser public class ComicParserTests { private readonly ITestOutputHelper _testOutputHelper; + private readonly DefaultParser _defaultParser; public ComicParserTests(ITestOutputHelper testOutputHelper) { _testOutputHelper = testOutputHelper; + _defaultParser = + new DefaultParser(new DirectoryService(Substitute.For>(), + new MockFileSystem())); } [Theory] @@ -158,72 +166,5 @@ namespace API.Tests.Parser { Assert.Equal(expected, !string.IsNullOrEmpty(API.Parser.Parser.ParseComicSpecial(input))); } - - [Fact] - public void ParseInfoTest() - { - const string rootPath = @"E:/Comics/"; - var expected = new Dictionary(); - var filepath = @"E:/Comics/Teen Titans/Teen Titans v1 Annual 01 (1967) SP01.cbr"; - expected.Add(filepath, new ParserInfo - { - Series = "Teen Titans", Volumes = "0", - Chapters = "0", Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive, - FullFilePath = filepath - }); - - // Fallback test with bad naming - filepath = @"E:\Comics\Comics\Babe\Babe Vol.1 #1-4\Babe 01.cbr"; - expected.Add(filepath, new ParserInfo - { - Series = "Babe", Volumes = "0", Edition = "", - Chapters = "1", Filename = "Babe 01.cbr", Format = MangaFormat.Archive, - FullFilePath = filepath, IsSpecial = false - }); - - filepath = @"E:\Comics\Comics\Publisher\Batman the Detective (2021)\Batman the Detective - v6 - 11 - (2021).cbr"; - expected.Add(filepath, new ParserInfo - { - Series = "Batman the Detective", Volumes = "6", Edition = "", - Chapters = "11", Filename = "Batman the Detective - v6 - 11 - (2021).cbr", Format = MangaFormat.Archive, - FullFilePath = filepath, IsSpecial = false - }); - - filepath = @"E:\Comics\Comics\Batman - The Man Who Laughs #1 (2005)\Batman - The Man Who Laughs #1 (2005).cbr"; - expected.Add(filepath, new ParserInfo - { - Series = "Batman - The Man Who Laughs", Volumes = "0", Edition = "", - Chapters = "1", Filename = "Batman - The Man Who Laughs #1 (2005).cbr", Format = MangaFormat.Archive, - FullFilePath = filepath, IsSpecial = false - }); - - foreach (var file in expected.Keys) - { - var expectedInfo = expected[file]; - var actual = API.Parser.Parser.Parse(file, rootPath, LibraryType.Comic); - if (expectedInfo == null) - { - Assert.Null(actual); - return; - } - Assert.NotNull(actual); - _testOutputHelper.WriteLine($"Validating {file}"); - Assert.Equal(expectedInfo.Format, actual.Format); - _testOutputHelper.WriteLine("Format ✓"); - Assert.Equal(expectedInfo.Series, actual.Series); - _testOutputHelper.WriteLine("Series ✓"); - Assert.Equal(expectedInfo.Chapters, actual.Chapters); - _testOutputHelper.WriteLine("Chapters ✓"); - Assert.Equal(expectedInfo.Volumes, actual.Volumes); - _testOutputHelper.WriteLine("Volumes ✓"); - Assert.Equal(expectedInfo.Edition, actual.Edition); - _testOutputHelper.WriteLine("Edition ✓"); - Assert.Equal(expectedInfo.Filename, actual.Filename); - _testOutputHelper.WriteLine("Filename ✓"); - Assert.Equal(expectedInfo.FullFilePath, actual.FullFilePath); - _testOutputHelper.WriteLine("FullFilePath ✓"); - } - } - } } diff --git a/API.Tests/Parser/DefaultParserTests.cs b/API.Tests/Parser/DefaultParserTests.cs new file mode 100644 index 000000000..9408cfc58 --- /dev/null +++ b/API.Tests/Parser/DefaultParserTests.cs @@ -0,0 +1,303 @@ +using 
System.Collections.Generic; +using System.IO.Abstractions.TestingHelpers; +using API.Entities.Enums; +using API.Parser; +using API.Services; +using Microsoft.Extensions.Logging; +using NSubstitute; +using Xunit; +using Xunit.Abstractions; + +namespace API.Tests.Parser; + +public class DefaultParserTests +{ + private readonly ITestOutputHelper _testOutputHelper; + private readonly DefaultParser _defaultParser; + + public DefaultParserTests(ITestOutputHelper testOutputHelper) + { + _testOutputHelper = testOutputHelper; + var directoryService = new DirectoryService(Substitute.For>(), new MockFileSystem()); + _defaultParser = new DefaultParser(directoryService); + } + + + #region ParseFromFallbackFolders + [Theory] + [InlineData("C:/", "C:/Love Hina/Love Hina - Special.cbz", "Love Hina")] + [InlineData("C:/", "C:/Love Hina/Specials/Ani-Hina Art Collection.cbz", "Love Hina")] + [InlineData("C:/", "C:/Mujaki no Rakuen Something/Mujaki no Rakuen Vol12 ch76.cbz", "Mujaki no Rakuen")] + public void ParseFromFallbackFolders_FallbackShouldParseSeries(string rootDir, string inputPath, string expectedSeries) + { + var actual = _defaultParser.Parse(inputPath, rootDir); + if (actual == null) + { + Assert.NotNull(actual); + return; + } + + Assert.Equal(expectedSeries, actual.Series); + } + + [Theory] + [InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", "Btooom!~1~1")] + [InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", "Btooom!~1~2")] + [InlineData("/manga/Monster #8/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", "Monster #8~0~1")] + public void ParseFromFallbackFolders_ShouldParseSeriesVolumeAndChapter(string inputFile, string expectedParseInfo) + { + const string rootDirectory = "/manga/"; + var tokens = expectedParseInfo.Split("~"); + var actual = new ParserInfo {Chapters = "0", Volumes = "0"}; + _defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual); + Assert.Equal(tokens[0], actual.Series); + Assert.Equal(tokens[1], actual.Volumes); + Assert.Equal(tokens[2], actual.Chapters); + } + + #endregion + + + #region Parse + + [Fact] + public void Parse_ParseInfo_Manga() + { + const string rootPath = @"E:/Manga/"; + var expected = new Dictionary(); + var filepath = @"E:/Manga/Mujaki no Rakuen/Mujaki no Rakuen Vol12 ch76.cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "Mujaki no Rakuen", Volumes = "12", + Chapters = "76", Filename = "Mujaki no Rakuen Vol12 ch76.cbz", Format = MangaFormat.Archive, + FullFilePath = filepath + }); + + filepath = @"E:/Manga/Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/Vol 1.cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", Volumes = "1", + Chapters = "0", Filename = "Vol 1.cbz", Format = MangaFormat.Archive, + FullFilePath = filepath + }); + + filepath = @"E:\Manga\Beelzebub\Beelzebub_01_[Noodles].zip"; + expected.Add(filepath, new ParserInfo + { + Series = "Beelzebub", Volumes = "0", + Chapters = "1", Filename = "Beelzebub_01_[Noodles].zip", Format = MangaFormat.Archive, + FullFilePath = filepath + }); + + filepath = @"E:\Manga\Ichinensei ni Nacchattara\Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip"; + expected.Add(filepath, new ParserInfo + { + Series = "Ichinensei ni Nacchattara", Volumes = "1", + Chapters = "1", Filename = "Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip", Format = MangaFormat.Archive, + FullFilePath = filepath + }); + + filepath = 
@"E:\Manga\Tenjo Tenge (Color)\Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "Tenjo Tenge", Volumes = "1", Edition = "Full Contact Edition", + Chapters = "0", Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive, + FullFilePath = filepath + }); + + filepath = @"E:\Manga\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "Akame ga KILL! ZERO", Volumes = "1", Edition = "", + Chapters = "0", Filename = "Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive, + FullFilePath = filepath + }); + + filepath = @"E:\Manga\Dorohedoro\Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "Dorohedoro", Volumes = "1", Edition = "", + Chapters = "0", Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive, + FullFilePath = filepath + }); + + filepath = @"E:\Manga\APOSIMZ\APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "APOSIMZ", Volumes = "0", Edition = "", + Chapters = "40", Filename = "APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive, + FullFilePath = filepath + }); + + filepath = @"E:\Manga\Corpse Party Musume\Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "Kedouin Makoto - Corpse Party Musume", Volumes = "0", Edition = "", + Chapters = "9", Filename = "Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", Format = MangaFormat.Archive, + FullFilePath = filepath + }); + + filepath = @"E:\Manga\Goblin Slayer\Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "Goblin Slayer - Brand New Day", Volumes = "0", Edition = "", + Chapters = "6.5", Filename = "Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive, + FullFilePath = filepath + }); + + filepath = @"E:\Manga\Summer Time Rendering\Specials\Record 014 (between chapter 083 and ch084) SP11.cbr"; + expected.Add(filepath, new ParserInfo + { + Series = "Summer Time Rendering", Volumes = "0", Edition = "", + Chapters = "0", Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive, + FullFilePath = filepath, IsSpecial = true + }); + + filepath = @"E:\Manga\Seraph of the End\Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "Seraph of the End - Vampire Reign", Volumes = "0", Edition = "", + Chapters = "93", Filename = "Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive, + FullFilePath = filepath, IsSpecial = false + }); + + filepath = @"E:\Manga\Kono Subarashii Sekai ni Bakuen wo!\Vol. 00 Ch. 000.cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "Kono Subarashii Sekai ni Bakuen wo!", Volumes = "0", Edition = "", + Chapters = "0", Filename = "Vol. 00 Ch. 000.cbz", Format = MangaFormat.Archive, + FullFilePath = filepath, IsSpecial = false + }); + + filepath = @"E:\Manga\Toukyou Akazukin\Vol. 01 Ch. 001.cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "Toukyou Akazukin", Volumes = "1", Edition = "", + Chapters = "1", Filename = "Vol. 
01 Ch. 001.cbz", Format = MangaFormat.Archive, + FullFilePath = filepath, IsSpecial = false + }); + + filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub"; + expected.Add(filepath, new ParserInfo + { + Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "", + Chapters = "0", Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub, + FullFilePath = filepath, IsSpecial = false + }); + + // If an image is cover exclusively, ignore it + filepath = @"E:\Manga\Seraph of the End\cover.png"; + expected.Add(filepath, null); + + filepath = @"E:\Manga\The Beginning After the End\Chapter 001.cbz"; + expected.Add(filepath, new ParserInfo + { + Series = "The Beginning After the End", Volumes = "0", Edition = "", + Chapters = "1", Filename = "Chapter 001.cbz", Format = MangaFormat.Archive, + FullFilePath = filepath, IsSpecial = false + }); + + filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg"; + expected.Add(filepath, new ParserInfo + { + Series = "Monster #8", Volumes = "0", Edition = "", + Chapters = "1", Filename = "13.jpg", Format = MangaFormat.Archive, + FullFilePath = filepath, IsSpecial = false + }); + + + foreach (var file in expected.Keys) + { + var expectedInfo = expected[file]; + var actual = _defaultParser.Parse(file, rootPath); + if (expectedInfo == null) + { + Assert.Null(actual); + return; + } + Assert.NotNull(actual); + _testOutputHelper.WriteLine($"Validating {file}"); + Assert.Equal(expectedInfo.Format, actual.Format); + _testOutputHelper.WriteLine("Format ✓"); + Assert.Equal(expectedInfo.Series, actual.Series); + _testOutputHelper.WriteLine("Series ✓"); + Assert.Equal(expectedInfo.Chapters, actual.Chapters); + _testOutputHelper.WriteLine("Chapters ✓"); + Assert.Equal(expectedInfo.Volumes, actual.Volumes); + _testOutputHelper.WriteLine("Volumes ✓"); + Assert.Equal(expectedInfo.Edition, actual.Edition); + _testOutputHelper.WriteLine("Edition ✓"); + Assert.Equal(expectedInfo.Filename, actual.Filename); + _testOutputHelper.WriteLine("Filename ✓"); + Assert.Equal(expectedInfo.FullFilePath, actual.FullFilePath); + _testOutputHelper.WriteLine("FullFilePath ✓"); + } + } + + [Fact] + public void Parse_ParseInfo_Comic() + { + const string rootPath = @"E:/Comics/"; + var expected = new Dictionary(); + var filepath = @"E:/Comics/Teen Titans/Teen Titans v1 Annual 01 (1967) SP01.cbr"; + expected.Add(filepath, new ParserInfo + { + Series = "Teen Titans", Volumes = "0", + Chapters = "0", Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive, + FullFilePath = filepath + }); + + // Fallback test with bad naming + filepath = @"E:\Comics\Comics\Babe\Babe Vol.1 #1-4\Babe 01.cbr"; + expected.Add(filepath, new ParserInfo + { + Series = "Babe", Volumes = "0", Edition = "", + Chapters = "1", Filename = "Babe 01.cbr", Format = MangaFormat.Archive, + FullFilePath = filepath, IsSpecial = false + }); + + filepath = @"E:\Comics\Comics\Publisher\Batman the Detective (2021)\Batman the Detective - v6 - 11 - (2021).cbr"; + expected.Add(filepath, new ParserInfo + { + Series = "Batman the Detective", Volumes = "6", Edition = "", + Chapters = "11", Filename = "Batman the Detective - v6 - 11 - (2021).cbr", Format = MangaFormat.Archive, + FullFilePath = filepath, IsSpecial = false + }); + + filepath = @"E:\Comics\Comics\Batman - The Man Who Laughs #1 
(2005)\Batman - The Man Who Laughs #1 (2005).cbr"; + expected.Add(filepath, new ParserInfo + { + Series = "Batman - The Man Who Laughs", Volumes = "0", Edition = "", + Chapters = "1", Filename = "Batman - The Man Who Laughs #1 (2005).cbr", Format = MangaFormat.Archive, + FullFilePath = filepath, IsSpecial = false + }); + + foreach (var file in expected.Keys) + { + var expectedInfo = expected[file]; + var actual = _defaultParser.Parse(file, rootPath, LibraryType.Comic); + if (expectedInfo == null) + { + Assert.Null(actual); + return; + } + Assert.NotNull(actual); + _testOutputHelper.WriteLine($"Validating {file}"); + Assert.Equal(expectedInfo.Format, actual.Format); + _testOutputHelper.WriteLine("Format ✓"); + Assert.Equal(expectedInfo.Series, actual.Series); + _testOutputHelper.WriteLine("Series ✓"); + Assert.Equal(expectedInfo.Chapters, actual.Chapters); + _testOutputHelper.WriteLine("Chapters ✓"); + Assert.Equal(expectedInfo.Volumes, actual.Volumes); + _testOutputHelper.WriteLine("Volumes ✓"); + Assert.Equal(expectedInfo.Edition, actual.Edition); + _testOutputHelper.WriteLine("Edition ✓"); + Assert.Equal(expectedInfo.Filename, actual.Filename); + _testOutputHelper.WriteLine("Filename ✓"); + Assert.Equal(expectedInfo.FullFilePath, actual.FullFilePath); + _testOutputHelper.WriteLine("FullFilePath ✓"); + } + } + #endregion +} diff --git a/API.Tests/Parser/MangaParserTests.cs b/API.Tests/Parser/MangaParserTests.cs index 1576cbb07..7f7d685e6 100644 --- a/API.Tests/Parser/MangaParserTests.cs +++ b/API.Tests/Parser/MangaParserTests.cs @@ -294,194 +294,6 @@ namespace API.Tests.Parser } - [Theory] - [InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", "Btooom!~1~1")] - [InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", "Btooom!~1~2")] - [InlineData("/manga/Monster #8/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster #8 Ch. 
001 [MangaPlus] [Digital] [amit34521]/13.jpg", "Monster #8~0~1")] - public void ParseFromFallbackFoldersTest(string inputFile, string expectedParseInfo) - { - const string rootDirectory = "/manga/"; - var tokens = expectedParseInfo.Split("~"); - var actual = new ParserInfo {Chapters = "0", Volumes = "0"}; - API.Parser.Parser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual); - Assert.Equal(tokens[0], actual.Series); - Assert.Equal(tokens[1], actual.Volumes); - Assert.Equal(tokens[2], actual.Chapters); - } - - [Fact] - public void ParseInfoTest() - { - const string rootPath = @"E:/Manga/"; - var expected = new Dictionary(); - var filepath = @"E:/Manga/Mujaki no Rakuen/Mujaki no Rakuen Vol12 ch76.cbz"; - expected.Add(filepath, new ParserInfo - { - Series = "Mujaki no Rakuen", Volumes = "12", - Chapters = "76", Filename = "Mujaki no Rakuen Vol12 ch76.cbz", Format = MangaFormat.Archive, - FullFilePath = filepath - }); - - filepath = @"E:/Manga/Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/Vol 1.cbz"; - expected.Add(filepath, new ParserInfo - { - Series = "Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", Volumes = "1", - Chapters = "0", Filename = "Vol 1.cbz", Format = MangaFormat.Archive, - FullFilePath = filepath - }); - - filepath = @"E:\Manga\Beelzebub\Beelzebub_01_[Noodles].zip"; - expected.Add(filepath, new ParserInfo - { - Series = "Beelzebub", Volumes = "0", - Chapters = "1", Filename = "Beelzebub_01_[Noodles].zip", Format = MangaFormat.Archive, - FullFilePath = filepath - }); - - filepath = @"E:\Manga\Ichinensei ni Nacchattara\Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip"; - expected.Add(filepath, new ParserInfo - { - Series = "Ichinensei ni Nacchattara", Volumes = "1", - Chapters = "1", Filename = "Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip", Format = MangaFormat.Archive, - FullFilePath = filepath - }); - - filepath = @"E:\Manga\Tenjo Tenge (Color)\Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz"; - expected.Add(filepath, new ParserInfo - { - Series = "Tenjo Tenge", Volumes = "1", Edition = "Full Contact Edition", - Chapters = "0", Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive, - FullFilePath = filepath - }); - - filepath = @"E:\Manga\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz"; - expected.Add(filepath, new ParserInfo - { - Series = "Akame ga KILL! ZERO", Volumes = "1", Edition = "", - Chapters = "0", Filename = "Akame ga KILL! 
ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive, - FullFilePath = filepath - }); - - filepath = @"E:\Manga\Dorohedoro\Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz"; - expected.Add(filepath, new ParserInfo - { - Series = "Dorohedoro", Volumes = "1", Edition = "", - Chapters = "0", Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive, - FullFilePath = filepath - }); - - filepath = @"E:\Manga\APOSIMZ\APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz"; - expected.Add(filepath, new ParserInfo - { - Series = "APOSIMZ", Volumes = "0", Edition = "", - Chapters = "40", Filename = "APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive, - FullFilePath = filepath - }); - - filepath = @"E:\Manga\Corpse Party Musume\Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz"; - expected.Add(filepath, new ParserInfo - { - Series = "Kedouin Makoto - Corpse Party Musume", Volumes = "0", Edition = "", - Chapters = "9", Filename = "Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", Format = MangaFormat.Archive, - FullFilePath = filepath - }); - - filepath = @"E:\Manga\Goblin Slayer\Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz"; - expected.Add(filepath, new ParserInfo - { - Series = "Goblin Slayer - Brand New Day", Volumes = "0", Edition = "", - Chapters = "6.5", Filename = "Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive, - FullFilePath = filepath - }); - - filepath = @"E:\Manga\Summer Time Rendering\Specials\Record 014 (between chapter 083 and ch084) SP11.cbr"; - expected.Add(filepath, new ParserInfo - { - Series = "Summer Time Rendering", Volumes = "0", Edition = "", - Chapters = "0", Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive, - FullFilePath = filepath, IsSpecial = true - }); - - filepath = @"E:\Manga\Seraph of the End\Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz"; - expected.Add(filepath, new ParserInfo - { - Series = "Seraph of the End - Vampire Reign", Volumes = "0", Edition = "", - Chapters = "93", Filename = "Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive, - FullFilePath = filepath, IsSpecial = false - }); - - filepath = @"E:\Manga\Kono Subarashii Sekai ni Bakuen wo!\Vol. 00 Ch. 000.cbz"; - expected.Add(filepath, new ParserInfo - { - Series = "Kono Subarashii Sekai ni Bakuen wo!", Volumes = "0", Edition = "", - Chapters = "0", Filename = "Vol. 00 Ch. 000.cbz", Format = MangaFormat.Archive, - FullFilePath = filepath, IsSpecial = false - }); - - filepath = @"E:\Manga\Toukyou Akazukin\Vol. 01 Ch. 001.cbz"; - expected.Add(filepath, new ParserInfo - { - Series = "Toukyou Akazukin", Volumes = "1", Edition = "", - Chapters = "1", Filename = "Vol. 01 Ch. 
001.cbz", Format = MangaFormat.Archive, - FullFilePath = filepath, IsSpecial = false - }); - - filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub"; - expected.Add(filepath, new ParserInfo - { - Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "", - Chapters = "0", Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub, - FullFilePath = filepath, IsSpecial = false - }); - - // If an image is cover exclusively, ignore it - filepath = @"E:\Manga\Seraph of the End\cover.png"; - expected.Add(filepath, null); - - filepath = @"E:\Manga\The Beginning After the End\Chapter 001.cbz"; - expected.Add(filepath, new ParserInfo - { - Series = "The Beginning After the End", Volumes = "0", Edition = "", - Chapters = "1", Filename = "Chapter 001.cbz", Format = MangaFormat.Archive, - FullFilePath = filepath, IsSpecial = false - }); - - filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg"; - expected.Add(filepath, new ParserInfo - { - Series = "Monster #8", Volumes = "0", Edition = "", - Chapters = "1", Filename = "13.jpg", Format = MangaFormat.Archive, - FullFilePath = filepath, IsSpecial = false - }); - - - foreach (var file in expected.Keys) - { - var expectedInfo = expected[file]; - var actual = API.Parser.Parser.Parse(file, rootPath); - if (expectedInfo == null) - { - Assert.Null(actual); - return; - } - Assert.NotNull(actual); - _testOutputHelper.WriteLine($"Validating {file}"); - Assert.Equal(expectedInfo.Format, actual.Format); - _testOutputHelper.WriteLine("Format ✓"); - Assert.Equal(expectedInfo.Series, actual.Series); - _testOutputHelper.WriteLine("Series ✓"); - Assert.Equal(expectedInfo.Chapters, actual.Chapters); - _testOutputHelper.WriteLine("Chapters ✓"); - Assert.Equal(expectedInfo.Volumes, actual.Volumes); - _testOutputHelper.WriteLine("Volumes ✓"); - Assert.Equal(expectedInfo.Edition, actual.Edition); - _testOutputHelper.WriteLine("Edition ✓"); - Assert.Equal(expectedInfo.Filename, actual.Filename); - _testOutputHelper.WriteLine("Filename ✓"); - Assert.Equal(expectedInfo.FullFilePath, actual.FullFilePath); - _testOutputHelper.WriteLine("FullFilePath ✓"); - } - } } } diff --git a/API.Tests/Parser/ParserTest.cs b/API.Tests/Parser/ParserTest.cs index 10aa326ca..652002398 100644 --- a/API.Tests/Parser/ParserTest.cs +++ b/API.Tests/Parser/ParserTest.cs @@ -6,6 +6,7 @@ namespace API.Tests.Parser public class ParserTests { + [Theory] [InlineData("Beastars - SP01", true)] [InlineData("Beastars SP01", true)] @@ -147,21 +148,7 @@ namespace API.Tests.Parser Assert.Equal(expected, CleanAuthor(expected)); } - [Theory] - [InlineData("C:/", "C:/Love Hina/Love Hina - Special.cbz", "Love Hina")] - [InlineData("C:/", "C:/Love Hina/Specials/Ani-Hina Art Collection.cbz", "Love Hina")] - [InlineData("C:/", "C:/Mujaki no Rakuen Something/Mujaki no Rakuen Vol12 ch76.cbz", "Mujaki no Rakuen")] - public void FallbackTest(string rootDir, string inputPath, string expectedSeries) - { - var actual = Parse(inputPath, rootDir); - if (actual == null) - { - Assert.NotNull(actual); - return; - } - Assert.Equal(expectedSeries, actual.Series); - } [Theory] [InlineData("Love Hina - Special.jpg", false)] diff --git a/API.Tests/Services/ArchiveServiceTests.cs b/API.Tests/Services/ArchiveServiceTests.cs index 6c4d92d9d..9f286f71d 100644 --- a/API.Tests/Services/ArchiveServiceTests.cs +++ 
b/API.Tests/Services/ArchiveServiceTests.cs @@ -1,10 +1,11 @@ using System.Diagnostics; using System.IO; +using System.IO.Abstractions; using System.IO.Abstractions.TestingHelpers; using System.IO.Compression; +using System.Linq; using API.Archive; using API.Data.Metadata; -using API.Interfaces.Services; using API.Services; using Microsoft.Extensions.Logging; using NSubstitute; @@ -20,12 +21,12 @@ namespace API.Tests.Services private readonly ArchiveService _archiveService; private readonly ILogger _logger = Substitute.For>(); private readonly ILogger _directoryServiceLogger = Substitute.For>(); - private readonly IDirectoryService _directoryService = new DirectoryService(Substitute.For>(), new MockFileSystem()); + private readonly IDirectoryService _directoryService = new DirectoryService(Substitute.For>(), new FileSystem()); public ArchiveServiceTests(ITestOutputHelper testOutputHelper) { _testOutputHelper = testOutputHelper; - _archiveService = new ArchiveService(_logger, _directoryService); + _archiveService = new ArchiveService(_logger, _directoryService, new ImageService(Substitute.For>(), _directoryService)); } [Theory] @@ -108,15 +109,15 @@ namespace API.Tests.Services var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); var extractDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives/Extraction"); - DirectoryService.ClearAndDeleteDirectory(extractDirectory); + _directoryService.ClearAndDeleteDirectory(extractDirectory); - Stopwatch sw = Stopwatch.StartNew(); + var sw = Stopwatch.StartNew(); _archiveService.ExtractArchive(Path.Join(testDirectory, archivePath), extractDirectory); var di1 = new DirectoryInfo(extractDirectory); - Assert.Equal(expectedFileCount, di1.Exists ? di1.GetFiles().Length : 0); + Assert.Equal(expectedFileCount, di1.Exists ? 
_directoryService.GetFiles(extractDirectory, searchOption:SearchOption.AllDirectories).Count() : 0); _testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms"); - DirectoryService.ClearAndDeleteDirectory(extractDirectory); + _directoryService.ClearAndDeleteDirectory(extractDirectory); } @@ -167,8 +168,8 @@ namespace API.Tests.Services var sw = Stopwatch.StartNew(); var outputDir = Path.Join(testDirectory, "output"); - DirectoryService.ClearAndDeleteDirectory(outputDir); - DirectoryService.ExistOrCreate(outputDir); + _directoryService.ClearAndDeleteDirectory(outputDir); + _directoryService.ExistOrCreate(outputDir); var coverImagePath = archiveService.GetCoverImage(Path.Join(testDirectory, inputFile), @@ -178,7 +179,7 @@ namespace API.Tests.Services Assert.Equal(expectedBytes, actual); _testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms"); - DirectoryService.ClearAndDeleteDirectory(outputDir); + _directoryService.ClearAndDeleteDirectory(outputDir); } diff --git a/API.Tests/Services/BackupServiceTests.cs b/API.Tests/Services/BackupServiceTests.cs new file mode 100644 index 000000000..1af01632c --- /dev/null +++ b/API.Tests/Services/BackupServiceTests.cs @@ -0,0 +1,143 @@ +using System.Collections.Generic; +using System.Data.Common; +using System.IO.Abstractions.TestingHelpers; +using System.Linq; +using System.Threading.Tasks; +using API.Data; +using API.Entities; +using API.Entities.Enums; +using API.Extensions; +using API.Services; +using API.Services.Tasks; +using API.SignalR; +using AutoMapper; +using Microsoft.AspNetCore.SignalR; +using Microsoft.Data.Sqlite; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Logging; +using NSubstitute; +using Xunit; + +namespace API.Tests.Services; + +public class BackupServiceTests +{ + private readonly ILogger _logger = Substitute.For>(); + private readonly IUnitOfWork _unitOfWork; + private readonly IHubContext _messageHub = Substitute.For>(); + private readonly IConfiguration _config; + + private readonly DbConnection _connection; + private readonly DataContext _context; + + private const string CacheDirectory = "C:/kavita/config/cache/"; + private const string CoverImageDirectory = "C:/kavita/config/covers/"; + private const string BackupDirectory = "C:/kavita/config/backups/"; + private const string LogDirectory = "C:/kavita/config/logs/"; + + public BackupServiceTests() + { + var contextOptions = new DbContextOptionsBuilder() + .UseSqlite(CreateInMemoryDatabase()) + .Options; + _connection = RelationalOptionsExtension.Extract(contextOptions).Connection; + + _context = new DataContext(contextOptions); + Task.Run(SeedDb).GetAwaiter().GetResult(); + + _unitOfWork = new UnitOfWork(_context, Substitute.For(), null); + _config = Substitute.For(); + + } + + #region Setup + + private static DbConnection CreateInMemoryDatabase() + { + var connection = new SqliteConnection("Filename=:memory:"); + + connection.Open(); + + return connection; + } + + public void Dispose() => _connection.Dispose(); + + private async Task SeedDb() + { + await _context.Database.MigrateAsync(); + var filesystem = CreateFileSystem(); + + await Seed.SeedSettings(_context, new DirectoryService(Substitute.For>(), filesystem)); + + var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync(); + setting.Value = CacheDirectory; + + setting = await _context.ServerSetting.Where(s => s.Key == 
ServerSettingKey.BackupDirectory).SingleAsync(); + setting.Value = BackupDirectory; + + _context.ServerSetting.Update(setting); + + _context.Library.Add(new Library() + { + Name = "Manga", + Folders = new List() + { + new FolderPath() + { + Path = "C:/data/" + } + } + }); + return await _context.SaveChangesAsync() > 0; + } + + private async Task ResetDB() + { + _context.Series.RemoveRange(_context.Series.ToList()); + + await _context.SaveChangesAsync(); + } + + private static MockFileSystem CreateFileSystem() + { + var fileSystem = new MockFileSystem(); + fileSystem.Directory.SetCurrentDirectory("C:/kavita/"); + fileSystem.AddDirectory("C:/kavita/config/"); + fileSystem.AddDirectory(CacheDirectory); + fileSystem.AddDirectory(CoverImageDirectory); + fileSystem.AddDirectory(BackupDirectory); + fileSystem.AddDirectory(LogDirectory); + fileSystem.AddDirectory("C:/data/"); + + return fileSystem; + } + + #endregion + + + + #region GetLogFiles + + public void GetLogFiles_ExpectAllFiles_NoRollingFiles() + { + var filesystem = CreateFileSystem(); + filesystem.AddFile($"{LogDirectory}kavita.log", new MockFileData("")); + filesystem.AddFile($"{LogDirectory}kavita1.log", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), filesystem); + // You can't mock _config extensions because they are static + _config.GetMaxRollingFiles().Returns(1); + _config.GetLoggingFileName().Returns(ds.FileSystem.Path.Join(LogDirectory, "kavita.log")); + + var backupService = new BackupService(_logger, _unitOfWork, ds, _config, _messageHub); + + Assert.Single(backupService.GetLogFiles(1, LogDirectory)); + } + + + #endregion + +} diff --git a/API.Tests/Services/BookServiceTests.cs b/API.Tests/Services/BookServiceTests.cs index c46d0a40c..a2f498139 100644 --- a/API.Tests/Services/BookServiceTests.cs +++ b/API.Tests/Services/BookServiceTests.cs @@ -1,5 +1,5 @@ using System.IO; -using API.Interfaces.Services; +using System.IO.Abstractions; using API.Services; using Microsoft.Extensions.Logging; using NSubstitute; @@ -14,7 +14,8 @@ namespace API.Tests.Services public BookServiceTests() { - _bookService = new BookService(_logger); + var directoryService = new DirectoryService(Substitute.For>(), new FileSystem()); + _bookService = new BookService(_logger, directoryService, new ImageService(Substitute.For>(), directoryService)); } [Theory] diff --git a/API.Tests/Services/CacheServiceTests.cs b/API.Tests/Services/CacheServiceTests.cs index 410c43ade..29d8ece91 100644 --- a/API.Tests/Services/CacheServiceTests.cs +++ b/API.Tests/Services/CacheServiceTests.cs @@ -1,115 +1,440 @@ -namespace API.Tests.Services +using System.Collections.Generic; +using System.Data.Common; +using System.IO; +using System.IO.Abstractions.TestingHelpers; +using System.Linq; +using System.Threading.Tasks; +using API.Data; +using API.Entities; +using API.Entities.Enums; +using API.Services; +using API.SignalR; +using AutoMapper; +using Microsoft.AspNetCore.SignalR; +using Microsoft.Data.Sqlite; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.Extensions.Logging; +using NSubstitute; +using Xunit; + +namespace API.Tests.Services { public class CacheServiceTests { - // private readonly CacheService _cacheService; - // private readonly ILogger _logger = Substitute.For>(); - // private readonly IUnitOfWork _unitOfWork = Substitute.For(); - // private readonly IArchiveService _archiveService = Substitute.For(); - // private readonly IDirectoryService _directoryService = 
Substitute.For(); - // - // public CacheServiceTests() - // { - // _cacheService = new CacheService(_logger, _unitOfWork, _archiveService, _directoryService); - // } - + private readonly ILogger _logger = Substitute.For>(); + private readonly IUnitOfWork _unitOfWork; + private readonly IHubContext _messageHub = Substitute.For>(); + + private readonly DbConnection _connection; + private readonly DataContext _context; + + private const string CacheDirectory = "C:/kavita/config/cache/"; + private const string CoverImageDirectory = "C:/kavita/config/covers/"; + private const string BackupDirectory = "C:/kavita/config/backups/"; + private const string DataDirectory = "C:/data/"; + + public CacheServiceTests() + { + var contextOptions = new DbContextOptionsBuilder() + .UseSqlite(CreateInMemoryDatabase()) + .Options; + _connection = RelationalOptionsExtension.Extract(contextOptions).Connection; + + _context = new DataContext(contextOptions); + Task.Run(SeedDb).GetAwaiter().GetResult(); + + _unitOfWork = new UnitOfWork(_context, Substitute.For(), null); + } + + #region Setup + + private static DbConnection CreateInMemoryDatabase() + { + var connection = new SqliteConnection("Filename=:memory:"); + + connection.Open(); + + return connection; + } + + public void Dispose() => _connection.Dispose(); + + private async Task SeedDb() + { + await _context.Database.MigrateAsync(); + var filesystem = CreateFileSystem(); + + await Seed.SeedSettings(_context, new DirectoryService(Substitute.For>(), filesystem)); + + var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync(); + setting.Value = CacheDirectory; + + setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync(); + setting.Value = BackupDirectory; + + _context.ServerSetting.Update(setting); + + _context.Library.Add(new Library() + { + Name = "Manga", + Folders = new List() + { + new FolderPath() + { + Path = "C:/data/" + } + } + }); + return await _context.SaveChangesAsync() > 0; + } + + private async Task ResetDB() + { + _context.Series.RemoveRange(_context.Series.ToList()); + + await _context.SaveChangesAsync(); + } + + private static MockFileSystem CreateFileSystem() + { + var fileSystem = new MockFileSystem(); + fileSystem.Directory.SetCurrentDirectory("C:/kavita/"); + fileSystem.AddDirectory("C:/kavita/config/"); + fileSystem.AddDirectory(CacheDirectory); + fileSystem.AddDirectory(CoverImageDirectory); + fileSystem.AddDirectory(BackupDirectory); + fileSystem.AddDirectory(DataDirectory); + + return fileSystem; + } + + #endregion + + #region Ensure + + [Fact] + public async Task Ensure_DirectoryAlreadyExists_DontExtractAnything() + { + var filesystem = CreateFileSystem(); + filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData("")); + filesystem.AddDirectory($"{CacheDirectory}1/"); + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), Substitute.For(), Substitute.For(), ds)); + + await ResetDB(); + var s = DbFactory.Series("Test"); + var v = DbFactory.Volume("1"); + var c = new Chapter() + { + Number = "1", + Files = new List() + { + new MangaFile() + { + Format = MangaFormat.Archive, + FilePath = $"{DataDirectory}Test v1.zip", + } + } + }; + v.Chapters.Add(c); + s.Volumes.Add(v); + s.LibraryId = 1; + _context.Series.Add(s); + + await _context.SaveChangesAsync(); + + await cleanupService.Ensure(1); + 
Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories)); + } + // [Fact] - // public async void Ensure_ShouldExtractArchive(int chapterId) + // public async Task Ensure_DirectoryAlreadyExists_ExtractsImages() // { - // - // // CacheDirectory needs to be customized. - // _unitOfWork.VolumeRepository.GetChapterAsync(chapterId).Returns(new Chapter + // // TODO: Figure out a way to test this + // var filesystem = CreateFileSystem(); + // filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData("")); + // filesystem.AddDirectory($"{CacheDirectory}1/"); + // var ds = new DirectoryService(Substitute.For>(), filesystem); + // var archiveService = Substitute.For(); + // archiveService.ExtractArchive($"{DataDirectory}Test v1.zip", + // filesystem.Path.Join(CacheDirectory, "1")); + // var cleanupService = new CacheService(_logger, _unitOfWork, ds, + // new ReadingItemService(archiveService, Substitute.For(), Substitute.For(), ds)); + // + // await ResetDB(); + // var s = DbFactory.Series("Test"); + // var v = DbFactory.Volume("1"); + // var c = new Chapter() // { - // Id = 1, + // Number = "1", // Files = new List() // { // new MangaFile() // { - // FilePath = "" + // Format = MangaFormat.Archive, + // FilePath = $"{DataDirectory}Test v1.zip", // } // } - // }); - // - // await _cacheService.Ensure(1); - // - // var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/CacheService/Archives"); + // }; + // v.Chapters.Add(c); + // s.Volumes.Add(v); + // s.LibraryId = 1; + // _context.Series.Add(s); // - // } - - //string GetCachedPagePath(Volume volume, int page) - // [Fact] - // //[InlineData("", 0, "")] - // public void GetCachedPagePathTest_Should() - // { - // - // // string archivePath = "flat file.zip"; - // // int pageNum = 0; - // // string expected = "cache/1/pexels-photo-6551949.jpg"; - // // - // // var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); - // // var file = Path.Join(testDirectory, archivePath); - // // var volume = new Volume - // // { - // // Id = 1, - // // Files = new List() - // // { - // // new() - // // { - // // Id = 1, - // // Chapter = 0, - // // FilePath = archivePath, - // // Format = MangaFormat.Archive, - // // Pages = 1, - // // } - // // }, - // // Name = "1", - // // Number = 1 - // // }; - // // - // // var cacheService = Substitute.ForPartsOf(); - // // cacheService.Configure().CacheDirectoryIsAccessible().Returns(true); - // // cacheService.Configure().GetVolumeCachePath(1, volume.Files.ElementAt(0)).Returns("cache/1/"); - // // _directoryService.Configure().GetFilesWithExtension("cache/1/").Returns(new string[] {"pexels-photo-6551949.jpg"}); - // // Assert.Equal(expected, _cacheService.GetCachedPagePath(volume, pageNum)); - // //Assert.True(true); - // } + // await _context.SaveChangesAsync(); // - // [Fact] - // public void GetOrderedChaptersTest() - // { - // // var files = new List() - // // { - // // new() - // // { - // // Number = "1" - // // }, - // // new() - // // { - // // Chapter = 2 - // // }, - // // new() - // // { - // // Chapter = 0 - // // }, - // // }; - // // var expected = new List() - // // { - // // new() - // // { - // // Chapter = 1 - // // }, - // // new() - // // { - // // Chapter = 2 - // // }, - // // new() - // // { - // // Chapter = 0 - // // }, - // // }; - // // Assert.NotStrictEqual(expected, _cacheService.GetOrderedChapters(files)); + // await 
cleanupService.Ensure(1); + // Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories)); // } - // - + + + #endregion + + #region CleanupChapters + + [Fact] + public void CleanupChapters_AllFilesShouldBeDeleted() + { + var filesystem = CreateFileSystem(); + filesystem.AddDirectory($"{CacheDirectory}1/"); + filesystem.AddFile($"{CacheDirectory}1/001.jpg", new MockFileData("")); + filesystem.AddFile($"{CacheDirectory}1/002.jpg", new MockFileData("")); + filesystem.AddFile($"{CacheDirectory}3/003.jpg", new MockFileData("")); + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), Substitute.For(), Substitute.For(), ds)); + + cleanupService.CleanupChapters(new []{1, 3}); + Assert.Empty(ds.GetFiles(CacheDirectory, searchOption:SearchOption.AllDirectories)); + } + + + #endregion + + #region GetCachedEpubFile + + [Fact] + public void GetCachedEpubFile_ShouldReturnFirstEpub() + { + var filesystem = CreateFileSystem(); + filesystem.AddDirectory($"{CacheDirectory}1/"); + filesystem.AddFile($"{DataDirectory}1.epub", new MockFileData("")); + filesystem.AddFile($"{DataDirectory}2.epub", new MockFileData("")); + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cs = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), Substitute.For(), Substitute.For(), ds)); + + var c = new Chapter() + { + Files = new List() + { + new MangaFile() + { + FilePath = $"{DataDirectory}1.epub" + }, + new MangaFile() + { + FilePath = $"{DataDirectory}2.epub" + } + } + }; + cs.GetCachedEpubFile(1, c); + Assert.Same($"{DataDirectory}1.epub", cs.GetCachedEpubFile(1, c)); + } + + #endregion + + #region GetCachedPagePath + + [Fact] + public void GetCachedPagePath_ReturnNullIfNoFiles() + { + var filesystem = CreateFileSystem(); + filesystem.AddDirectory($"{CacheDirectory}1/"); + filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData("")); + filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData("")); + + var c = new Chapter() + { + Id = 1, + Files = new List() + }; + + var fileIndex = 0; + foreach (var file in c.Files) + { + for (var i = 0; i < file.Pages - 1; i++) + { + filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData("")); + } + + fileIndex++; + } + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cs = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), Substitute.For(), Substitute.For(), ds)); + + // Flatten to prepare for how GetFullPath expects + ds.Flatten($"{CacheDirectory}1/"); + + var path = cs.GetCachedPagePath(c, 11); + Assert.Equal(string.Empty, path); + } + + [Fact] + public void GetCachedPagePath_GetFileFromFirstFile() + { + var filesystem = CreateFileSystem(); + filesystem.AddDirectory($"{CacheDirectory}1/"); + filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData("")); + filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData("")); + + var c = new Chapter() + { + Id = 1, + Files = new List() + { + new MangaFile() + { + Id = 1, + FilePath = $"{DataDirectory}1.zip", + Pages = 10 + + }, + new MangaFile() + { + Id = 2, + FilePath = $"{DataDirectory}2.zip", + Pages = 5 + } + } + }; + + var fileIndex = 0; + foreach (var file in c.Files) + { + for (var i = 0; i < file.Pages; i++) + { + filesystem.AddFile($"{CacheDirectory}1/00{fileIndex}_00{i+1}.jpg", new MockFileData("")); + } + + fileIndex++; + 
} + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cs = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), Substitute.For(), Substitute.For(), ds)); + + // Flatten to prepare for how GetFullPath expects + ds.Flatten($"{CacheDirectory}1/"); + + Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_001.jpg"), ds.FileSystem.Path.GetFullPath(cs.GetCachedPagePath(c, 0))); + + } + + + [Fact] + public void GetCachedPagePath_GetLastPageFromSingleFile() + { + var filesystem = CreateFileSystem(); + filesystem.AddDirectory($"{CacheDirectory}1/"); + filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData("")); + + var c = new Chapter() + { + Id = 1, + Files = new List() + { + new MangaFile() + { + Id = 1, + FilePath = $"{DataDirectory}1.zip", + Pages = 10 + + } + } + }; + c.Pages = c.Files.Sum(f => f.Pages); + + var fileIndex = 0; + foreach (var file in c.Files) + { + for (var i = 0; i < file.Pages; i++) + { + filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData("")); + } + + fileIndex++; + } + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cs = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), Substitute.For(), Substitute.For(), ds)); + + // Flatten to prepare for how GetFullPath expects + ds.Flatten($"{CacheDirectory}1/"); + + // Remember that we start at 0, so this is the 10th file + var path = cs.GetCachedPagePath(c, c.Pages); + Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_0{c.Pages}.jpg"), ds.FileSystem.Path.GetFullPath(path)); + } + + [Fact] + public void GetCachedPagePath_GetFileFromSecondFile() + { + var filesystem = CreateFileSystem(); + filesystem.AddDirectory($"{CacheDirectory}1/"); + filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData("")); + filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData("")); + + var c = new Chapter() + { + Id = 1, + Files = new List() + { + new MangaFile() + { + Id = 1, + FilePath = $"{DataDirectory}1.zip", + Pages = 10 + + }, + new MangaFile() + { + Id = 2, + FilePath = $"{DataDirectory}2.zip", + Pages = 5 + } + } + }; + + var fileIndex = 0; + foreach (var file in c.Files) + { + for (var i = 0; i < file.Pages; i++) + { + filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData("")); + } + + fileIndex++; + } + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cs = new CacheService(_logger, _unitOfWork, ds, + new ReadingItemService(Substitute.For(), Substitute.For(), Substitute.For(), ds)); + + // Flatten to prepare for how GetFullPath expects + ds.Flatten($"{CacheDirectory}1/"); + + // Remember that we start at 0, so this is the page + 1 file + var path = cs.GetCachedPagePath(c, 10); + Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/001_001.jpg"), ds.FileSystem.Path.GetFullPath(path)); + } + + #endregion + } -} \ No newline at end of file +} diff --git a/API.Tests/Services/CleanupServiceTests.cs b/API.Tests/Services/CleanupServiceTests.cs new file mode 100644 index 000000000..b6e4f9454 --- /dev/null +++ b/API.Tests/Services/CleanupServiceTests.cs @@ -0,0 +1,359 @@ +using System; +using System.Collections.Generic; +using System.Data.Common; +using System.IO; +using System.IO.Abstractions.TestingHelpers; +using System.Linq; +using System.Threading.Tasks; +using API.Data; +using API.Entities; +using API.Entities.Enums; +using API.Services; +using API.Services.Tasks; +using API.SignalR; +using AutoMapper; 
+using Microsoft.AspNetCore.SignalR; +using Microsoft.Data.Sqlite; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.Extensions.Logging; +using NSubstitute; +using Xunit; + +namespace API.Tests.Services; + +public class CleanupServiceTests +{ + private readonly ILogger _logger = Substitute.For>(); + private readonly IUnitOfWork _unitOfWork; + private readonly IHubContext _messageHub = Substitute.For>(); + + private readonly DbConnection _connection; + private readonly DataContext _context; + + private const string CacheDirectory = "C:/kavita/config/cache/"; + private const string CoverImageDirectory = "C:/kavita/config/covers/"; + private const string BackupDirectory = "C:/kavita/config/backups/"; + + + public CleanupServiceTests() + { + var contextOptions = new DbContextOptionsBuilder() + .UseSqlite(CreateInMemoryDatabase()) + .Options; + _connection = RelationalOptionsExtension.Extract(contextOptions).Connection; + + _context = new DataContext(contextOptions); + Task.Run(SeedDb).GetAwaiter().GetResult(); + + _unitOfWork = new UnitOfWork(_context, Substitute.For(), null); + } + + #region Setup + + private static DbConnection CreateInMemoryDatabase() + { + var connection = new SqliteConnection("Filename=:memory:"); + + connection.Open(); + + return connection; + } + + public void Dispose() => _connection.Dispose(); + + private async Task SeedDb() + { + await _context.Database.MigrateAsync(); + var filesystem = CreateFileSystem(); + + await Seed.SeedSettings(_context, new DirectoryService(Substitute.For>(), filesystem)); + + var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync(); + setting.Value = CacheDirectory; + + setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync(); + setting.Value = BackupDirectory; + + _context.ServerSetting.Update(setting); + + _context.Library.Add(new Library() + { + Name = "Manga", + Folders = new List() + { + new FolderPath() + { + Path = "C:/data/" + } + } + }); + return await _context.SaveChangesAsync() > 0; + } + + private async Task ResetDB() + { + _context.Series.RemoveRange(_context.Series.ToList()); + + await _context.SaveChangesAsync(); + } + + private static MockFileSystem CreateFileSystem() + { + var fileSystem = new MockFileSystem(); + fileSystem.Directory.SetCurrentDirectory("C:/kavita/"); + fileSystem.AddDirectory("C:/kavita/config/"); + fileSystem.AddDirectory(CacheDirectory); + fileSystem.AddDirectory(CoverImageDirectory); + fileSystem.AddDirectory(BackupDirectory); + fileSystem.AddDirectory("C:/data/"); + + return fileSystem; + } + + #endregion + + + #region DeleteSeriesCoverImages + + [Fact] + public async Task DeleteSeriesCoverImages_ShouldDeleteAll() + { + var filesystem = CreateFileSystem(); + filesystem.AddFile($"{CoverImageDirectory}series_01.jpg", new MockFileData("")); + filesystem.AddFile($"{CoverImageDirectory}series_03.jpg", new MockFileData("")); + filesystem.AddFile($"{CoverImageDirectory}series_1000.jpg", new MockFileData("")); + + // Delete all Series to reset state + await ResetDB(); + + var s = DbFactory.Series("Test 1"); + s.CoverImage = "series_01.jpg"; + s.LibraryId = 1; + _context.Series.Add(s); + s = DbFactory.Series("Test 2"); + s.CoverImage = "series_03.jpg"; + s.LibraryId = 1; + _context.Series.Add(s); + s = DbFactory.Series("Test 3"); + s.CoverImage = "series_1000.jpg"; + s.LibraryId = 1; + _context.Series.Add(s); + + var ds = new 
DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub, + ds); + + await cleanupService.DeleteSeriesCoverImages(); + + Assert.Empty(ds.GetFiles(CoverImageDirectory)); + } + + [Fact] + public async Task DeleteSeriesCoverImages_ShouldNotDeleteLinkedFiles() + { + var filesystem = CreateFileSystem(); + filesystem.AddFile($"{CoverImageDirectory}series_01.jpg", new MockFileData("")); + filesystem.AddFile($"{CoverImageDirectory}series_03.jpg", new MockFileData("")); + filesystem.AddFile($"{CoverImageDirectory}series_1000.jpg", new MockFileData("")); + + // Delete all Series to reset state + await ResetDB(); + + // Add 2 series with cover images + var s = DbFactory.Series("Test 1"); + s.CoverImage = "series_01.jpg"; + s.LibraryId = 1; + _context.Series.Add(s); + s = DbFactory.Series("Test 2"); + s.CoverImage = "series_03.jpg"; + s.LibraryId = 1; + _context.Series.Add(s); + + + await _context.SaveChangesAsync(); + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub, + ds); + + await cleanupService.DeleteSeriesCoverImages(); + + Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count()); + } + #endregion + + #region DeleteChapterCoverImages + [Fact] + public async Task DeleteChapterCoverImages_ShouldNotDeleteLinkedFiles() + { + var filesystem = CreateFileSystem(); + filesystem.AddFile($"{CoverImageDirectory}v01_c01.jpg", new MockFileData("")); + filesystem.AddFile($"{CoverImageDirectory}v01_c03.jpg", new MockFileData("")); + filesystem.AddFile($"{CoverImageDirectory}v01_c1000.jpg", new MockFileData("")); + + // Delete all Series to reset state + await ResetDB(); + + // Add 2 series with cover images + var s = DbFactory.Series("Test 1"); + var v = DbFactory.Volume("1"); + v.Chapters.Add(new Chapter() + { + CoverImage = "v01_c01.jpg" + }); + v.CoverImage = "v01_c01.jpg"; + s.Volumes.Add(v); + s.CoverImage = "series_01.jpg"; + s.LibraryId = 1; + _context.Series.Add(s); + + s = DbFactory.Series("Test 2"); + v = DbFactory.Volume("1"); + v.Chapters.Add(new Chapter() + { + CoverImage = "v01_c03.jpg" + }); + v.CoverImage = "v01_c03jpg"; + s.Volumes.Add(v); + s.CoverImage = "series_03.jpg"; + s.LibraryId = 1; + _context.Series.Add(s); + + + await _context.SaveChangesAsync(); + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub, + ds); + + await cleanupService.DeleteChapterCoverImages(); + + Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count()); + } + #endregion + + #region DeleteTagCoverImages + + [Fact] + public async Task DeleteTagCoverImages_ShouldNotDeleteLinkedFiles() + { + var filesystem = CreateFileSystem(); + filesystem.AddFile($"{CoverImageDirectory}tag_01.jpg", new MockFileData("")); + filesystem.AddFile($"{CoverImageDirectory}tag_02.jpg", new MockFileData("")); + filesystem.AddFile($"{CoverImageDirectory}tag_1000.jpg", new MockFileData("")); + + // Delete all Series to reset state + await ResetDB(); + + // Add 2 series with cover images + var s = DbFactory.Series("Test 1"); + s.Metadata.CollectionTags = new List(); + s.Metadata.CollectionTags.Add(new CollectionTag() + { + Title = "Something", + CoverImage ="tag_01.jpg" + }); + s.CoverImage = "series_01.jpg"; + s.LibraryId = 1; + _context.Series.Add(s); + + s = DbFactory.Series("Test 2"); + s.Metadata.CollectionTags = new List(); + s.Metadata.CollectionTags.Add(new CollectionTag() + { + Title = "Something 
2", + CoverImage ="tag_02.jpg" + }); + s.CoverImage = "series_03.jpg"; + s.LibraryId = 1; + _context.Series.Add(s); + + + await _context.SaveChangesAsync(); + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub, + ds); + + await cleanupService.DeleteTagCoverImages(); + + Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count()); + } + + #endregion + + #region CleanupCacheDirectory + + [Fact] + public void CleanupCacheDirectory_ClearAllFiles() + { + var filesystem = CreateFileSystem(); + filesystem.AddFile($"{CacheDirectory}01.jpg", new MockFileData("")); + filesystem.AddFile($"{CacheDirectory}02.jpg", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub, + ds); + cleanupService.CleanupCacheDirectory(); + Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories)); + } + + [Fact] + public void CleanupCacheDirectory_ClearAllFilesInSubDirectory() + { + var filesystem = CreateFileSystem(); + filesystem.AddFile($"{CacheDirectory}01.jpg", new MockFileData("")); + filesystem.AddFile($"{CacheDirectory}subdir/02.jpg", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub, + ds); + cleanupService.CleanupCacheDirectory(); + Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories)); + } + + #endregion + + #region CleanupBackups + + [Fact] + public void CleanupBackups_LeaveOneFile_SinceAllAreExpired() + { + var filesystem = CreateFileSystem(); + var filesystemFile = new MockFileData("") + { + CreationTime = DateTimeOffset.Now.Subtract(TimeSpan.FromDays(31)) + }; + filesystem.AddFile($"{BackupDirectory}kavita_backup_11_29_2021_12_00_13 AM.zip", filesystemFile); + filesystem.AddFile($"{BackupDirectory}kavita_backup_12_3_2021_9_27_58 AM.zip", filesystemFile); + filesystem.AddFile($"{BackupDirectory}randomfile.zip", filesystemFile); + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub, + ds); + cleanupService.CleanupBackups(); + Assert.Single(ds.GetFiles(BackupDirectory, searchOption: SearchOption.AllDirectories)); + } + + [Fact] + public void CleanupBackups_LeaveLestExpired() + { + var filesystem = CreateFileSystem(); + var filesystemFile = new MockFileData("") + { + CreationTime = DateTimeOffset.Now.Subtract(TimeSpan.FromDays(31)) + }; + filesystem.AddFile($"{BackupDirectory}kavita_backup_11_29_2021_12_00_13 AM.zip", filesystemFile); + filesystem.AddFile($"{BackupDirectory}kavita_backup_12_3_2021_9_27_58 AM.zip", filesystemFile); + filesystem.AddFile($"{BackupDirectory}randomfile.zip", new MockFileData("") + { + CreationTime = DateTimeOffset.Now.Subtract(TimeSpan.FromDays(14)) + }); + + var ds = new DirectoryService(Substitute.For>(), filesystem); + var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub, + ds); + cleanupService.CleanupBackups(); + Assert.True(filesystem.File.Exists($"{BackupDirectory}randomfile.zip")); + } + + #endregion +} diff --git a/API.Tests/Services/DirectoryServiceTests.cs b/API.Tests/Services/DirectoryServiceTests.cs index f51e464e6..62ace504f 100644 --- a/API.Tests/Services/DirectoryServiceTests.cs +++ b/API.Tests/Services/DirectoryServiceTests.cs @@ -1,8 +1,11 @@ using System; using System.Collections.Generic; using 
System.IO; +using System.IO.Abstractions; using System.IO.Abstractions.TestingHelpers; using System.Linq; +using System.Text; +using System.Threading.Tasks; using API.Services; using Microsoft.Extensions.Logging; using NSubstitute; @@ -18,87 +21,607 @@ namespace API.Tests.Services public DirectoryServiceTests() { - _directoryService = new DirectoryService(_logger, new MockFileSystem()); + var filesystem = new MockFileSystem() + { + + }; + + _directoryService = new DirectoryService(_logger, filesystem); } + + #region TraverseTreeParallelForEach [Fact] - public void GetFilesTest_Should_Be28() + public void TraverseTreeParallelForEach_JustArchives_ShouldBe28() { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Manga"); - // ReSharper disable once CollectionNeverQueried.Local + var testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 28; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); var files = new List(); - var fileCount = _directoryService.TraverseTreeParallelForEach(testDirectory, s => files.Add(s), + var fileCount = ds.TraverseTreeParallelForEach(testDirectory, s => files.Add(s), API.Parser.Parser.ArchiveFileExtensions, _logger); Assert.Equal(28, fileCount); + Assert.Equal(28, files.Count); } [Fact] - public void GetFiles_WithCustomRegex_ShouldPass_Test() + public void TraverseTreeParallelForEach_DontCountExcludedDirectories_ShouldBe28() { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/DirectoryService/regex"); - var files = DirectoryService.GetFiles(testDirectory, @"file\d*.txt"); - Assert.Equal(2, files.Count()); + var testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 28; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + fileSystem.AddFile($"{Path.Join(testDirectory, "@eaDir")}file_{29}.jpg", new MockFileData("")); + fileSystem.AddFile($"{Path.Join(testDirectory, ".DS_Store")}file_{30}.jpg", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = new List(); + var fileCount = ds.TraverseTreeParallelForEach(testDirectory, s => files.Add(s), + API.Parser.Parser.ArchiveFileExtensions, _logger); + + Assert.Equal(28, fileCount); + Assert.Equal(28, files.Count); + } + #endregion + + #region GetFilesWithCertainExtensions + [Fact] + public void GetFilesWithCertainExtensions_ShouldBe10() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFilesWithExtension(testDirectory, API.Parser.Parser.ArchiveFileExtensions); + + Assert.Equal(10, files.Length); + Assert.All(files, s => fileSystem.Path.GetExtension(s).Equals(".zip")); } [Fact] - public void GetFiles_TopLevel_ShouldBeEmpty_Test() + public void GetFilesWithCertainExtensions_OnlyArchives() { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/DirectoryService"); - var files = DirectoryService.GetFiles(testDirectory); - Assert.Empty(files); + 
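+            // Ten .zip files plus one .rar are seeded; the ".zip|.rar" filter should match all eleven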
const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + fileSystem.AddFile($"{testDirectory}file_{29}.rar", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFilesWithExtension(testDirectory, ".zip|.rar"); + + Assert.Equal(11, files.Length); + } + #endregion + + #region GetFiles + [Fact] + public void GetFiles_ArchiveOnly_ShouldBe10() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory, API.Parser.Parser.ArchiveFileExtensions).ToList(); + + Assert.Equal(10, files.Count()); + Assert.All(files, s => fileSystem.Path.GetExtension(s).Equals(".zip")); } [Fact] - public void GetFilesWithExtensions_ShouldBeEmpty_Test() + public void GetFiles_All_ShouldBe11() { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/DirectoryService/extensions"); - var files = DirectoryService.GetFiles(testDirectory, "*.txt"); - Assert.Empty(files); + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory).ToList(); + + Assert.Equal(11, files.Count()); } [Fact] - public void GetFilesWithExtensions_Test() + public void GetFiles_All_MixedPathSeparators() { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/DirectoryService/extension"); - var files = DirectoryService.GetFiles(testDirectory, ".cbz|.rar"); - Assert.Equal(3, files.Count()); + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + fileSystem.AddFile($"/manga\\file_{29}.jpg", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory).ToList(); + + Assert.Equal(11, files.Count()); } [Fact] - public void GetFilesWithExtensions_BadDirectory_ShouldBeEmpty_Test() + public void GetFiles_All_TopDirectoryOnly_ShouldBe10() { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/DirectoryService/doesntexist"); - var files = DirectoryService.GetFiles(testDirectory, ".cbz|.rar"); - Assert.Empty(files); + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + fileSystem.AddFile($"{testDirectory}/SubDir/file_{29}.jpg", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory).ToList(); + + Assert.Equal(10, files.Count()); } [Fact] - public void ListDirectory_SubDirectory_Test() + public void GetFiles_WithSubDirectories_ShouldCountOnlyTopLevel() { - var testDirectory = 
Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/DirectoryService/"); - var dirs = _directoryService.ListDirectory(testDirectory); - Assert.Contains(dirs, s => s.Contains("regex")); + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + fileSystem.AddFile($"{testDirectory}/SubDir/file_{29}.jpg", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory).ToList(); + + Assert.Equal(10, files.Count()); } [Fact] - public void ListDirectory_NoSubDirectory_Test() + public void GetFiles_ShouldNotReturnFilesThatAreExcluded() { - var dirs = _directoryService.ListDirectory(""); - Assert.DoesNotContain(dirs, s => s.Contains("regex")); + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + fileSystem.AddFile($"{testDirectory}/._file_{29}.jpg", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory).ToList(); + + Assert.Equal(10, files.Count()); } + [Fact] + public void GetFiles_WithCustomRegex_ShouldBe10() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}data-{i}.txt", new MockFileData("")); + } + fileSystem.AddFile($"{testDirectory}joe.txt", new MockFileData("")); + fileSystem.AddFile($"{testDirectory}0d.txt", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory, @".*d.*\.txt"); + Assert.Equal(11, files.Count()); + } + + [Fact] + public void GetFiles_WithCustomRegexThatContainsFolder_ShouldBe10() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("")); + } + fileSystem.AddFile($"{testDirectory}joe.txt", new MockFileData("")); + fileSystem.AddFile($"{testDirectory}0d.txt", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var files = ds.GetFiles(testDirectory, @".*d.*\.txt", SearchOption.AllDirectories); + Assert.Equal(11, files.Count()); + } + #endregion + + #region GetTotalSize + [Fact] + public void GetTotalSize_ShouldBeGreaterThan0() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc")); + } + fileSystem.AddFile($"{testDirectory}joe.txt", new MockFileData("")); + + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var fileSize = ds.GetTotalSize(fileSystem.AllFiles); + Assert.True(fileSize > 0); + } + #endregion + + #region CopyFileToDirectory + [Fact] + public void CopyFileToDirectory_ShouldCopyFileToNonExistentDirectory() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFileToDirectory($"{testDirectory}file/data-0.txt", "/manga/output/"); + Assert.True(fileSystem.FileExists("manga/output/data-0.txt")); + 
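+            // The original source file should remain in place after the copy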
Assert.True(fileSystem.FileExists("manga/file/data-0.txt")); + } + [Fact] + public void CopyFileToDirectory_ShouldCopyFileToExistingDirectoryAndOverwrite() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}output/data-0.txt", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFileToDirectory($"{testDirectory}file/data-0.txt", "/manga/output/"); + Assert.True(fileSystem.FileExists("/manga/output/data-0.txt")); + Assert.True(fileSystem.FileExists("/manga/file/data-0.txt")); + Assert.True(fileSystem.FileInfo.FromFileName("/manga/file/data-0.txt").Length == fileSystem.FileInfo.FromFileName("/manga/output/data-0.txt").Length); + } + #endregion + + #region CopyDirectoryToDirectory + [Fact] + public void CopyDirectoryToDirectory_ShouldThrowWhenSourceDestinationDoesntExist() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}output/data-0.txt", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var ex = Assert.Throws(() => ds.CopyDirectoryToDirectory("/comics/", "/manga/output/")); + Assert.Equal(ex.Message, "Source directory does not exist or could not be found: " + "/comics/"); + } + + [Fact] + public void CopyDirectoryToDirectory_ShouldCopyEmptyDirectory() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); + fileSystem.AddDirectory($"{testDirectory}empty/"); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyDirectoryToDirectory($"{testDirectory}empty/", "/manga/output/"); + Assert.Empty(fileSystem.DirectoryInfo.FromDirectoryName("/manga/output/").GetFiles()); + } + + [Fact] + public void CopyDirectoryToDirectory_ShouldCopyAllFileAndNestedDirectoriesOver() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-1.txt", new MockFileData("abc")); + fileSystem.AddDirectory($"{testDirectory}empty/"); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyDirectoryToDirectory($"{testDirectory}", "/manga/output/"); + Assert.Equal(2, ds.GetFiles("/manga/output/", searchOption: SearchOption.AllDirectories).Count()); + } + #endregion + + #region IsDriveMounted + [Fact] + public void IsDriveMounted_DriveIsNotMounted() + { + const string testDirectory = "c:/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc")); + var ds = new DirectoryService(Substitute.For>(), fileSystem); + + Assert.False(ds.IsDriveMounted("d:/manga/")); + } + + [Fact] + public void IsDriveMounted_DriveIsMounted() + { + const string testDirectory = "c:/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc")); + var ds = new DirectoryService(Substitute.For>(), fileSystem); + + Assert.True(ds.IsDriveMounted("c:/manga/file")); + } + #endregion + + #region ExistOrCreate + [Fact] + public void ExistOrCreate_ShouldCreate() + { + var fileSystem = new MockFileSystem(); + var ds = new 
DirectoryService(Substitute.For>(), fileSystem); + ds.ExistOrCreate("c:/manga/output/"); + + Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName("c:/manga/output/").Exists); + } + #endregion + + #region ClearAndDeleteDirectory + [Fact] + public void ClearAndDeleteDirectory_ShouldDeleteSelfAndAllFilesAndFolders() + { + const string testDirectory = "/manga/base/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc")); + } + fileSystem.AddFile($"{testDirectory}data-a.txt", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-b.txt", new MockFileData("abc")); + fileSystem.AddDirectory($"{testDirectory}empty/"); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.ClearAndDeleteDirectory($"{testDirectory}"); + Assert.Empty(ds.GetFiles("/manga/", searchOption: SearchOption.AllDirectories)); + Assert.Empty(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/").GetDirectories()); + Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/").Exists); + Assert.False(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/base").Exists); + } + #endregion + + #region ClearDirectory + [Fact] + public void ClearDirectory_ShouldDeleteAllFilesAndFolders_LeaveSelf() + { + const string testDirectory = "/manga/base/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc")); + } + fileSystem.AddFile($"{testDirectory}data-a.txt", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-b.txt", new MockFileData("abc")); + fileSystem.AddDirectory($"{testDirectory}file/empty/"); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.ClearDirectory($"{testDirectory}file/"); + Assert.Empty(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}file/").GetDirectories()); + Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/").Exists); + Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}file/").Exists); + } + + [Fact] + public void ClearDirectory_ShouldDeleteFoldersWithOneFileInside() + { + const string testDirectory = "/manga/base/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc")); + } + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.ClearDirectory($"{testDirectory}"); + Assert.Empty(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}").GetDirectories()); + Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName(testDirectory).Exists); + Assert.False(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}file/").Exists); + } + #endregion + + #region CopyFilesToDirectory + [Fact] + public void CopyFilesToDirectory_ShouldMoveAllFiles() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip"}, "/manga/output/"); + Assert.Equal(2, ds.GetFiles("/manga/output/").Count()); + } + + [Fact] + public void CopyFilesToDirectory_ShouldMoveAllFiles_InclFilesInNestedFolders() + { + const string testDirectory = "/manga/"; + var fileSystem = new 
MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + fileSystem.AddFile($"{testDirectory}nested/file_11.zip", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip", $"{testDirectory}nested/file_11.zip"}, "/manga/output/"); + Assert.Equal(3, ds.GetFiles("/manga/output/").Count()); + } + + [Fact] + public void CopyFilesToDirectory_ShouldMoveAllFiles_WithPrepend() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip", $"{testDirectory}nested/file_11.zip"}, + "/manga/output/", "mangarocks_"); + Assert.Equal(2, ds.GetFiles("/manga/output/").Count()); + Assert.All(ds.GetFiles("/manga/output/"), filepath => ds.FileSystem.Path.GetFileName(filepath).StartsWith("mangarocks_")); + } + + [Fact] + public void CopyFilesToDirectory_ShouldMoveOnlyFilesThatExist() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + for (var i = 0; i < 10; i++) + { + fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData("")); + } + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip", $"{testDirectory}nested/file_11.zip"}, + "/manga/output/"); + Assert.Equal(2, ds.GetFiles("/manga/output/").Count()); + } + + #endregion + + #region ListDirectory + [Fact] + public void ListDirectory_EmptyForNonExistent() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file_0.zip", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + Assert.Empty(ds.ListDirectory("/comics/")); + } + + [Fact] + public void ListDirectory_ListsAllDirectories() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory($"{testDirectory}dir1"); + fileSystem.AddDirectory($"{testDirectory}dir2"); + fileSystem.AddDirectory($"{testDirectory}dir3"); + fileSystem.AddFile($"{testDirectory}file_0.zip", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + Assert.Equal(3, ds.ListDirectory(testDirectory).Count()); + } + + [Fact] + public void ListDirectory_ListsOnlyNonSystemAndHiddenOnly() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory($"{testDirectory}dir1"); + var di = fileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}dir1"); + di.Attributes |= FileAttributes.System; + fileSystem.AddDirectory($"{testDirectory}dir2"); + di = fileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}dir2"); + di.Attributes |= FileAttributes.Hidden; + fileSystem.AddDirectory($"{testDirectory}dir3"); + fileSystem.AddFile($"{testDirectory}file_0.zip", new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + Assert.Equal(1, ds.ListDirectory(testDirectory).Count()); + } + + #endregion + + #region ReadFileAsync + + [Fact] + public async Task ReadFileAsync_ShouldGetBytes() + { + const string testDirectory = "/manga/"; 
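+            // Seed a file with known contents; ReadFileAsync should return exactly these bytes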
+ var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file_1.zip", new MockFileData("Hello")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var bytes = await ds.ReadFileAsync($"{testDirectory}file_1.zip"); + Assert.Equal(Encoding.UTF8.GetBytes("Hello"), bytes); + } + + [Fact] + public async Task ReadFileAsync_ShouldReadNothingFromNonExistent() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile($"{testDirectory}file_1.zip", new MockFileData("Hello")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var bytes = await ds.ReadFileAsync($"{testDirectory}file_32123.zip"); + Assert.Empty(bytes); + } + + + #endregion + + #region FindHighestDirectoriesFromFiles + [Theory] [InlineData(new [] {"C:/Manga/"}, new [] {"C:/Manga/Love Hina/Vol. 01.cbz"}, "C:/Manga/Love Hina")] - public void FindHighestDirectoriesFromFilesTest(string[] rootDirectories, string[] folders, string expectedDirectory) + [InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/Dir 2/"}, new [] {"C:/Manga/Dir 1/Love Hina/Vol. 01.cbz"}, "C:/Manga/Dir 1/Love Hina")] + [InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/"}, new [] {"D:/Manga/Love Hina/Vol. 01.cbz", "D:/Manga/Vol. 01.cbz"}, "")] + public void FindHighestDirectoriesFromFilesTest(string[] rootDirectories, string[] files, string expectedDirectory) { - var actual = DirectoryService.FindHighestDirectoriesFromFiles(rootDirectories, folders); - var expected = new Dictionary {{expectedDirectory, ""}}; + var fileSystem = new MockFileSystem(); + foreach (var directory in rootDirectories) + { + fileSystem.AddDirectory(directory); + } + foreach (var f in files) + { + fileSystem.AddFile(f, new MockFileData("")); + } + var ds = new DirectoryService(Substitute.For>(), fileSystem); + + var actual = ds.FindHighestDirectoriesFromFiles(rootDirectories, files); + var expected = new Dictionary(); + if (!string.IsNullOrEmpty(expectedDirectory)) + { + expected = new Dictionary {{expectedDirectory, ""}}; + } + Assert.Equal(expected, actual); } + #endregion + + #region GetFoldersTillRoot + [Theory] [InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")] [InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake", "Omake,Specials,Love Hina")] @@ -115,12 +638,78 @@ namespace API.Tests.Services [InlineData(@"M:\", @"M:\Toukyou Akazukin\Vol. 01 Ch. 
005.cbz", @"Toukyou Akazukin")] public void GetFoldersTillRoot_Test(string rootPath, string fullpath, string expectedArray) { + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory(rootPath); + fileSystem.AddFile(fullpath, new MockFileData("")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + var expected = expectedArray.Split(","); if (expectedArray.Equals(string.Empty)) { - expected = Array.Empty(); + expected = Array.Empty(); } - Assert.Equal(expected, DirectoryService.GetFoldersTillRoot(rootPath, fullpath)); + Assert.Equal(expected, ds.GetFoldersTillRoot(rootPath, fullpath)); } + + #endregion + + #region RemoveNonImages + + [Fact] + public void RemoveNonImages() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory(testDirectory); + fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-2.png", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-3.webp", new MockFileData("abc")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.RemoveNonImages($"{testDirectory}"); + Assert.False(fileSystem.FileExists($"{testDirectory}file/data-0.txt")); + Assert.Equal(3, ds.GetFiles($"{testDirectory}", searchOption:SearchOption.AllDirectories).Count()); + } + + #endregion + + #region Flatten + + [Fact] + public void Flatten_ShouldDoNothing() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory(testDirectory); + fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-2.png", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}data-3.webp", new MockFileData("abc")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.Flatten($"{testDirectory}"); + Assert.True(fileSystem.FileExists($"{testDirectory}data-1.jpg")); + Assert.True(fileSystem.FileExists($"{testDirectory}data-2.png")); + Assert.True(fileSystem.FileExists($"{testDirectory}data-3.webp")); + } + + [Fact] + public void Flatten_ShouldFlatten() + { + const string testDirectory = "/manga/"; + var fileSystem = new MockFileSystem(); + fileSystem.AddDirectory(testDirectory); + fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc")); + fileSystem.AddFile($"{testDirectory}subdir/data-3.webp", new MockFileData("abc")); + + var ds = new DirectoryService(Substitute.For>(), fileSystem); + ds.Flatten($"{testDirectory}"); + Assert.Equal(2, ds.GetFiles(testDirectory).Count()); + Assert.False(fileSystem.FileExists($"{testDirectory}subdir/data-3.webp")); + Assert.True(fileSystem.Directory.Exists($"{testDirectory}subdir/")); + } + + #endregion } } diff --git a/API.Tests/Services/ParseScannedFilesTests.cs b/API.Tests/Services/ParseScannedFilesTests.cs new file mode 100644 index 000000000..a8adccb05 --- /dev/null +++ b/API.Tests/Services/ParseScannedFilesTests.cs @@ -0,0 +1,164 @@ +using System.Collections.Generic; +using System.Data.Common; +using System.IO.Abstractions.TestingHelpers; +using System.Linq; +using System.Threading.Tasks; +using API.Data; +using API.Data.Metadata; +using API.Entities; +using API.Entities.Enums; +using API.Parser; +using API.Services; +using API.Services.Tasks.Scanner; +using API.SignalR; +using AutoMapper; +using Microsoft.AspNetCore.SignalR; +using Microsoft.Data.Sqlite; +using 
Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.Extensions.Logging; +using NSubstitute; +using Xunit; + +namespace API.Tests.Services; + +internal class MockReadingItemService : IReadingItemService +{ + public ComicInfo GetComicInfo(string filePath, MangaFormat format) + { + throw new System.NotImplementedException(); + } + + public int GetNumberOfPages(string filePath, MangaFormat format) + { + throw new System.NotImplementedException(); + } + + public string GetCoverImage(string fileFilePath, string fileName, MangaFormat format) + { + throw new System.NotImplementedException(); + } + + public void Extract(string fileFilePath, string targetDirectory, MangaFormat format, int imageCount = 1) + { + throw new System.NotImplementedException(); + } + + public ParserInfo Parse(string path, string rootPath, LibraryType type) + { + throw new System.NotImplementedException(); + } +} + +public class ParseScannedFilesTests +{ + private readonly ILogger _logger = Substitute.For>(); + private readonly IUnitOfWork _unitOfWork; + + private readonly DbConnection _connection; + private readonly DataContext _context; + + private const string CacheDirectory = "C:/kavita/config/cache/"; + private const string CoverImageDirectory = "C:/kavita/config/covers/"; + private const string BackupDirectory = "C:/kavita/config/backups/"; + private const string DataDirectory = "C:/data/"; + + public ParseScannedFilesTests() + { + var contextOptions = new DbContextOptionsBuilder() + .UseSqlite(CreateInMemoryDatabase()) + .Options; + _connection = RelationalOptionsExtension.Extract(contextOptions).Connection; + + _context = new DataContext(contextOptions); + Task.Run(SeedDb).GetAwaiter().GetResult(); + + _unitOfWork = new UnitOfWork(_context, Substitute.For(), null); + + // Since ProcessFile relies on _readingItemService, we can implement our own versions of _readingItemService so we have control over how the calls work + } + + #region Setup + + private static DbConnection CreateInMemoryDatabase() + { + var connection = new SqliteConnection("Filename=:memory:"); + + connection.Open(); + + return connection; + } + + public void Dispose() => _connection.Dispose(); + + private async Task SeedDb() + { + await _context.Database.MigrateAsync(); + var filesystem = CreateFileSystem(); + + await Seed.SeedSettings(_context, new DirectoryService(Substitute.For>(), filesystem)); + + var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync(); + setting.Value = CacheDirectory; + + setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync(); + setting.Value = BackupDirectory; + + _context.ServerSetting.Update(setting); + + _context.Library.Add(new Library() + { + Name = "Manga", + Folders = new List() + { + new FolderPath() + { + Path = DataDirectory + } + } + }); + return await _context.SaveChangesAsync() > 0; + } + + private async Task ResetDB() + { + _context.Series.RemoveRange(_context.Series.ToList()); + + await _context.SaveChangesAsync(); + } + + private static MockFileSystem CreateFileSystem() + { + var fileSystem = new MockFileSystem(); + fileSystem.Directory.SetCurrentDirectory("C:/kavita/"); + fileSystem.AddDirectory("C:/kavita/config/"); + fileSystem.AddDirectory(CacheDirectory); + fileSystem.AddDirectory(CoverImageDirectory); + fileSystem.AddDirectory(BackupDirectory); + fileSystem.AddDirectory(DataDirectory); + + return fileSystem; + } + + #endregion + + #region 
GetInfosByName + + [Fact] + public void GetInfosByName() + { + + } + + #endregion + + #region MergeName + + [Fact] + public void MergeName_() + { + + } + + #endregion +} diff --git a/API.Tests/Services/ScannerServiceTests.cs b/API.Tests/Services/ScannerServiceTests.cs index 4c32e498c..9e3c5d81c 100644 --- a/API.Tests/Services/ScannerServiceTests.cs +++ b/API.Tests/Services/ScannerServiceTests.cs @@ -11,8 +11,6 @@ using API.Entities; using API.Entities.Enums; using API.Entities.Metadata; using API.Helpers; -using API.Interfaces; -using API.Interfaces.Services; using API.Parser; using API.Services; using API.Services.Tasks; @@ -29,76 +27,8 @@ using Xunit; namespace API.Tests.Services { - public class ScannerServiceTests : IDisposable + public class ScannerServiceTests { - private readonly ScannerService _scannerService; - private readonly ILogger _logger = Substitute.For>(); - private readonly IArchiveService _archiveService = Substitute.For(); - private readonly IBookService _bookService = Substitute.For(); - private readonly IImageService _imageService = Substitute.For(); - private readonly IDirectoryService _directoryService = Substitute.For(); - private readonly ILogger _metadataLogger = Substitute.For>(); - private readonly ICacheService _cacheService; - private readonly IHubContext _messageHub = Substitute.For>(); - - private readonly DbConnection _connection; - private readonly DataContext _context; - - - public ScannerServiceTests() - { - var contextOptions = new DbContextOptionsBuilder() - .UseSqlite(CreateInMemoryDatabase()) - .Options; - _connection = RelationalOptionsExtension.Extract(contextOptions).Connection; - - _context = new DataContext(contextOptions); - Task.Run(SeedDb).GetAwaiter().GetResult(); - - IUnitOfWork unitOfWork = new UnitOfWork(_context, Substitute.For(), null); - - var file = new MockFileData("") - { - LastWriteTime = DateTimeOffset.Now.Subtract(TimeSpan.FromMinutes(1)) - }; - var fileSystem = new MockFileSystem(new Dictionary - { - { "/data/Darker than Black.zip", file }, - { "/data/Cage of Eden - v10.cbz", file }, - { "/data/Cage of Eden - v1.cbz", file }, - }); - - var fileService = new FileService(fileSystem); - ICacheHelper cacheHelper = new CacheHelper(fileService); - - - IMetadataService metadataService = - Substitute.For(unitOfWork, _metadataLogger, _archiveService, - _bookService, _imageService, _messageHub, cacheHelper); - _scannerService = new ScannerService(unitOfWork, _logger, _archiveService, metadataService, _bookService, - _cacheService, _messageHub, fileService, _directoryService); - } - - private async Task SeedDb() - { - await _context.Database.MigrateAsync(); - await Seed.SeedSettings(_context); - - _context.Library.Add(new Library() - { - Name = "Manga", - Folders = new List() - { - new FolderPath() - { - Path = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Manga") - } - } - }); - return await _context.SaveChangesAsync() > 0; - } - - [Fact] public void AddOrUpdateFileForChapter() { @@ -227,16 +157,5 @@ namespace API.Tests.Services } } - - private static DbConnection CreateInMemoryDatabase() - { - var connection = new SqliteConnection("Filename=:memory:"); - - connection.Open(); - - return connection; - } - - public void Dispose() => _connection.Dispose(); } } diff --git a/API.Tests/generate_test_data.py b/API.Tests/generate_test_data.py deleted file mode 100644 index 69652969a..000000000 --- a/API.Tests/generate_test_data.py +++ /dev/null @@ -1,80 +0,0 @@ -""" This script should be run on a 
directory which will generate a test case file - that can be loaded into the renametest.py""" -import os -from pathlib import Path -import shutil - -verbose = False - -def print_log(val): - if verbose: - print(val) - - -def create_test_base(file, root_dir): - """ Creates and returns a new base directory for data creation for a given testcase.""" - base_dir = os.path.split(file.split('-testcase.txt')[0])[-1] - print_log('base_dir: {0}'.format(base_dir)) - new_dir = os.path.join(root_dir, base_dir) - print_log('new dir: {0}'.format(new_dir)) - p = Path(new_dir) - if not p.exists(): - os.mkdir(new_dir) - - return new_dir - - - -def generate_data(file, root_dir): - ''' Generates directories and fake files for testing against ''' - - base_dir = '' - if file.endswith('-testcase.txt'): - base_dir = create_test_base(file, root_dir) - - files_to_create = [] - with open(file, 'r') as in_file: - files_to_create = in_file.read().splitlines() - - for filepath in files_to_create: - for part in os.path.split(filepath): - part_path = os.path.join(base_dir, part) - print_log('Checking if {0} exists '.format(part_path)) - p = Path(part_path) - - if not p.exists(): - print_log('Creating: {0}'.format(part)) - - if p.suffix != '': - with open(os.path.join(root_dir, base_dir + '/' + filepath), 'w+') as f: - f.write('') - else: - os.mkdir(part_path) - -def clean_up_generated_data(root_dir): - for root, dirs, files in os.walk(root_dir): - for dir in dirs: - shutil.rmtree(os.path.join(root, dir)) - for file in files: - if not file.endswith('-testcase.txt'): - print_log('Removing {0}'.format(os.path.join(root, file))) - os.remove(os.path.join(root, file)) - - -def generate_test_file(): - root_dir = os.path.abspath('.') - current_folder = os.path.split(root_dir)[-1] - out_files = [] - for root, _, files in os.walk(root_dir): - for file in files: - if not file.endswith('-testcase.txt'): - filename = os.path.join(root.replace(root_dir, ''), file) # root_dir or root_dir + '//'? 
- out_files.append(filename) - - with open(os.path.join(root_dir, current_folder + '-testcase.txt'), 'w+') as f: - for filename in out_files: - f.write(filename + '\n') - -if __name__ == '__main__': - verbose = True - generate_test_file() \ No newline at end of file diff --git a/API/API.csproj.DotSettings b/API/API.csproj.DotSettings index 80aad93c5..c7410bba2 100644 --- a/API/API.csproj.DotSettings +++ b/API/API.csproj.DotSettings @@ -1,2 +1,3 @@  - True \ No newline at end of file + True + True \ No newline at end of file diff --git a/API/Controllers/AccountController.cs b/API/Controllers/AccountController.cs index 3c9960402..4dd7d5318 100644 --- a/API/Controllers/AccountController.cs +++ b/API/Controllers/AccountController.cs @@ -4,12 +4,11 @@ using System.Linq; using System.Reflection; using System.Threading.Tasks; using API.Constants; +using API.Data; using API.DTOs; using API.DTOs.Account; using API.Entities; using API.Extensions; -using API.Interfaces; -using API.Interfaces.Services; using API.Services; using AutoMapper; using Kavita.Common; diff --git a/API/Controllers/BookController.cs b/API/Controllers/BookController.cs index cf5e66e22..e05545ce2 100644 --- a/API/Controllers/BookController.cs +++ b/API/Controllers/BookController.cs @@ -1,12 +1,11 @@ using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; +using API.Data; using API.DTOs; using API.DTOs.Reader; using API.Entities.Enums; using API.Extensions; -using API.Interfaces; -using API.Interfaces.Services; using API.Services; using HtmlAgilityPack; using Microsoft.AspNetCore.Mvc; diff --git a/API/Controllers/CollectionController.cs b/API/Controllers/CollectionController.cs index aad1d91e4..2b69499de 100644 --- a/API/Controllers/CollectionController.cs +++ b/API/Controllers/CollectionController.cs @@ -4,10 +4,8 @@ using System.Linq; using System.Threading.Tasks; using API.Data; using API.DTOs.CollectionTags; -using API.Entities; using API.Entities.Metadata; using API.Extensions; -using API.Interfaces; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; diff --git a/API/Controllers/DownloadController.cs b/API/Controllers/DownloadController.cs index fd0c45456..d39985df6 100644 --- a/API/Controllers/DownloadController.cs +++ b/API/Controllers/DownloadController.cs @@ -4,12 +4,11 @@ using System.IO; using System.Linq; using System.Threading.Tasks; using API.Comparators; +using API.Data; using API.DTOs.Downloads; using API.Entities; using API.Entities.Enums; using API.Extensions; -using API.Interfaces; -using API.Interfaces.Services; using API.Services; using API.SignalR; using Kavita.Common; @@ -47,21 +46,21 @@ namespace API.Controllers public async Task> GetVolumeSize(int volumeId) { var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId); - return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath))); + return Ok(_directoryService.GetTotalSize(files.Select(c => c.FilePath))); } [HttpGet("chapter-size")] public async Task> GetChapterSize(int chapterId) { var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId); - return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath))); + return Ok(_directoryService.GetTotalSize(files.Select(c => c.FilePath))); } [HttpGet("series-size")] public async Task> GetSeriesSize(int seriesId) { var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId); - return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath))); + return 
Ok(_directoryService.GetTotalSize(files.Select(c => c.FilePath))); } [HttpGet("volume")] @@ -141,13 +140,13 @@ namespace API.Controllers var totalFilePaths = new List(); var tempFolder = $"download_{series.Id}_bookmarks"; - var fullExtractPath = Path.Join(DirectoryService.TempDirectory, tempFolder); - if (new DirectoryInfo(fullExtractPath).Exists) + var fullExtractPath = Path.Join(_directoryService.TempDirectory, tempFolder); + if (_directoryService.FileSystem.DirectoryInfo.FromDirectoryName(fullExtractPath).Exists) { return BadRequest( "Server is currently processing this exact download. Please try again in a few minutes."); } - DirectoryService.ExistOrCreate(fullExtractPath); + _directoryService.ExistOrCreate(fullExtractPath); var uniqueChapterIds = downloadBookmarkDto.Bookmarks.Select(b => b.ChapterId).Distinct().ToList(); @@ -160,16 +159,16 @@ namespace API.Controllers switch (series.Format) { case MangaFormat.Image: - DirectoryService.ExistOrCreate(chapterExtractPath); + _directoryService.ExistOrCreate(chapterExtractPath); _directoryService.CopyFilesToDirectory(mangaFiles.Select(f => f.FilePath), chapterExtractPath, $"{chapterId}_"); break; case MangaFormat.Archive: case MangaFormat.Pdf: _cacheService.ExtractChapterFiles(chapterExtractPath, mangaFiles.ToList()); - var originalFiles = DirectoryService.GetFilesWithExtension(chapterExtractPath, + var originalFiles = _directoryService.GetFilesWithExtension(chapterExtractPath, Parser.Parser.ImageFileExtensions); _directoryService.CopyFilesToDirectory(originalFiles, chapterExtractPath, $"{chapterId}_"); - DirectoryService.DeleteFiles(originalFiles); + _directoryService.DeleteFiles(originalFiles); break; case MangaFormat.Epub: return BadRequest("Series is not in a valid format."); @@ -177,7 +176,7 @@ namespace API.Controllers return BadRequest("Series is not in a valid format. Please rescan series and try again."); } - var files = DirectoryService.GetFilesWithExtension(chapterExtractPath, Parser.Parser.ImageFileExtensions); + var files = _directoryService.GetFilesWithExtension(chapterExtractPath, Parser.Parser.ImageFileExtensions); // Filter out images that aren't in bookmarks Array.Sort(files, _numericComparer); totalFilePaths.AddRange(files.Where((_, i) => chapterPages.Contains(i))); @@ -186,7 +185,7 @@ namespace API.Controllers var (fileBytes, _) = await _archiveService.CreateZipForDownload(totalFilePaths, tempFolder); - DirectoryService.ClearAndDeleteDirectory(fullExtractPath); + _directoryService.ClearAndDeleteDirectory(fullExtractPath); return File(fileBytes, DefaultContentType, $"{series.Name} - Bookmarks.zip"); } diff --git a/API/Controllers/FallbackController.cs b/API/Controllers/FallbackController.cs index ecd0315e2..ae8bad21f 100644 --- a/API/Controllers/FallbackController.cs +++ b/API/Controllers/FallbackController.cs @@ -1,5 +1,5 @@ using System.IO; -using API.Interfaces; +using API.Services; using Microsoft.AspNetCore.Mvc; namespace API.Controllers @@ -12,7 +12,7 @@ namespace API.Controllers public FallbackController(ITaskScheduler taskScheduler) { - // This is used to load TaskScheduler on startup without having to navigate to a Controller that uses. + // This is used to load TaskScheduler on startup without having to navigate to a Controller that uses. 
_taskScheduler = taskScheduler; } @@ -21,4 +21,4 @@ namespace API.Controllers return PhysicalFile(Path.Combine(Directory.GetCurrentDirectory(), "wwwroot", "index.html"), "text/HTML"); } } -} \ No newline at end of file +} diff --git a/API/Controllers/ImageController.cs b/API/Controllers/ImageController.cs index 88bafcff7..bbec90b23 100644 --- a/API/Controllers/ImageController.cs +++ b/API/Controllers/ImageController.cs @@ -1,7 +1,7 @@ using System.IO; using System.Threading.Tasks; +using API.Data; using API.Extensions; -using API.Interfaces; using API.Services; using Microsoft.AspNetCore.Mvc; @@ -13,11 +13,13 @@ namespace API.Controllers public class ImageController : BaseApiController { private readonly IUnitOfWork _unitOfWork; + private readonly IDirectoryService _directoryService; /// - public ImageController(IUnitOfWork unitOfWork) + public ImageController(IUnitOfWork unitOfWork, IDirectoryService directoryService) { _unitOfWork = unitOfWork; + _directoryService = directoryService; } /// @@ -28,12 +30,12 @@ namespace API.Controllers [HttpGet("chapter-cover")] public async Task GetChapterCoverImage(int chapterId) { - var path = Path.Join(DirectoryService.CoverImageDirectory, await _unitOfWork.ChapterRepository.GetChapterCoverImageAsync(chapterId)); - if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No cover image"); - var format = Path.GetExtension(path).Replace(".", ""); + var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.ChapterRepository.GetChapterCoverImageAsync(chapterId)); + if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); + var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); Response.AddCacheHeader(path); - return PhysicalFile(path, "image/" + format, Path.GetFileName(path)); + return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); } /// @@ -44,12 +46,12 @@ namespace API.Controllers [HttpGet("volume-cover")] public async Task GetVolumeCoverImage(int volumeId) { - var path = Path.Join(DirectoryService.CoverImageDirectory, await _unitOfWork.VolumeRepository.GetVolumeCoverImageAsync(volumeId)); - if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No cover image"); - var format = Path.GetExtension(path).Replace(".", ""); + var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.VolumeRepository.GetVolumeCoverImageAsync(volumeId)); + if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); + var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); Response.AddCacheHeader(path); - return PhysicalFile(path, "image/" + format, Path.GetFileName(path)); + return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); } /// @@ -60,12 +62,12 @@ namespace API.Controllers [HttpGet("series-cover")] public async Task GetSeriesCoverImage(int seriesId) { - var path = Path.Join(DirectoryService.CoverImageDirectory, await _unitOfWork.SeriesRepository.GetSeriesCoverImageAsync(seriesId)); - if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No cover image"); - var format = Path.GetExtension(path).Replace(".", ""); + var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.SeriesRepository.GetSeriesCoverImageAsync(seriesId)); + if (string.IsNullOrEmpty(path) || 
!_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); + var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); Response.AddCacheHeader(path); - return PhysicalFile(path, "image/" + format, Path.GetFileName(path)); + return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); } /// @@ -76,12 +78,12 @@ namespace API.Controllers [HttpGet("collection-cover")] public async Task GetCollectionCoverImage(int collectionTagId) { - var path = Path.Join(DirectoryService.CoverImageDirectory, await _unitOfWork.CollectionTagRepository.GetCoverImageAsync(collectionTagId)); - if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No cover image"); - var format = Path.GetExtension(path).Replace(".", ""); + var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.CollectionTagRepository.GetCoverImageAsync(collectionTagId)); + if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image"); + var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", ""); Response.AddCacheHeader(path); - return PhysicalFile(path, "image/" + format, Path.GetFileName(path)); + return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path)); } } } diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs index 15a8d1166..9cdd06158 100644 --- a/API/Controllers/LibraryController.cs +++ b/API/Controllers/LibraryController.cs @@ -3,13 +3,13 @@ using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading.Tasks; +using API.Data; using API.Data.Repositories; using API.DTOs; using API.Entities; using API.Entities.Enums; using API.Extensions; -using API.Interfaces; -using API.Interfaces.Services; +using API.Services; using AutoMapper; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; diff --git a/API/Controllers/OPDSController.cs b/API/Controllers/OPDSController.cs index d4b7917d0..26d036f16 100644 --- a/API/Controllers/OPDSController.cs +++ b/API/Controllers/OPDSController.cs @@ -5,6 +5,7 @@ using System.Linq; using System.Threading.Tasks; using System.Xml.Serialization; using API.Comparators; +using API.Data; using API.DTOs; using API.DTOs.CollectionTags; using API.DTOs.Filtering; @@ -12,774 +13,771 @@ using API.DTOs.OPDS; using API.Entities; using API.Extensions; using API.Helpers; -using API.Interfaces; -using API.Interfaces.Services; using API.Services; using Kavita.Common; using Microsoft.AspNetCore.Mvc; -namespace API.Controllers +namespace API.Controllers; + +public class OpdsController : BaseApiController { - public class OpdsController : BaseApiController + private readonly IUnitOfWork _unitOfWork; + private readonly IDownloadService _downloadService; + private readonly IDirectoryService _directoryService; + private readonly ICacheService _cacheService; + private readonly IReaderService _readerService; + + + private readonly XmlSerializer _xmlSerializer; + private readonly XmlSerializer _xmlOpenSearchSerializer; + private const string Prefix = "/api/opds/"; + private readonly FilterDto _filterDto = new FilterDto() { - private readonly IUnitOfWork _unitOfWork; - private readonly IDownloadService _downloadService; - private readonly IDirectoryService _directoryService; - private readonly ICacheService _cacheService; - private readonly IReaderService _readerService; + MangaFormat = null + 
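+        // A null MangaFormat applies no format filter to the OPDS results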
}; + private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer(); + public OpdsController(IUnitOfWork unitOfWork, IDownloadService downloadService, + IDirectoryService directoryService, ICacheService cacheService, + IReaderService readerService) + { + _unitOfWork = unitOfWork; + _downloadService = downloadService; + _directoryService = directoryService; + _cacheService = cacheService; + _readerService = readerService; - private readonly XmlSerializer _xmlSerializer; - private readonly XmlSerializer _xmlOpenSearchSerializer; - private const string Prefix = "/api/opds/"; - private readonly FilterDto _filterDto = new FilterDto() + _xmlSerializer = new XmlSerializer(typeof(Feed)); + _xmlOpenSearchSerializer = new XmlSerializer(typeof(OpenSearchDescription)); + + } + + [HttpPost("{apiKey}")] + [HttpGet("{apiKey}")] + [Produces("application/xml")] + public async Task Get(string apiKey) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var feed = CreateFeed("Kavita", string.Empty, apiKey); + SetFeedId(feed, "root"); + feed.Entries.Add(new FeedEntry() { - MangaFormat = null + Id = "onDeck", + Title = "On Deck", + Content = new FeedEntryContent() + { + Text = "Browse by On Deck" + }, + Links = new List() + { + CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/on-deck"), + } + }); + feed.Entries.Add(new FeedEntry() + { + Id = "recentlyAdded", + Title = "Recently Added", + Content = new FeedEntryContent() + { + Text = "Browse by Recently Added" + }, + Links = new List() + { + CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/recently-added"), + } + }); + feed.Entries.Add(new FeedEntry() + { + Id = "readingList", + Title = "Reading Lists", + Content = new FeedEntryContent() + { + Text = "Browse by Reading Lists" + }, + Links = new List() + { + CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/reading-list"), + } + }); + feed.Entries.Add(new FeedEntry() + { + Id = "allLibraries", + Title = "All Libraries", + Content = new FeedEntryContent() + { + Text = "Browse by Libraries" + }, + Links = new List() + { + CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/libraries"), + } + }); + feed.Entries.Add(new FeedEntry() + { + Id = "allCollections", + Title = "All Collections", + Content = new FeedEntryContent() + { + Text = "Browse by Collections" + }, + Links = new List() + { + CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/collections"), + } + }); + return CreateXmlResult(SerializeXml(feed)); + } + + + [HttpGet("{apiKey}/libraries")] + [Produces("application/xml")] + public async Task GetLibraries(string apiKey) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var userId = await GetUser(apiKey); + var libraries = await _unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(userId); + var feed = CreateFeed("All Libraries", $"{apiKey}/libraries", apiKey); + SetFeedId(feed, "libraries"); + foreach (var library in libraries) + { + feed.Entries.Add(new FeedEntry() + { + Id = library.Id.ToString(), + Title = library.Name, + Links = new List() + { + CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/libraries/{library.Id}"), + } + }); + } + + return 
CreateXmlResult(SerializeXml(feed)); + } + + [HttpGet("{apiKey}/collections")] + [Produces("application/xml")] + public async Task GetCollections(string apiKey) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var userId = await GetUser(apiKey); + var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); + var isAdmin = await _unitOfWork.UserRepository.IsUserAdmin(user); + + IList tags = isAdmin ? (await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync()).ToList() + : (await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync()).ToList(); + + + var feed = CreateFeed("All Collections", $"{apiKey}/collections", apiKey); + SetFeedId(feed, "collections"); + foreach (var tag in tags) + { + feed.Entries.Add(new FeedEntry() + { + Id = tag.Id.ToString(), + Title = tag.Title, + Summary = tag.Summary, + Links = new List() + { + CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/collections/{tag.Id}"), + CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/collection-cover?collectionId={tag.Id}"), + CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"/api/image/collection-cover?collectionId={tag.Id}") + } + }); + } + + return CreateXmlResult(SerializeXml(feed)); + } + + + [HttpGet("{apiKey}/collections/{collectionId}")] + [Produces("application/xml")] + public async Task GetCollection(int collectionId, string apiKey, [FromQuery] int pageNumber = 0) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var userId = await GetUser(apiKey); + var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); + var isAdmin = await _unitOfWork.UserRepository.IsUserAdmin(user); + + IEnumerable tags; + if (isAdmin) + { + tags = await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync(); + } + else + { + tags = await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(); + } + + var tag = tags.SingleOrDefault(t => t.Id == collectionId); + if (tag == null) + { + return BadRequest("Collection does not exist or you don't have access"); + } + + var series = await _unitOfWork.SeriesRepository.GetSeriesDtoForCollectionAsync(collectionId, userId, new UserParams() + { + PageNumber = pageNumber, + PageSize = 20 + }); + + var feed = CreateFeed(tag.Title + " Collection", $"{apiKey}/collections/{collectionId}", apiKey); + SetFeedId(feed, $"collections-{collectionId}"); + AddPagination(feed, series, $"{Prefix}{apiKey}/collections/{collectionId}"); + + foreach (var seriesDto in series) + { + feed.Entries.Add(CreateSeries(seriesDto, apiKey)); + } + + + return CreateXmlResult(SerializeXml(feed)); + } + + [HttpGet("{apiKey}/reading-list")] + [Produces("application/xml")] + public async Task GetReadingLists(string apiKey, [FromQuery] int pageNumber = 0) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var userId = await GetUser(apiKey); + + var readingLists = await _unitOfWork.ReadingListRepository.GetReadingListDtosForUserAsync(userId, true, new UserParams() + { + PageNumber = pageNumber + }); + + + var feed = CreateFeed("All Reading Lists", $"{apiKey}/reading-list", apiKey); + SetFeedId(feed, "reading-list"); + foreach (var readingListDto in readingLists) + { + feed.Entries.Add(new FeedEntry() + { + Id = readingListDto.Id.ToString(), + 
Title = readingListDto.Title, + Summary = readingListDto.Summary, + Links = new List() + { + CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/reading-list/{readingListDto.Id}"), + } + }); + } + + return CreateXmlResult(SerializeXml(feed)); + } + + [HttpGet("{apiKey}/reading-list/{readingListId}")] + [Produces("application/xml")] + public async Task GetReadingListItems(int readingListId, string apiKey) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var userId = await GetUser(apiKey); + var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); + + var userWithLists = await _unitOfWork.UserRepository.GetUserWithReadingListsByUsernameAsync(user.UserName); + var readingList = userWithLists.ReadingLists.SingleOrDefault(t => t.Id == readingListId); + if (readingList == null) + { + return BadRequest("Reading list does not exist or you don't have access"); + } + + var feed = CreateFeed(readingList.Title + " Reading List", $"{apiKey}/reading-list/{readingListId}", apiKey); + SetFeedId(feed, $"reading-list-{readingListId}"); + + var items = (await _unitOfWork.ReadingListRepository.GetReadingListItemDtosByIdAsync(readingListId, userId)).ToList(); + foreach (var item in items) + { + feed.Entries.Add(new FeedEntry() + { + Id = item.ChapterId.ToString(), + Title = $"{item.SeriesName} Chapter {item.ChapterNumber}", + Links = new List() + { + CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{item.SeriesId}/volume/{item.VolumeId}/chapter/{item.ChapterId}"), + CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/chapter-cover?chapterId={item.ChapterId}") + } + }); + } + + return CreateXmlResult(SerializeXml(feed)); + } + + [HttpGet("{apiKey}/libraries/{libraryId}")] + [Produces("application/xml")] + public async Task GetSeriesForLibrary(int libraryId, string apiKey, [FromQuery] int pageNumber = 0) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var userId = await GetUser(apiKey); + var library = + (await _unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(userId)).SingleOrDefault(l => + l.Id == libraryId); + if (library == null) + { + return BadRequest("User does not have access to this library"); + } + + var series = await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, userId, new UserParams() + { + PageNumber = pageNumber, + PageSize = 20 + }, _filterDto); + + var feed = CreateFeed(library.Name, $"{apiKey}/libraries/{libraryId}", apiKey); + SetFeedId(feed, $"library-{library.Name}"); + AddPagination(feed, series, $"{Prefix}{apiKey}/libraries/{libraryId}"); + + foreach (var seriesDto in series) + { + feed.Entries.Add(CreateSeries(seriesDto, apiKey)); + } + + return CreateXmlResult(SerializeXml(feed)); + } + + [HttpGet("{apiKey}/recently-added")] + [Produces("application/xml")] + public async Task GetRecentlyAdded(string apiKey, [FromQuery] int pageNumber = 1) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var userId = await GetUser(apiKey); + var recentlyAdded = await _unitOfWork.SeriesRepository.GetRecentlyAdded(0, userId, new UserParams() + { + PageNumber = pageNumber, + PageSize = 20 + }, _filterDto); + + var feed = CreateFeed("Recently Added", $"{apiKey}/recently-added", 
apiKey); + SetFeedId(feed, "recently-added"); + AddPagination(feed, recentlyAdded, $"{Prefix}{apiKey}/recently-added"); + + foreach (var seriesDto in recentlyAdded) + { + feed.Entries.Add(CreateSeries(seriesDto, apiKey)); + } + + return CreateXmlResult(SerializeXml(feed)); + } + + [HttpGet("{apiKey}/on-deck")] + [Produces("application/xml")] + public async Task GetOnDeck(string apiKey, [FromQuery] int pageNumber = 1) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var userId = await GetUser(apiKey); + var userParams = new UserParams() + { + PageNumber = pageNumber, + PageSize = 20 }; - private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer(); + var results = await _unitOfWork.SeriesRepository.GetOnDeck(userId, 0, userParams, _filterDto); + var listResults = results.DistinctBy(s => s.Name).Skip((userParams.PageNumber - 1) * userParams.PageSize) + .Take(userParams.PageSize).ToList(); + var pagedList = new PagedList(listResults, listResults.Count, userParams.PageNumber, userParams.PageSize); - public OpdsController(IUnitOfWork unitOfWork, IDownloadService downloadService, - IDirectoryService directoryService, ICacheService cacheService, - IReaderService readerService) + Response.AddPaginationHeader(pagedList.CurrentPage, pagedList.PageSize, pagedList.TotalCount, pagedList.TotalPages); + + var feed = CreateFeed("On Deck", $"{apiKey}/on-deck", apiKey); + SetFeedId(feed, "on-deck"); + AddPagination(feed, pagedList, $"{Prefix}{apiKey}/on-deck"); + + foreach (var seriesDto in pagedList) { - _unitOfWork = unitOfWork; - _downloadService = downloadService; - _directoryService = directoryService; - _cacheService = cacheService; - _readerService = readerService; - - _xmlSerializer = new XmlSerializer(typeof(Feed)); - _xmlOpenSearchSerializer = new XmlSerializer(typeof(OpenSearchDescription)); - + feed.Entries.Add(CreateSeries(seriesDto, apiKey)); } - [HttpPost("{apiKey}")] - [HttpGet("{apiKey}")] - [Produces("application/xml")] - public async Task Get(string apiKey) + return CreateXmlResult(SerializeXml(feed)); + } + + [HttpGet("{apiKey}/series")] + [Produces("application/xml")] + public async Task SearchSeries(string apiKey, [FromQuery] string query) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var userId = await GetUser(apiKey); + if (string.IsNullOrEmpty(query)) + { + return BadRequest("You must pass a query parameter"); + } + query = query.Replace(@"%", ""); + // Get libraries user has access to + var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(userId)).ToList(); + + if (!libraries.Any()) return BadRequest("User does not have access to any libraries"); + + var series = await _unitOfWork.SeriesRepository.SearchSeries(libraries.Select(l => l.Id).ToArray(), query); + + var feed = CreateFeed(query, $"{apiKey}/series?query=" + query, apiKey); + SetFeedId(feed, "search-series"); + foreach (var seriesDto in series) + { + feed.Entries.Add(CreateSeries(seriesDto, apiKey)); + } + + return CreateXmlResult(SerializeXml(feed)); + } + + private static void SetFeedId(Feed feed, string id) + { + feed.Id = id; + } + + [HttpGet("{apiKey}/search")] + [Produces("application/xml")] + public async Task GetSearchDescriptor(string apiKey) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this 
server"); + var feed = new OpenSearchDescription() + { + ShortName = "Search", + Description = "Search for Series", + Url = new SearchLink() + { + Type = FeedLinkType.AtomAcquisition, + Template = $"{Prefix}{apiKey}/series?query=" + "{searchTerms}" + } + }; + + await using var sm = new StringWriter(); + _xmlOpenSearchSerializer.Serialize(sm, feed); + + return CreateXmlResult(sm.ToString().Replace("utf-16", "utf-8")); + } + + [HttpGet("{apiKey}/series/{seriesId}")] + [Produces("application/xml")] + public async Task GetSeries(string apiKey, int seriesId) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var userId = await GetUser(apiKey); + var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); + var volumes = await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId); + var feed = CreateFeed(series.Name + " - Volumes", $"{apiKey}/series/{series.Id}", apiKey); + SetFeedId(feed, $"series-{series.Id}"); + feed.Links.Add(CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/series-cover?seriesId={seriesId}")); + foreach (var volumeDto in volumes) + { + feed.Entries.Add(CreateVolume(volumeDto, seriesId, apiKey)); + } + + return CreateXmlResult(SerializeXml(feed)); + } + + [HttpGet("{apiKey}/series/{seriesId}/volume/{volumeId}")] + [Produces("application/xml")] + public async Task GetVolume(string apiKey, int seriesId, int volumeId) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var userId = await GetUser(apiKey); + var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); + var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId); + var chapters = + (await _unitOfWork.ChapterRepository.GetChaptersAsync(volumeId)).OrderBy(x => double.Parse(x.Number), + _chapterSortComparer); + + var feed = CreateFeed(series.Name + " - Volume " + volume.Name + " - Chapters ", $"{apiKey}/series/{seriesId}/volume/{volumeId}", apiKey); + SetFeedId(feed, $"series-{series.Id}-volume-{volume.Id}-chapters"); + foreach (var chapter in chapters) { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var feed = CreateFeed("Kavita", string.Empty, apiKey); - SetFeedId(feed, "root"); feed.Entries.Add(new FeedEntry() { - Id = "onDeck", - Title = "On Deck", - Content = new FeedEntryContent() - { - Text = "Browse by On Deck" - }, + Id = chapter.Id.ToString(), + Title = "Chapter " + chapter.Number, Links = new List() { - CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/on-deck"), + CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapter.Id}"), + CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/chapter-cover?chapterId={chapter.Id}") } }); - feed.Entries.Add(new FeedEntry() - { - Id = "recentlyAdded", - Title = "Recently Added", - Content = new FeedEntryContent() - { - Text = "Browse by Recently Added" - }, - Links = new List() - { - CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/recently-added"), - } - }); - feed.Entries.Add(new FeedEntry() - { - Id = "readingList", - Title = "Reading Lists", - Content = new FeedEntryContent() - { - Text = "Browse by Reading Lists" 
- }, - Links = new List() - { - CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/reading-list"), - } - }); - feed.Entries.Add(new FeedEntry() - { - Id = "allLibraries", - Title = "All Libraries", - Content = new FeedEntryContent() - { - Text = "Browse by Libraries" - }, - Links = new List() - { - CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/libraries"), - } - }); - feed.Entries.Add(new FeedEntry() - { - Id = "allCollections", - Title = "All Collections", - Content = new FeedEntryContent() - { - Text = "Browse by Collections" - }, - Links = new List() - { - CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/collections"), - } - }); - return CreateXmlResult(SerializeXml(feed)); + } + + return CreateXmlResult(SerializeXml(feed)); + } + + [HttpGet("{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}")] + [Produces("application/xml")] + public async Task GetChapter(string apiKey, int seriesId, int volumeId, int chapterId) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var userId = await GetUser(apiKey); + var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); + var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId); + var chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapterId); + var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId); + + var feed = CreateFeed(series.Name + " - Volume " + volume.Name + " - Chapters ", $"{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}", apiKey); + SetFeedId(feed, $"series-{series.Id}-volume-{volume.Id}-chapter-{chapter.Id}-files"); + foreach (var mangaFile in files) + { + feed.Entries.Add(CreateChapter(seriesId, volumeId, chapterId, mangaFile, series, volume, chapter, apiKey)); + } + + return CreateXmlResult(SerializeXml(feed)); + } + + /// + /// Downloads a file + /// + /// + /// + /// + /// Not used. 
Only for Chunky to allow download links + /// + [HttpGet("{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}/download/{filename}")] + public async Task DownloadFile(string apiKey, int seriesId, int volumeId, int chapterId, string filename) + { + if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) + return BadRequest("OPDS is not enabled on this server"); + var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId); + var (bytes, contentType, fileDownloadName) = await _downloadService.GetFirstFileDownload(files); + return File(bytes, contentType, fileDownloadName); + } + + private static ContentResult CreateXmlResult(string xml) + { + return new ContentResult + { + ContentType = "application/xml", + Content = xml, + StatusCode = 200 + }; + } + + private static void AddPagination(Feed feed, PagedList list, string href) + { + var url = href; + if (href.Contains("?")) + { + url += "&"; + } + else + { + url += "?"; + } + + var pageNumber = Math.Max(list.CurrentPage, 1); + + if (pageNumber > 1) + { + feed.Links.Add(CreateLink(FeedLinkRelation.Prev, FeedLinkType.AtomNavigation, url + "pageNumber=" + (pageNumber - 1))); + } + + if (pageNumber + 1 <= list.TotalPages) + { + feed.Links.Add(CreateLink(FeedLinkRelation.Next, FeedLinkType.AtomNavigation, url + "pageNumber=" + (pageNumber + 1))); + } + + // Update self to point to current page + var selfLink = feed.Links.SingleOrDefault(l => l.Rel == FeedLinkRelation.Self); + if (selfLink != null) + { + selfLink.Href = url + "pageNumber=" + pageNumber; } - [HttpGet("{apiKey}/libraries")] - [Produces("application/xml")] - public async Task GetLibraries(string apiKey) + feed.Total = list.TotalCount; + feed.ItemsPerPage = list.PageSize; + feed.StartIndex = (Math.Max(list.CurrentPage - 1, 0) * list.PageSize) + 1; + } + + private static FeedEntry CreateSeries(SeriesDto seriesDto, string apiKey) + { + return new FeedEntry() { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var userId = await GetUser(apiKey); - var libraries = await _unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(userId); - var feed = CreateFeed("All Libraries", $"{apiKey}/libraries", apiKey); - SetFeedId(feed, "libraries"); - foreach (var library in libraries) + Id = seriesDto.Id.ToString(), + Title = $"{seriesDto.Name} ({seriesDto.Format})", + Summary = seriesDto.Summary, + Links = new List() { - feed.Entries.Add(new FeedEntry() - { - Id = library.Id.ToString(), - Title = library.Name, - Links = new List() - { - CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/libraries/{library.Id}"), - } - }); + CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{seriesDto.Id}"), + CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/series-cover?seriesId={seriesDto.Id}"), + CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"/api/image/series-cover?seriesId={seriesDto.Id}") } + }; + } - return CreateXmlResult(SerializeXml(feed)); - } - - [HttpGet("{apiKey}/collections")] - [Produces("application/xml")] - public async Task GetCollections(string apiKey) + private static FeedEntry CreateSeries(SearchResultDto searchResultDto, string apiKey) + { + return new FeedEntry() { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var userId = await 
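AddPagination above derives the prev, next and self links by appending a pageNumber query parameter, choosing ? or & depending on whether the href already carries a query string, and clamping the current page to at least 1. A small sketch of just that link math, with plain strings standing in for FeedLink and method names that are illustrative only:

using System;

public static class PaginationLinkSketch
{
    // Returns (prev, next, self) hrefs for a 1-based page within totalPages,
    // mirroring the ?/& handling and the Math.Max clamp in AddPagination.
    public static (string Prev, string Next, string Self) BuildLinks(string href, int currentPage, int totalPages)
    {
        var url = href + (href.Contains('?') ? "&" : "?");
        var pageNumber = Math.Max(currentPage, 1);

        var prev = pageNumber > 1 ? url + "pageNumber=" + (pageNumber - 1) : null;
        var next = pageNumber + 1 <= totalPages ? url + "pageNumber=" + (pageNumber + 1) : null;
        var self = url + "pageNumber=" + pageNumber;
        return (prev, next, self);
    }

    public static void Main()
    {
        var (prev, next, self) = BuildLinks("/api/opds/KEY/libraries/2", 2, 5);
        Console.WriteLine(prev); // /api/opds/KEY/libraries/2?pageNumber=1
        Console.WriteLine(next); // /api/opds/KEY/libraries/2?pageNumber=3
        Console.WriteLine(self); // /api/opds/KEY/libraries/2?pageNumber=2
    }
}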
GetUser(apiKey); - var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); - var isAdmin = await _unitOfWork.UserRepository.IsUserAdmin(user); - - IList tags = isAdmin ? (await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync()).ToList() - : (await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync()).ToList(); - - - var feed = CreateFeed("All Collections", $"{apiKey}/collections", apiKey); - SetFeedId(feed, "collections"); - foreach (var tag in tags) + Id = searchResultDto.SeriesId.ToString(), + Title = $"{searchResultDto.Name} ({searchResultDto.Format})", + Links = new List() { - feed.Entries.Add(new FeedEntry() - { - Id = tag.Id.ToString(), - Title = tag.Title, - Summary = tag.Summary, - Links = new List() - { - CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/collections/{tag.Id}"), - CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/collection-cover?collectionId={tag.Id}"), - CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"/api/image/collection-cover?collectionId={tag.Id}") - } - }); + CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{searchResultDto.SeriesId}"), + CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/series-cover?seriesId={searchResultDto.SeriesId}"), + CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"/api/image/series-cover?seriesId={searchResultDto.SeriesId}") } + }; + } - return CreateXmlResult(SerializeXml(feed)); - } - - - [HttpGet("{apiKey}/collections/{collectionId}")] - [Produces("application/xml")] - public async Task GetCollection(int collectionId, string apiKey, [FromQuery] int pageNumber = 0) + private static FeedEntry CreateVolume(VolumeDto volumeDto, int seriesId, string apiKey) + { + return new FeedEntry() { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var userId = await GetUser(apiKey); - var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); - var isAdmin = await _unitOfWork.UserRepository.IsUserAdmin(user); - - IEnumerable tags; - if (isAdmin) + Id = volumeDto.Id.ToString(), + Title = "Volume " + volumeDto.Name, + Links = new List() { - tags = await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync(); - } - else - { - tags = await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(); + CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{seriesId}/volume/{volumeDto.Id}"), + CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/volume-cover?volumeId={volumeDto.Id}"), + CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"/api/image/volume-cover?volumeId={volumeDto.Id}") } + }; + } - var tag = tags.SingleOrDefault(t => t.Id == collectionId); - if (tag == null) - { - return BadRequest("Collection does not exist or you don't have access"); - } - - var series = await _unitOfWork.SeriesRepository.GetSeriesDtoForCollectionAsync(collectionId, userId, new UserParams() - { - PageNumber = pageNumber, - PageSize = 20 - }); - - var feed = CreateFeed(tag.Title + " Collection", $"{apiKey}/collections/{collectionId}", apiKey); - SetFeedId(feed, $"collections-{collectionId}"); - AddPagination(feed, series, $"{Prefix}{apiKey}/collections/{collectionId}"); - - foreach (var seriesDto in series) - { - feed.Entries.Add(CreateSeries(seriesDto, apiKey)); - } - - - return CreateXmlResult(SerializeXml(feed)); - 
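Every endpoint above funnels through SerializeXml and CreateXmlResult: the feed is serialized with XmlSerializer into a StringWriter, and the XML declaration is patched from utf-16 to utf-8 because StringWriter always reports UTF-16. A standalone sketch of that pattern; MiniFeed and the demo class are illustrative stand-ins for Kavita's real Feed and OpenSearchDescription types:

using System;
using System.IO;
using System.Xml.Serialization;

[XmlRoot("feed", Namespace = "http://www.w3.org/2005/Atom")]
public class MiniFeed
{
    [XmlElement("title")] public string Title { get; set; }
    [XmlElement("id")] public string Id { get; set; }
}

public static class OpdsSerializationDemo
{
    private static readonly XmlSerializer Serializer = new XmlSerializer(typeof(MiniFeed));

    public static string Serialize(MiniFeed feed)
    {
        using var sw = new StringWriter();
        Serializer.Serialize(sw, feed);
        // StringWriter is UTF-16 internally, so the declaration says encoding="utf-16".
        // OPDS clients such as Chunky expect UTF-8, hence the string replace.
        return sw.ToString().Replace("utf-16", "utf-8");
    }

    public static void Main()
    {
        var xml = Serialize(new MiniFeed { Title = "Kavita", Id = "root" });
        Console.WriteLine(xml);
    }
}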
} - - [HttpGet("{apiKey}/reading-list")] - [Produces("application/xml")] - public async Task GetReadingLists(string apiKey, [FromQuery] int pageNumber = 0) + private FeedEntry CreateChapter(int seriesId, int volumeId, int chapterId, MangaFile mangaFile, SeriesDto series, Volume volume, ChapterDto chapter, string apiKey) + { + var fileSize = + DirectoryService.GetHumanReadableBytes(_directoryService.GetTotalSize(new List() + {mangaFile.FilePath})); + var fileType = _downloadService.GetContentTypeFromFile(mangaFile.FilePath); + var filename = Uri.EscapeDataString(Path.GetFileName(mangaFile.FilePath) ?? string.Empty); + return new FeedEntry() { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var userId = await GetUser(apiKey); - - var readingLists = await _unitOfWork.ReadingListRepository.GetReadingListDtosForUserAsync(userId, true, new UserParams() + Id = mangaFile.Id.ToString(), + Title = $"{series.Name} - Volume {volume.Name} - Chapter {chapter.Number}", + Extent = fileSize, + Summary = $"{fileType.Split("/")[1]} - {fileSize}", + Format = mangaFile.Format.ToString(), + Links = new List() { - PageNumber = pageNumber - }); - - - var feed = CreateFeed("All Reading Lists", $"{apiKey}/reading-list", apiKey); - SetFeedId(feed, "reading-list"); - foreach (var readingListDto in readingLists) + CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/chapter-cover?chapterId={chapterId}"), + CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"/api/image/chapter-cover?chapterId={chapterId}"), + // Chunky requires a file at the end. Our API ignores this + CreateLink(FeedLinkRelation.Acquisition, fileType, $"{Prefix}{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}/download/{filename}"), + CreatePageStreamLink(seriesId, volumeId, chapterId, mangaFile, apiKey) + }, + Content = new FeedEntryContent() { - feed.Entries.Add(new FeedEntry() - { - Id = readingListDto.Id.ToString(), - Title = readingListDto.Title, - Summary = readingListDto.Summary, - Links = new List() - { - CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/reading-list/{readingListDto.Id}"), - } - }); + Text = fileType, + Type = "text" } + }; + } - return CreateXmlResult(SerializeXml(feed)); - } + [HttpGet("{apiKey}/image")] + public async Task GetPageStreamedImage(string apiKey, [FromQuery] int seriesId, [FromQuery] int volumeId,[FromQuery] int chapterId, [FromQuery] int pageNumber) + { + if (pageNumber < 0) return BadRequest("Page cannot be less than 0"); + var chapter = await _cacheService.Ensure(chapterId); + if (chapter == null) return BadRequest("There was an issue finding image file for reading"); - [HttpGet("{apiKey}/reading-list/{readingListId}")] - [Produces("application/xml")] - public async Task GetReadingListItems(int readingListId, string apiKey) + try { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var userId = await GetUser(apiKey); - var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); + var path = _cacheService.GetCachedPagePath(chapter, pageNumber); + if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {pageNumber}"); - var userWithLists = await _unitOfWork.UserRepository.GetUserWithReadingListsByUsernameAsync(user.UserName); - var readingList = userWithLists.ReadingLists.SingleOrDefault(t => 
t.Id == readingListId); - if (readingList == null) - { - return BadRequest("Reading list does not exist or you don't have access"); - } - - var feed = CreateFeed(readingList.Title + " Reading List", $"{apiKey}/reading-list/{readingListId}", apiKey); - SetFeedId(feed, $"reading-list-{readingListId}"); - - var items = (await _unitOfWork.ReadingListRepository.GetReadingListItemDtosByIdAsync(readingListId, userId)).ToList(); - foreach (var item in items) - { - feed.Entries.Add(new FeedEntry() - { - Id = item.ChapterId.ToString(), - Title = $"{item.SeriesName} Chapter {item.ChapterNumber}", - Links = new List() - { - CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{item.SeriesId}/volume/{item.VolumeId}/chapter/{item.ChapterId}"), - CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/chapter-cover?chapterId={item.ChapterId}") - } - }); - } - - return CreateXmlResult(SerializeXml(feed)); - } - - [HttpGet("{apiKey}/libraries/{libraryId}")] - [Produces("application/xml")] - public async Task GetSeriesForLibrary(int libraryId, string apiKey, [FromQuery] int pageNumber = 0) - { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var userId = await GetUser(apiKey); - var library = - (await _unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(userId)).SingleOrDefault(l => - l.Id == libraryId); - if (library == null) - { - return BadRequest("User does not have access to this library"); - } - - var series = await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, userId, new UserParams() - { - PageNumber = pageNumber, - PageSize = 20 - }, _filterDto); - - var feed = CreateFeed(library.Name, $"{apiKey}/libraries/{libraryId}", apiKey); - SetFeedId(feed, $"library-{library.Name}"); - AddPagination(feed, series, $"{Prefix}{apiKey}/libraries/{libraryId}"); - - foreach (var seriesDto in series) - { - feed.Entries.Add(CreateSeries(seriesDto, apiKey)); - } - - return CreateXmlResult(SerializeXml(feed)); - } - - [HttpGet("{apiKey}/recently-added")] - [Produces("application/xml")] - public async Task GetRecentlyAdded(string apiKey, [FromQuery] int pageNumber = 1) - { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var userId = await GetUser(apiKey); - var recentlyAdded = await _unitOfWork.SeriesRepository.GetRecentlyAdded(0, userId, new UserParams() - { - PageNumber = pageNumber, - PageSize = 20 - }, _filterDto); - - var feed = CreateFeed("Recently Added", $"{apiKey}/recently-added", apiKey); - SetFeedId(feed, "recently-added"); - AddPagination(feed, recentlyAdded, $"{Prefix}{apiKey}/recently-added"); - - foreach (var seriesDto in recentlyAdded) - { - feed.Entries.Add(CreateSeries(seriesDto, apiKey)); - } - - return CreateXmlResult(SerializeXml(feed)); - } - - [HttpGet("{apiKey}/on-deck")] - [Produces("application/xml")] - public async Task GetOnDeck(string apiKey, [FromQuery] int pageNumber = 1) - { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var userId = await GetUser(apiKey); - var userParams = new UserParams() - { - PageNumber = pageNumber, - PageSize = 20 - }; - var results = await _unitOfWork.SeriesRepository.GetOnDeck(userId, 0, userParams, _filterDto); - var listResults = results.DistinctBy(s => s.Name).Skip((userParams.PageNumber - 1) * 
userParams.PageSize) - .Take(userParams.PageSize).ToList(); - var pagedList = new PagedList(listResults, listResults.Count, userParams.PageNumber, userParams.PageSize); - - Response.AddPaginationHeader(pagedList.CurrentPage, pagedList.PageSize, pagedList.TotalCount, pagedList.TotalPages); - - var feed = CreateFeed("On Deck", $"{apiKey}/on-deck", apiKey); - SetFeedId(feed, "on-deck"); - AddPagination(feed, pagedList, $"{Prefix}{apiKey}/on-deck"); - - foreach (var seriesDto in pagedList) - { - feed.Entries.Add(CreateSeries(seriesDto, apiKey)); - } - - return CreateXmlResult(SerializeXml(feed)); - } - - [HttpGet("{apiKey}/series")] - [Produces("application/xml")] - public async Task SearchSeries(string apiKey, [FromQuery] string query) - { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var userId = await GetUser(apiKey); - if (string.IsNullOrEmpty(query)) - { - return BadRequest("You must pass a query parameter"); - } - query = query.Replace(@"%", ""); - // Get libraries user has access to - var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(userId)).ToList(); - - if (!libraries.Any()) return BadRequest("User does not have access to any libraries"); - - var series = await _unitOfWork.SeriesRepository.SearchSeries(libraries.Select(l => l.Id).ToArray(), query); - - var feed = CreateFeed(query, $"{apiKey}/series?query=" + query, apiKey); - SetFeedId(feed, "search-series"); - foreach (var seriesDto in series) - { - feed.Entries.Add(CreateSeries(seriesDto, apiKey)); - } - - return CreateXmlResult(SerializeXml(feed)); - } - - private static void SetFeedId(Feed feed, string id) - { - feed.Id = id; - } - - [HttpGet("{apiKey}/search")] - [Produces("application/xml")] - public async Task GetSearchDescriptor(string apiKey) - { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var feed = new OpenSearchDescription() - { - ShortName = "Search", - Description = "Search for Series", - Url = new SearchLink() - { - Type = FeedLinkType.AtomAcquisition, - Template = $"{Prefix}{apiKey}/series?query=" + "{searchTerms}" - } - }; - - await using var sm = new StringWriter(); - _xmlOpenSearchSerializer.Serialize(sm, feed); - - return CreateXmlResult(sm.ToString().Replace("utf-16", "utf-8")); - } - - [HttpGet("{apiKey}/series/{seriesId}")] - [Produces("application/xml")] - public async Task GetSeries(string apiKey, int seriesId) - { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var userId = await GetUser(apiKey); - var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); - var volumes = await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId); - var feed = CreateFeed(series.Name + " - Volumes", $"{apiKey}/series/{series.Id}", apiKey); - SetFeedId(feed, $"series-{series.Id}"); - feed.Links.Add(CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/series-cover?seriesId={seriesId}")); - foreach (var volumeDto in volumes) - { - feed.Entries.Add(CreateVolume(volumeDto, seriesId, apiKey)); - } - - return CreateXmlResult(SerializeXml(feed)); - } - - [HttpGet("{apiKey}/series/{seriesId}/volume/{volumeId}")] - [Produces("application/xml")] - public async Task GetVolume(string apiKey, int seriesId, int volumeId) - { - if (!(await 
_unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var userId = await GetUser(apiKey); - var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); - var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId); - var chapters = - (await _unitOfWork.ChapterRepository.GetChaptersAsync(volumeId)).OrderBy(x => double.Parse(x.Number), - _chapterSortComparer); - - var feed = CreateFeed(series.Name + " - Volume " + volume.Name + " - Chapters ", $"{apiKey}/series/{seriesId}/volume/{volumeId}", apiKey); - SetFeedId(feed, $"series-{series.Id}-volume-{volume.Id}-chapters"); - foreach (var chapter in chapters) - { - feed.Entries.Add(new FeedEntry() - { - Id = chapter.Id.ToString(), - Title = "Chapter " + chapter.Number, - Links = new List() - { - CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapter.Id}"), - CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/chapter-cover?chapterId={chapter.Id}") - } - }); - } - - return CreateXmlResult(SerializeXml(feed)); - } - - [HttpGet("{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}")] - [Produces("application/xml")] - public async Task GetChapter(string apiKey, int seriesId, int volumeId, int chapterId) - { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var userId = await GetUser(apiKey); - var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); - var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId); - var chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapterId); - var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId); - - var feed = CreateFeed(series.Name + " - Volume " + volume.Name + " - Chapters ", $"{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}", apiKey); - SetFeedId(feed, $"series-{series.Id}-volume-{volume.Id}-chapter-{chapter.Id}-files"); - foreach (var mangaFile in files) - { - feed.Entries.Add(CreateChapter(seriesId, volumeId, chapterId, mangaFile, series, volume, chapter, apiKey)); - } - - return CreateXmlResult(SerializeXml(feed)); - } - - /// - /// Downloads a file - /// - /// - /// - /// - /// Not used. 
Only for Chunky to allow download links - /// - [HttpGet("{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}/download/{filename}")] - public async Task DownloadFile(string apiKey, int seriesId, int volumeId, int chapterId, string filename) - { - if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds) - return BadRequest("OPDS is not enabled on this server"); - var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId); - var (bytes, contentType, fileDownloadName) = await _downloadService.GetFirstFileDownload(files); - return File(bytes, contentType, fileDownloadName); - } - - private static ContentResult CreateXmlResult(string xml) - { - return new ContentResult - { - ContentType = "application/xml", - Content = xml, - StatusCode = 200 - }; - } - - private static void AddPagination(Feed feed, PagedList list, string href) - { - var url = href; - if (href.Contains("?")) - { - url += "&"; - } - else - { - url += "?"; - } - - var pageNumber = Math.Max(list.CurrentPage, 1); - - if (pageNumber > 1) - { - feed.Links.Add(CreateLink(FeedLinkRelation.Prev, FeedLinkType.AtomNavigation, url + "pageNumber=" + (pageNumber - 1))); - } - - if (pageNumber + 1 <= list.TotalPages) - { - feed.Links.Add(CreateLink(FeedLinkRelation.Next, FeedLinkType.AtomNavigation, url + "pageNumber=" + (pageNumber + 1))); - } - - // Update self to point to current page - var selfLink = feed.Links.SingleOrDefault(l => l.Rel == FeedLinkRelation.Self); - if (selfLink != null) - { - selfLink.Href = url + "pageNumber=" + pageNumber; - } - - - feed.Total = list.TotalCount; - feed.ItemsPerPage = list.PageSize; - feed.StartIndex = (Math.Max(list.CurrentPage - 1, 0) * list.PageSize) + 1; - } - - private static FeedEntry CreateSeries(SeriesDto seriesDto, string apiKey) - { - return new FeedEntry() - { - Id = seriesDto.Id.ToString(), - Title = $"{seriesDto.Name} ({seriesDto.Format})", - Summary = seriesDto.Summary, - Links = new List() - { - CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{seriesDto.Id}"), - CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/series-cover?seriesId={seriesDto.Id}"), - CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"/api/image/series-cover?seriesId={seriesDto.Id}") - } - }; - } - - private static FeedEntry CreateSeries(SearchResultDto searchResultDto, string apiKey) - { - return new FeedEntry() - { - Id = searchResultDto.SeriesId.ToString(), - Title = $"{searchResultDto.Name} ({searchResultDto.Format})", - Links = new List() - { - CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{searchResultDto.SeriesId}"), - CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/series-cover?seriesId={searchResultDto.SeriesId}"), - CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"/api/image/series-cover?seriesId={searchResultDto.SeriesId}") - } - }; - } - - private static FeedEntry CreateVolume(VolumeDto volumeDto, int seriesId, string apiKey) - { - return new FeedEntry() - { - Id = volumeDto.Id.ToString(), - Title = "Volume " + volumeDto.Name, - Links = new List() - { - CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{seriesId}/volume/{volumeDto.Id}"), - CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/volume-cover?volumeId={volumeDto.Id}"), - CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, 
$"/api/image/volume-cover?volumeId={volumeDto.Id}") - } - }; - } - - private FeedEntry CreateChapter(int seriesId, int volumeId, int chapterId, MangaFile mangaFile, SeriesDto series, Volume volume, ChapterDto chapter, string apiKey) - { - var fileSize = - DirectoryService.GetHumanReadableBytes(DirectoryService.GetTotalSize(new List() - {mangaFile.FilePath})); - var fileType = _downloadService.GetContentTypeFromFile(mangaFile.FilePath); - var filename = Uri.EscapeDataString(Path.GetFileName(mangaFile.FilePath) ?? string.Empty); - return new FeedEntry() - { - Id = mangaFile.Id.ToString(), - Title = $"{series.Name} - Volume {volume.Name} - Chapter {chapter.Number}", - Extent = fileSize, - Summary = $"{fileType.Split("/")[1]} - {fileSize}", - Format = mangaFile.Format.ToString(), - Links = new List() - { - CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/chapter-cover?chapterId={chapterId}"), - CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"/api/image/chapter-cover?chapterId={chapterId}"), - // Chunky requires a file at the end. Our API ignores this - CreateLink(FeedLinkRelation.Acquisition, fileType, $"{Prefix}{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}/download/{filename}"), - CreatePageStreamLink(seriesId, volumeId, chapterId, mangaFile, apiKey) - }, - Content = new FeedEntryContent() - { - Text = fileType, - Type = "text" - } - }; - } - - [HttpGet("{apiKey}/image")] - public async Task GetPageStreamedImage(string apiKey, [FromQuery] int seriesId, [FromQuery] int volumeId,[FromQuery] int chapterId, [FromQuery] int pageNumber) - { - if (pageNumber < 0) return BadRequest("Page cannot be less than 0"); - var chapter = await _cacheService.Ensure(chapterId); - if (chapter == null) return BadRequest("There was an issue finding image file for reading"); - - try - { - var (path, _) = await _cacheService.GetCachedPagePath(chapter, pageNumber); - if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {pageNumber}"); - - var content = await _directoryService.ReadFileAsync(path); - var format = Path.GetExtension(path).Replace(".", ""); - - // Calculates SHA1 Hash for byte[] - Response.AddCacheHeader(content); - - // Save progress for the user - await _readerService.SaveReadingProgress(new ProgressDto() - { - ChapterId = chapterId, - PageNum = pageNumber, - SeriesId = seriesId, - VolumeId = volumeId - }, await GetUser(apiKey)); - - return File(content, "image/" + format); - } - catch (Exception) - { - _cacheService.CleanupChapters(new []{ chapterId }); - throw; - } - } - - [HttpGet("{apiKey}/favicon")] - public async Task GetFavicon(string apiKey) - { - var files = DirectoryService.GetFilesWithExtension(Path.Join(Directory.GetCurrentDirectory(), ".."), @"\.ico"); - if (files.Length == 0) return BadRequest("Cannot find icon"); - var path = files[0]; var content = await _directoryService.ReadFileAsync(path); var format = Path.GetExtension(path).Replace(".", ""); // Calculates SHA1 Hash for byte[] Response.AddCacheHeader(content); + // Save progress for the user + await _readerService.SaveReadingProgress(new ProgressDto() + { + ChapterId = chapterId, + PageNum = pageNumber, + SeriesId = seriesId, + VolumeId = volumeId + }, await GetUser(apiKey)); + return File(content, "image/" + format); } - - /// - /// Gets the user from the API key - /// - /// - private async Task GetUser(string apiKey) + catch (Exception) { - try - { - var user = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey); - 
return user; - } - catch - { - /* Do nothing */ - } - throw new KavitaException("User does not exist"); - } - - private static FeedLink CreatePageStreamLink(int seriesId, int volumeId, int chapterId, MangaFile mangaFile, string apiKey) - { - var link = CreateLink(FeedLinkRelation.Stream, "image/jpeg", $"{Prefix}{apiKey}/image?seriesId={seriesId}&volumeId={volumeId}&chapterId={chapterId}&pageNumber=" + "{pageNumber}"); - link.TotalPages = mangaFile.Pages; - return link; - } - - private static FeedLink CreateLink(string rel, string type, string href) - { - return new FeedLink() - { - Rel = rel, - Href = href, - Type = type - }; - } - - private static Feed CreateFeed(string title, string href, string apiKey) - { - var link = CreateLink(FeedLinkRelation.Self, string.IsNullOrEmpty(href) ? - FeedLinkType.AtomNavigation : - FeedLinkType.AtomAcquisition, Prefix + href); - - return new Feed() - { - Title = title, - Icon = Prefix + $"{apiKey}/favicon", - Links = new List() - { - link, - CreateLink(FeedLinkRelation.Start, FeedLinkType.AtomNavigation, Prefix + apiKey), - CreateLink(FeedLinkRelation.Search, FeedLinkType.AtomSearch, Prefix + $"{apiKey}/search") - }, - }; - } - - private string SerializeXml(Feed feed) - { - if (feed == null) return string.Empty; - using var sm = new StringWriter(); - _xmlSerializer.Serialize(sm, feed); - return sm.ToString().Replace("utf-16", "utf-8"); // Chunky cannot accept UTF-16 feeds + _cacheService.CleanupChapters(new []{ chapterId }); + throw; } } + + [HttpGet("{apiKey}/favicon")] + public async Task GetFavicon(string apiKey) + { + var files = _directoryService.GetFilesWithExtension(Path.Join(Directory.GetCurrentDirectory(), ".."), @"\.ico"); + if (files.Length == 0) return BadRequest("Cannot find icon"); + var path = files[0]; + var content = await _directoryService.ReadFileAsync(path); + var format = Path.GetExtension(path).Replace(".", ""); + + // Calculates SHA1 Hash for byte[] + Response.AddCacheHeader(content); + + return File(content, "image/" + format); + } + + /// + /// Gets the user from the API key + /// + /// + private async Task GetUser(string apiKey) + { + try + { + var user = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey); + return user; + } + catch + { + /* Do nothing */ + } + throw new KavitaException("User does not exist"); + } + + private static FeedLink CreatePageStreamLink(int seriesId, int volumeId, int chapterId, MangaFile mangaFile, string apiKey) + { + var link = CreateLink(FeedLinkRelation.Stream, "image/jpeg", $"{Prefix}{apiKey}/image?seriesId={seriesId}&volumeId={volumeId}&chapterId={chapterId}&pageNumber=" + "{pageNumber}"); + link.TotalPages = mangaFile.Pages; + return link; + } + + private static FeedLink CreateLink(string rel, string type, string href) + { + return new FeedLink() + { + Rel = rel, + Href = href, + Type = type + }; + } + + private static Feed CreateFeed(string title, string href, string apiKey) + { + var link = CreateLink(FeedLinkRelation.Self, string.IsNullOrEmpty(href) ? 
+ FeedLinkType.AtomNavigation : + FeedLinkType.AtomAcquisition, Prefix + href); + + return new Feed() + { + Title = title, + Icon = Prefix + $"{apiKey}/favicon", + Links = new List() + { + link, + CreateLink(FeedLinkRelation.Start, FeedLinkType.AtomNavigation, Prefix + apiKey), + CreateLink(FeedLinkRelation.Search, FeedLinkType.AtomSearch, Prefix + $"{apiKey}/search") + }, + }; + } + + private string SerializeXml(Feed feed) + { + if (feed == null) return string.Empty; + using var sm = new StringWriter(); + _xmlSerializer.Serialize(sm, feed); + return sm.ToString().Replace("utf-16", "utf-8"); // Chunky cannot accept UTF-16 feeds + } } diff --git a/API/Controllers/PluginController.cs b/API/Controllers/PluginController.cs index b176c0628..5f2d99ba3 100644 --- a/API/Controllers/PluginController.cs +++ b/API/Controllers/PluginController.cs @@ -1,7 +1,7 @@ using System.Threading.Tasks; +using API.Data; using API.DTOs; -using API.Interfaces; -using API.Interfaces.Services; +using API.Services; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Logging; diff --git a/API/Controllers/ReaderController.cs b/API/Controllers/ReaderController.cs index 85eb9139f..9081cef37 100644 --- a/API/Controllers/ReaderController.cs +++ b/API/Controllers/ReaderController.cs @@ -3,13 +3,13 @@ using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading.Tasks; +using API.Data; using API.Data.Repositories; using API.DTOs; using API.DTOs.Reader; using API.Entities; using API.Extensions; -using API.Interfaces; -using API.Interfaces.Services; +using API.Services; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Logging; @@ -50,7 +50,7 @@ namespace API.Controllers try { - var (path, _) = await _cacheService.GetCachedPagePath(chapter, page); + var path = _cacheService.GetCachedPagePath(chapter, page); if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {page}"); var format = Path.GetExtension(path).Replace(".", ""); @@ -90,7 +90,7 @@ namespace API.Controllers LibraryId = dto.LibraryId, IsSpecial = dto.IsSpecial, Pages = dto.Pages, - ChapterTitle = dto.ChapterTitle + ChapterTitle = dto.ChapterTitle ?? 
string.Empty }); } diff --git a/API/Controllers/ReadingListController.cs b/API/Controllers/ReadingListController.cs index e4f781f7b..9391105cb 100644 --- a/API/Controllers/ReadingListController.cs +++ b/API/Controllers/ReadingListController.cs @@ -2,11 +2,11 @@ using System.Linq; using System.Threading.Tasks; using API.Comparators; +using API.Data; using API.DTOs.ReadingLists; using API.Entities; using API.Extensions; using API.Helpers; -using API.Interfaces; using Microsoft.AspNetCore.Mvc; namespace API.Controllers diff --git a/API/Controllers/SeriesController.cs b/API/Controllers/SeriesController.cs index 15438c70e..d2fdbce42 100644 --- a/API/Controllers/SeriesController.cs +++ b/API/Controllers/SeriesController.cs @@ -6,11 +6,10 @@ using API.Data; using API.Data.Repositories; using API.DTOs; using API.DTOs.Filtering; -using API.DTOs.Metadata; using API.Entities; using API.Extensions; using API.Helpers; -using API.Interfaces; +using API.Services; using API.SignalR; using Kavita.Common; using Microsoft.AspNetCore.Authorization; diff --git a/API/Controllers/ServerController.cs b/API/Controllers/ServerController.cs index 35755a48f..45fb22ce5 100644 --- a/API/Controllers/ServerController.cs +++ b/API/Controllers/ServerController.cs @@ -5,7 +5,8 @@ using System.Threading.Tasks; using API.DTOs.Stats; using API.DTOs.Update; using API.Extensions; -using API.Interfaces.Services; +using API.Services; +using API.Services.Tasks; using Kavita.Common; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; @@ -26,10 +27,11 @@ namespace API.Controllers private readonly ICacheService _cacheService; private readonly IVersionUpdaterService _versionUpdaterService; private readonly IStatsService _statsService; + private readonly ICleanupService _cleanupService; public ServerController(IHostApplicationLifetime applicationLifetime, ILogger logger, IConfiguration config, IBackupService backupService, IArchiveService archiveService, ICacheService cacheService, - IVersionUpdaterService versionUpdaterService, IStatsService statsService) + IVersionUpdaterService versionUpdaterService, IStatsService statsService, ICleanupService cleanupService) { _applicationLifetime = applicationLifetime; _logger = logger; @@ -39,6 +41,7 @@ namespace API.Controllers _cacheService = cacheService; _versionUpdaterService = versionUpdaterService; _statsService = statsService; + _cleanupService = cleanupService; } /// @@ -62,7 +65,7 @@ namespace API.Controllers public ActionResult ClearCache() { _logger.LogInformation("{UserName} is clearing cache of server from admin dashboard", User.GetUsername()); - _cacheService.Cleanup(); + _cleanupService.CleanupCacheDirectory(); return Ok(); } @@ -93,7 +96,7 @@ namespace API.Controllers [HttpGet("logs")] public async Task GetLogs() { - var files = _backupService.LogFiles(_config.GetMaxRollingFiles(), _config.GetLoggingFileName()); + var files = _backupService.GetLogFiles(_config.GetMaxRollingFiles(), _config.GetLoggingFileName()); try { var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files, "logs"); diff --git a/API/Controllers/SettingsController.cs b/API/Controllers/SettingsController.cs index c8b3248ba..77b1609e2 100644 --- a/API/Controllers/SettingsController.cs +++ b/API/Controllers/SettingsController.cs @@ -3,12 +3,11 @@ using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading.Tasks; +using API.Data; using API.DTOs.Settings; using API.Entities.Enums; using API.Extensions; using API.Helpers.Converters; -using 
API.Interfaces; -using API.Interfaces.Services; using API.Services; using Kavita.Common; using Kavita.Common.Extensions; diff --git a/API/Controllers/UploadController.cs b/API/Controllers/UploadController.cs index e873c788d..6c37eac62 100644 --- a/API/Controllers/UploadController.cs +++ b/API/Controllers/UploadController.cs @@ -1,8 +1,7 @@ using System; using System.Threading.Tasks; +using API.Data; using API.DTOs.Uploads; -using API.Interfaces; -using API.Interfaces.Services; using API.Services; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; diff --git a/API/Controllers/UsersController.cs b/API/Controllers/UsersController.cs index f5171b819..7662fdf95 100644 --- a/API/Controllers/UsersController.cs +++ b/API/Controllers/UsersController.cs @@ -1,10 +1,10 @@ using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; +using API.Data; using API.Data.Repositories; using API.DTOs; using API.Extensions; -using API.Interfaces; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; diff --git a/API/DTOs/Reader/ChapterInfoDto.cs b/API/DTOs/Reader/ChapterInfoDto.cs index ec512670d..e29f3798c 100644 --- a/API/DTOs/Reader/ChapterInfoDto.cs +++ b/API/DTOs/Reader/ChapterInfoDto.cs @@ -1,4 +1,5 @@ -using API.Entities.Enums; +using System; +using API.Entities.Enums; namespace API.DTOs.Reader { @@ -12,7 +13,7 @@ namespace API.DTOs.Reader public MangaFormat SeriesFormat { get; set; } public int SeriesId { get; set; } public int LibraryId { get; set; } - public string ChapterTitle { get; set; } = ""; + public string ChapterTitle { get; set; } = string.Empty; public int Pages { get; set; } public string FileName { get; set; } public bool IsSpecial { get; set; } diff --git a/API/Data/DbFactory.cs b/API/Data/DbFactory.cs index 4a5412609..5638bae00 100644 --- a/API/Data/DbFactory.cs +++ b/API/Data/DbFactory.cs @@ -1,6 +1,5 @@ using System; using System.Collections.Generic; -using System.IO; using API.Data.Metadata; using API.Entities; using API.Entities.Enums; diff --git a/API/Data/MigrateConfigFiles.cs b/API/Data/MigrateConfigFiles.cs index 752b03192..51ee37167 100644 --- a/API/Data/MigrateConfigFiles.cs +++ b/API/Data/MigrateConfigFiles.cs @@ -1,12 +1,16 @@ using System; using System.Collections.Generic; using System.IO; +using System.IO.Abstractions; using System.Linq; using API.Services; using Kavita.Common; namespace API.Data { + /// + /// A Migration to migrate config related files to the config/ directory for installs prior to v0.4.9. + /// public static class MigrateConfigFiles { private static readonly List LooseLeafFiles = new List() @@ -31,7 +35,7 @@ namespace API.Data /// In v0.4.8 we moved all config files to config/ to match with how docker was setup. This will move all config files from current directory /// to config/ /// - public static void Migrate(bool isDocker) + public static void Migrate(bool isDocker, IDirectoryService directoryService) { Console.WriteLine("Checking if migration to config/ is needed"); @@ -46,8 +50,8 @@ namespace API.Data Console.WriteLine( "Migrating files from pre-v0.4.8. All Kavita config files are now located in config/"); - CopyAppFolders(); - DeleteAppFolders(); + CopyAppFolders(directoryService); + DeleteAppFolders(directoryService); UpdateConfiguration(); @@ -64,14 +68,14 @@ namespace API.Data Console.WriteLine( "Migrating files from pre-v0.4.8. 
All Kavita config files are now located in config/"); - Console.WriteLine($"Creating {DirectoryService.ConfigDirectory}"); - DirectoryService.ExistOrCreate(DirectoryService.ConfigDirectory); + Console.WriteLine($"Creating {directoryService.ConfigDirectory}"); + directoryService.ExistOrCreate(directoryService.ConfigDirectory); try { - CopyLooseLeafFiles(); + CopyLooseLeafFiles(directoryService); - CopyAppFolders(); + CopyAppFolders(directoryService); // Then we need to update the config file to point to the new DB file UpdateConfiguration(); @@ -84,43 +88,43 @@ namespace API.Data // Finally delete everything in the source directory Console.WriteLine("Removing old files"); - DeleteLooseFiles(); - DeleteAppFolders(); + DeleteLooseFiles(directoryService); + DeleteAppFolders(directoryService); Console.WriteLine("Removing old files...DONE"); Console.WriteLine("Migration complete. All config files are now in config/ directory"); } - private static void DeleteAppFolders() + private static void DeleteAppFolders(IDirectoryService directoryService) { foreach (var folderToDelete in AppFolders) { if (!new DirectoryInfo(Path.Join(Directory.GetCurrentDirectory(), folderToDelete)).Exists) continue; - DirectoryService.ClearAndDeleteDirectory(Path.Join(Directory.GetCurrentDirectory(), folderToDelete)); + directoryService.ClearAndDeleteDirectory(Path.Join(Directory.GetCurrentDirectory(), folderToDelete)); } } - private static void DeleteLooseFiles() + private static void DeleteLooseFiles(IDirectoryService directoryService) { var configFiles = LooseLeafFiles.Select(file => new FileInfo(Path.Join(Directory.GetCurrentDirectory(), file))) .Where(f => f.Exists); - DirectoryService.DeleteFiles(configFiles.Select(f => f.FullName)); + directoryService.DeleteFiles(configFiles.Select(f => f.FullName)); } - private static void CopyAppFolders() + private static void CopyAppFolders(IDirectoryService directoryService) { Console.WriteLine("Moving folders to config"); foreach (var folderToMove in AppFolders) { - if (new DirectoryInfo(Path.Join(DirectoryService.ConfigDirectory, folderToMove)).Exists) continue; + if (new DirectoryInfo(Path.Join(directoryService.ConfigDirectory, folderToMove)).Exists) continue; try { - DirectoryService.CopyDirectoryToDirectory( - Path.Join(Directory.GetCurrentDirectory(), folderToMove), - Path.Join(DirectoryService.ConfigDirectory, folderToMove)); + directoryService.CopyDirectoryToDirectory( + Path.Join(directoryService.FileSystem.Directory.GetCurrentDirectory(), folderToMove), + Path.Join(directoryService.ConfigDirectory, folderToMove)); } catch (Exception) { @@ -132,9 +136,9 @@ namespace API.Data Console.WriteLine("Moving folders to config...DONE"); } - private static void CopyLooseLeafFiles() + private static void CopyLooseLeafFiles(IDirectoryService directoryService) { - var configFiles = LooseLeafFiles.Select(file => new FileInfo(Path.Join(Directory.GetCurrentDirectory(), file))) + var configFiles = LooseLeafFiles.Select(file => new FileInfo(Path.Join(directoryService.FileSystem.Directory.GetCurrentDirectory(), file))) .Where(f => f.Exists); // First step is to move all the files Console.WriteLine("Moving files to config/"); @@ -142,7 +146,7 @@ namespace API.Data { try { - fileInfo.CopyTo(Path.Join(DirectoryService.ConfigDirectory, fileInfo.Name)); + fileInfo.CopyTo(Path.Join(directoryService.ConfigDirectory, fileInfo.Name)); } catch (Exception) { diff --git a/API/Data/MigrateCoverImages.cs b/API/Data/MigrateCoverImages.cs index 87e65cb81..83565b805 100644 --- 
a/API/Data/MigrateCoverImages.cs +++ b/API/Data/MigrateCoverImages.cs @@ -29,10 +29,10 @@ namespace API.Data /// /// Run first. Will extract byte[]s from DB and write them to the cover directory. /// - public static void ExtractToImages(DbContext context) + public static void ExtractToImages(DbContext context, IDirectoryService directoryService, IImageService imageService) { Console.WriteLine("Migrating Cover Images to disk. Expect delay."); - DirectoryService.ExistOrCreate(DirectoryService.CoverImageDirectory); + directoryService.ExistOrCreate(directoryService.CoverImageDirectory); Console.WriteLine("Extracting cover images for Series"); var lockedSeries = SqlHelper.RawSqlQuery(context, "Select Id, CoverImage From Series Where CoverImage IS NOT NULL", x => @@ -45,14 +45,14 @@ namespace API.Data foreach (var series in lockedSeries) { if (series.CoverImage == null || !series.CoverImage.Any()) continue; - if (File.Exists(Path.Join(DirectoryService.CoverImageDirectory, + if (File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory, $"{ImageService.GetSeriesFormat(int.Parse(series.Id))}.png"))) continue; try { var stream = new MemoryStream(series.CoverImage); stream.Position = 0; - ImageService.WriteCoverThumbnail(stream, ImageService.GetSeriesFormat(int.Parse(series.Id))); + imageService.WriteCoverThumbnail(stream, ImageService.GetSeriesFormat(int.Parse(series.Id))); } catch (Exception e) { @@ -71,14 +71,14 @@ namespace API.Data foreach (var chapter in chapters) { if (chapter.CoverImage == null || !chapter.CoverImage.Any()) continue; - if (File.Exists(Path.Join(DirectoryService.CoverImageDirectory, + if (directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory, $"{ImageService.GetChapterFormat(int.Parse(chapter.Id), int.Parse(chapter.ParentId))}.png"))) continue; try { var stream = new MemoryStream(chapter.CoverImage); stream.Position = 0; - ImageService.WriteCoverThumbnail(stream, $"{ImageService.GetChapterFormat(int.Parse(chapter.Id), int.Parse(chapter.ParentId))}"); + imageService.WriteCoverThumbnail(stream, $"{ImageService.GetChapterFormat(int.Parse(chapter.Id), int.Parse(chapter.ParentId))}"); } catch (Exception e) { @@ -97,13 +97,13 @@ namespace API.Data foreach (var tag in tags) { if (tag.CoverImage == null || !tag.CoverImage.Any()) continue; - if (File.Exists(Path.Join(DirectoryService.CoverImageDirectory, + if (directoryService.FileSystem.File.Exists(Path.Join(directoryService.CoverImageDirectory, $"{ImageService.GetCollectionTagFormat(int.Parse(tag.Id))}.png"))) continue; try { var stream = new MemoryStream(tag.CoverImage); stream.Position = 0; - ImageService.WriteCoverThumbnail(stream, $"{ImageService.GetCollectionTagFormat(int.Parse(tag.Id))}"); + imageService.WriteCoverThumbnail(stream, $"{ImageService.GetCollectionTagFormat(int.Parse(tag.Id))}"); } catch (Exception e) { @@ -116,13 +116,13 @@ namespace API.Data /// Run after . Will update the DB with names of files that were extracted. 
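Both MigrateCoverImages phases above now receive their dependencies as parameters instead of going through the old static DirectoryService/ImageService helpers. A minimal sketch of how the two phases might be driven in order, assuming the DataContext, IDirectoryService and IImageService instances come out of the application's service provider; the runner type below is illustrative and the real call site is not shown in this section:

using System.Threading.Tasks;
using API.Services;

namespace API.Data
{
    // Hypothetical helper, for illustration only.
    public static class CoverImageMigrationRunner
    {
        public static async Task RunAsync(DataContext context, IDirectoryService directoryService, IImageService imageService)
        {
            // Phase 1: pull the byte[] covers out of the DB and write them under the cover image directory.
            MigrateCoverImages.ExtractToImages(context, directoryService, imageService);

            // Phase 2: update the entity rows to reference the files written in phase 1.
            await MigrateCoverImages.UpdateDatabaseWithImages(context, directoryService);
        }
    }
}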
/// /// - public static async Task UpdateDatabaseWithImages(DataContext context) + public static async Task UpdateDatabaseWithImages(DataContext context, IDirectoryService directoryService) { Console.WriteLine("Updating Series entities"); var seriesCovers = await context.Series.Where(s => !string.IsNullOrEmpty(s.CoverImage)).ToListAsync(); foreach (var series in seriesCovers) { - if (!File.Exists(Path.Join(DirectoryService.CoverImageDirectory, + if (!directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory, $"{ImageService.GetSeriesFormat(series.Id)}.png"))) continue; series.CoverImage = $"{ImageService.GetSeriesFormat(series.Id)}.png"; } @@ -133,7 +133,7 @@ namespace API.Data var chapters = await context.Chapter.ToListAsync(); foreach (var chapter in chapters) { - if (File.Exists(Path.Join(DirectoryService.CoverImageDirectory, + if (directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory, $"{ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId)}.png"))) { chapter.CoverImage = $"{ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId)}.png"; @@ -149,7 +149,7 @@ namespace API.Data { var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), ChapterSortComparerForInChapterSorting).FirstOrDefault(); if (firstChapter == null) continue; - if (File.Exists(Path.Join(DirectoryService.CoverImageDirectory, + if (directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory, $"{ImageService.GetChapterFormat(firstChapter.Id, firstChapter.VolumeId)}.png"))) { volume.CoverImage = $"{ImageService.GetChapterFormat(firstChapter.Id, firstChapter.VolumeId)}.png"; @@ -163,7 +163,7 @@ namespace API.Data var tags = await context.CollectionTag.ToListAsync(); foreach (var tag in tags) { - if (File.Exists(Path.Join(DirectoryService.CoverImageDirectory, + if (directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory, $"{ImageService.GetCollectionTagFormat(tag.Id)}.png"))) { tag.CoverImage = $"{ImageService.GetCollectionTagFormat(tag.Id)}.png"; diff --git a/API/Data/Repositories/AppUserProgressRepository.cs b/API/Data/Repositories/AppUserProgressRepository.cs index c91e61cd0..37fc68693 100644 --- a/API/Data/Repositories/AppUserProgressRepository.cs +++ b/API/Data/Repositories/AppUserProgressRepository.cs @@ -2,78 +2,84 @@ using System.Threading.Tasks; using API.Entities; using API.Entities.Enums; -using API.Interfaces.Repositories; using Microsoft.EntityFrameworkCore; -namespace API.Data.Repositories +namespace API.Data.Repositories; + +public interface IAppUserProgressRepository { - public class AppUserProgressRepository : IAppUserProgressRepository + void Update(AppUserProgress userProgress); + Task CleanupAbandonedChapters(); + Task UserHasProgress(LibraryType libraryType, int userId); + Task GetUserProgressAsync(int chapterId, int userId); +} + +public class AppUserProgressRepository : IAppUserProgressRepository +{ + private readonly DataContext _context; + + public AppUserProgressRepository(DataContext context) { - private readonly DataContext _context; + _context = context; + } - public AppUserProgressRepository(DataContext context) - { - _context = context; - } + public void Update(AppUserProgress userProgress) + { + _context.Entry(userProgress).State = EntityState.Modified; + } - public void Update(AppUserProgress userProgress) - { - 
_context.Entry(userProgress).State = EntityState.Modified; - } + /// + /// This will remove any entries that have chapterIds that no longer exists. This will execute the save as well. + /// + public async Task CleanupAbandonedChapters() + { + var chapterIds = _context.Chapter.Select(c => c.Id); - /// - /// This will remove any entries that have chapterIds that no longer exists. This will execute the save as well. - /// - public async Task CleanupAbandonedChapters() - { - var chapterIds = _context.Chapter.Select(c => c.Id); + var rowsToRemove = await _context.AppUserProgresses + .Where(progress => !chapterIds.Contains(progress.ChapterId)) + .ToListAsync(); - var rowsToRemove = await _context.AppUserProgresses - .Where(progress => !chapterIds.Contains(progress.ChapterId)) - .ToListAsync(); + var rowsToRemoveBookmarks = await _context.AppUserBookmark + .Where(progress => !chapterIds.Contains(progress.ChapterId)) + .ToListAsync(); - var rowsToRemoveBookmarks = await _context.AppUserBookmark - .Where(progress => !chapterIds.Contains(progress.ChapterId)) - .ToListAsync(); + var rowsToRemoveReadingLists = await _context.ReadingListItem + .Where(item => !chapterIds.Contains(item.ChapterId)) + .ToListAsync(); - var rowsToRemoveReadingLists = await _context.ReadingListItem - .Where(item => !chapterIds.Contains(item.ChapterId)) - .ToListAsync(); + _context.RemoveRange(rowsToRemove); + _context.RemoveRange(rowsToRemoveBookmarks); + _context.RemoveRange(rowsToRemoveReadingLists); + return await _context.SaveChangesAsync() > 0 ? rowsToRemove.Count : 0; + } - _context.RemoveRange(rowsToRemove); - _context.RemoveRange(rowsToRemoveBookmarks); - _context.RemoveRange(rowsToRemoveReadingLists); - return await _context.SaveChangesAsync() > 0 ? rowsToRemove.Count : 0; - } + /// + /// Checks if user has any progress against a library of passed type + /// + /// + /// + /// + public async Task UserHasProgress(LibraryType libraryType, int userId) + { + var seriesIds = await _context.AppUserProgresses + .Where(aup => aup.PagesRead > 0 && aup.AppUserId == userId) + .AsNoTracking() + .Select(aup => aup.SeriesId) + .ToListAsync(); - /// - /// Checks if user has any progress against a library of passed type - /// - /// - /// - /// - public async Task UserHasProgress(LibraryType libraryType, int userId) - { - var seriesIds = await _context.AppUserProgresses - .Where(aup => aup.PagesRead > 0 && aup.AppUserId == userId) - .AsNoTracking() - .Select(aup => aup.SeriesId) - .ToListAsync(); + if (seriesIds.Count == 0) return false; - if (seriesIds.Count == 0) return false; + return await _context.Series + .Include(s => s.Library) + .Where(s => seriesIds.Contains(s.Id) && s.Library.Type == libraryType) + .AsNoTracking() + .AnyAsync(); + } - return await _context.Series - .Include(s => s.Library) - .Where(s => seriesIds.Contains(s.Id) && s.Library.Type == libraryType) - .AsNoTracking() - .AnyAsync(); - } - - public async Task GetUserProgressAsync(int chapterId, int userId) - { - return await _context.AppUserProgresses - .Where(p => p.ChapterId == chapterId && p.AppUserId == userId) - .FirstOrDefaultAsync(); - } + public async Task GetUserProgressAsync(int chapterId, int userId) + { + return await _context.AppUserProgresses + .Where(p => p.ChapterId == chapterId && p.AppUserId == userId) + .FirstOrDefaultAsync(); } } diff --git a/API/Data/Repositories/ChapterRepository.cs b/API/Data/Repositories/ChapterRepository.cs index 551648a2e..7fca84c23 100644 --- a/API/Data/Repositories/ChapterRepository.cs +++ 
b/API/Data/Repositories/ChapterRepository.cs @@ -1,195 +1,207 @@ -using System; -using System.Collections.Generic; +using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using API.DTOs; using API.DTOs.Reader; using API.Entities; -using API.Interfaces.Repositories; using AutoMapper; using AutoMapper.QueryableExtensions; using Microsoft.EntityFrameworkCore; -namespace API.Data.Repositories +namespace API.Data.Repositories; + +public interface IChapterRepository { - public class ChapterRepository : IChapterRepository + void Update(Chapter chapter); + Task> GetChaptersByIdsAsync(IList chapterIds); + Task GetChapterInfoDtoAsync(int chapterId); + Task GetChapterTotalPagesAsync(int chapterId); + Task GetChapterAsync(int chapterId); + Task GetChapterDtoAsync(int chapterId); + Task> GetFilesForChapterAsync(int chapterId); + Task> GetChaptersAsync(int volumeId); + Task> GetFilesForChaptersAsync(IReadOnlyList chapterIds); + Task GetChapterCoverImageAsync(int chapterId); + Task> GetAllCoverImagesAsync(); + Task> GetCoverImagesForLockedChaptersAsync(); +} +public class ChapterRepository : IChapterRepository +{ + private readonly DataContext _context; + private readonly IMapper _mapper; + + public ChapterRepository(DataContext context, IMapper mapper) { - private readonly DataContext _context; - private readonly IMapper _mapper; + _context = context; + _mapper = mapper; + } - public ChapterRepository(DataContext context, IMapper mapper) - { - _context = context; - _mapper = mapper; - } + public void Update(Chapter chapter) + { + _context.Entry(chapter).State = EntityState.Modified; + } - public void Update(Chapter chapter) - { - _context.Entry(chapter).State = EntityState.Modified; - } + public async Task> GetChaptersByIdsAsync(IList chapterIds) + { + return await _context.Chapter + .Where(c => chapterIds.Contains(c.Id)) + .Include(c => c.Volume) + .ToListAsync(); + } - public async Task> GetChaptersByIdsAsync(IList chapterIds) - { - return await _context.Chapter - .Where(c => chapterIds.Contains(c.Id)) - .Include(c => c.Volume) - .ToListAsync(); - } + /// + /// Populates a partial IChapterInfoDto + /// + /// + public async Task GetChapterInfoDtoAsync(int chapterId) + { + var chapterInfo = await _context.Chapter + .Where(c => c.Id == chapterId) + .Join(_context.Volume, c => c.VolumeId, v => v.Id, (chapter, volume) => new + { + ChapterNumber = chapter.Range, + VolumeNumber = volume.Number, + VolumeId = volume.Id, + chapter.IsSpecial, + chapter.TitleName, + volume.SeriesId, + chapter.Pages, + }) + .Join(_context.Series, data => data.SeriesId, series => series.Id, (data, series) => new + { + data.ChapterNumber, + data.VolumeNumber, + data.VolumeId, + data.IsSpecial, + data.SeriesId, + data.Pages, + data.TitleName, + SeriesFormat = series.Format, + SeriesName = series.Name, + series.LibraryId + }) + .Select(data => new ChapterInfoDto() + { + ChapterNumber = data.ChapterNumber, + VolumeNumber = data.VolumeNumber + string.Empty, + VolumeId = data.VolumeId, + IsSpecial = data.IsSpecial, + SeriesId =data.SeriesId, + SeriesFormat = data.SeriesFormat, + SeriesName = data.SeriesName, + LibraryId = data.LibraryId, + Pages = data.Pages, + ChapterTitle = data.TitleName + }) + .AsNoTracking() + .SingleOrDefaultAsync(); - /// - /// Populates a partial IChapterInfoDto - /// - /// - public async Task GetChapterInfoDtoAsync(int chapterId) - { - var chapterInfo = await _context.Chapter - .Where(c => c.Id == chapterId) - .Join(_context.Volume, c => c.VolumeId, v => v.Id, (chapter, volume) => new 
- { - ChapterNumber = chapter.Range, - VolumeNumber = volume.Number, - VolumeId = volume.Id, - chapter.IsSpecial, - chapter.TitleName, - volume.SeriesId, - chapter.Pages, - }) - .Join(_context.Series, data => data.SeriesId, series => series.Id, (data, series) => new - { - data.ChapterNumber, - data.VolumeNumber, - data.VolumeId, - data.IsSpecial, - data.SeriesId, - data.Pages, - data.TitleName, - SeriesFormat = series.Format, - SeriesName = series.Name, - series.LibraryId - }) - .Select(data => new ChapterInfoDto() - { - ChapterNumber = data.ChapterNumber, - VolumeNumber = data.VolumeNumber + string.Empty, - VolumeId = data.VolumeId, - IsSpecial = data.IsSpecial, - SeriesId =data.SeriesId, - SeriesFormat = data.SeriesFormat, - SeriesName = data.SeriesName, - LibraryId = data.LibraryId, - Pages = data.Pages, - ChapterTitle = data.TitleName - }) - .AsNoTracking() - .SingleOrDefaultAsync(); + return chapterInfo; + } - return chapterInfo; - } + public Task GetChapterTotalPagesAsync(int chapterId) + { + return _context.Chapter + .Where(c => c.Id == chapterId) + .Select(c => c.Pages) + .SingleOrDefaultAsync(); + } + public async Task GetChapterDtoAsync(int chapterId) + { + var chapter = await _context.Chapter + .Include(c => c.Files) + .ProjectTo(_mapper.ConfigurationProvider) + .AsNoTracking() + .SingleOrDefaultAsync(c => c.Id == chapterId); - public Task GetChapterTotalPagesAsync(int chapterId) - { - return _context.Chapter - .Where(c => c.Id == chapterId) - .Select(c => c.Pages) - .SingleOrDefaultAsync(); - } - public async Task GetChapterDtoAsync(int chapterId) - { - var chapter = await _context.Chapter - .Include(c => c.Files) - .ProjectTo(_mapper.ConfigurationProvider) - .AsNoTracking() - .SingleOrDefaultAsync(c => c.Id == chapterId); + return chapter; + } - return chapter; - } + /// + /// Returns non-tracked files for a given chapterId + /// + /// + /// + public async Task> GetFilesForChapterAsync(int chapterId) + { + return await _context.MangaFile + .Where(c => chapterId == c.ChapterId) + .AsNoTracking() + .ToListAsync(); + } - /// - /// Returns non-tracked files for a given chapterId - /// - /// - /// - public async Task> GetFilesForChapterAsync(int chapterId) - { - return await _context.MangaFile - .Where(c => chapterId == c.ChapterId) - .AsNoTracking() - .ToListAsync(); - } + /// + /// Returns a Chapter for an Id. Includes linked s. + /// + /// + /// + public async Task GetChapterAsync(int chapterId) + { + return await _context.Chapter + .Include(c => c.Files) + .SingleOrDefaultAsync(c => c.Id == chapterId); + } - /// - /// Returns a Chapter for an Id. Includes linked s. - /// - /// - /// - public async Task GetChapterAsync(int chapterId) - { - return await _context.Chapter - .Include(c => c.Files) - .SingleOrDefaultAsync(c => c.Id == chapterId); - } + /// + /// Returns Chapters for a volume id. + /// + /// + /// + public async Task> GetChaptersAsync(int volumeId) + { + return await _context.Chapter + .Where(c => c.VolumeId == volumeId) + .ToListAsync(); + } - /// - /// Returns Chapters for a volume id. - /// - /// - /// - public async Task> GetChaptersAsync(int volumeId) - { - return await _context.Chapter - .Where(c => c.VolumeId == volumeId) - .ToListAsync(); - } + /// + /// Returns the cover image for a chapter id. + /// + /// + /// + public async Task GetChapterCoverImageAsync(int chapterId) + { - /// - /// Returns the cover image for a chapter id. 
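With IChapterRepository declared alongside its implementation in API.Data.Repositories, a consumer only needs that namespace plus a DI registration (not shown in this section). A rough sketch of a hypothetical consumer; the class name is illustrative, and GetChapterTotalPagesAsync is assumed to return Task<int> since it selects Chapter.Pages:

using System.Threading.Tasks;
using API.Data.Repositories;

// Hypothetical consumer, for illustration only.
public class ChapterPageCounter
{
    private readonly IChapterRepository _chapterRepository;

    public ChapterPageCounter(IChapterRepository chapterRepository)
    {
        _chapterRepository = chapterRepository;
    }

    // Delegates straight to the repository; no tracking or mapping happens at this layer.
    public Task<int> GetPagesAsync(int chapterId)
    {
        return _chapterRepository.GetChapterTotalPagesAsync(chapterId);
    }
}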
- /// - /// - /// - public async Task GetChapterCoverImageAsync(int chapterId) - { + return await _context.Chapter + .Where(c => c.Id == chapterId) + .Select(c => c.CoverImage) + .AsNoTracking() + .SingleOrDefaultAsync(); + } - return await _context.Chapter - .Where(c => c.Id == chapterId) - .Select(c => c.CoverImage) - .AsNoTracking() - .SingleOrDefaultAsync(); - } + public async Task> GetAllCoverImagesAsync() + { + return await _context.Chapter + .Select(c => c.CoverImage) + .Where(t => !string.IsNullOrEmpty(t)) + .AsNoTracking() + .ToListAsync(); + } - public async Task> GetAllCoverImagesAsync() - { - return await _context.Chapter - .Select(c => c.CoverImage) - .Where(t => !string.IsNullOrEmpty(t)) - .AsNoTracking() - .ToListAsync(); - } + /// + /// Returns cover images for locked chapters + /// + /// + public async Task> GetCoverImagesForLockedChaptersAsync() + { + return await _context.Chapter + .Where(c => c.CoverImageLocked) + .Select(c => c.CoverImage) + .Where(t => !string.IsNullOrEmpty(t)) + .AsNoTracking() + .ToListAsync(); + } - /// - /// Returns cover images for locked chapters - /// - /// - public async Task> GetCoverImagesForLockedChaptersAsync() - { - return await _context.Chapter - .Where(c => c.CoverImageLocked) - .Select(c => c.CoverImage) - .Where(t => !string.IsNullOrEmpty(t)) - .AsNoTracking() - .ToListAsync(); - } - - /// - /// Returns non-tracked files for a set of - /// - /// List of chapter Ids - /// - public async Task> GetFilesForChaptersAsync(IReadOnlyList chapterIds) - { - return await _context.MangaFile - .Where(c => chapterIds.Contains(c.ChapterId)) - .AsNoTracking() - .ToListAsync(); - } + /// + /// Returns non-tracked files for a set of + /// + /// List of chapter Ids + /// + public async Task> GetFilesForChaptersAsync(IReadOnlyList chapterIds) + { + return await _context.MangaFile + .Where(c => chapterIds.Contains(c.ChapterId)) + .AsNoTracking() + .ToListAsync(); } } diff --git a/API/Data/Repositories/CollectionTagRepository.cs b/API/Data/Repositories/CollectionTagRepository.cs index f04b4b364..111978c72 100644 --- a/API/Data/Repositories/CollectionTagRepository.cs +++ b/API/Data/Repositories/CollectionTagRepository.cs @@ -3,123 +3,136 @@ using System.Linq; using System.Threading.Tasks; using API.DTOs.CollectionTags; using API.Entities; -using API.Interfaces.Repositories; using AutoMapper; using AutoMapper.QueryableExtensions; using Microsoft.EntityFrameworkCore; -namespace API.Data.Repositories +namespace API.Data.Repositories; + +public interface ICollectionTagRepository { - public class CollectionTagRepository : ICollectionTagRepository + void Add(CollectionTag tag); + void Remove(CollectionTag tag); + Task> GetAllTagDtosAsync(); + Task> SearchTagDtosAsync(string searchQuery); + Task GetCoverImageAsync(int collectionTagId); + Task> GetAllPromotedTagDtosAsync(); + Task GetTagAsync(int tagId); + Task GetFullTagAsync(int tagId); + void Update(CollectionTag tag); + Task RemoveTagsWithoutSeries(); + Task> GetAllTagsAsync(); + Task> GetAllCoverImagesAsync(); +} +public class CollectionTagRepository : ICollectionTagRepository +{ + private readonly DataContext _context; + private readonly IMapper _mapper; + + public CollectionTagRepository(DataContext context, IMapper mapper) { - private readonly DataContext _context; - private readonly IMapper _mapper; + _context = context; + _mapper = mapper; + } - public CollectionTagRepository(DataContext context, IMapper mapper) - { - _context = context; - _mapper = mapper; - } + public void Add(CollectionTag tag) + { + 
_context.CollectionTag.Add(tag); + } - public void Add(CollectionTag tag) - { - _context.CollectionTag.Add(tag); - } + public void Remove(CollectionTag tag) + { + _context.CollectionTag.Remove(tag); + } - public void Remove(CollectionTag tag) - { - _context.CollectionTag.Remove(tag); - } + public void Update(CollectionTag tag) + { + _context.Entry(tag).State = EntityState.Modified; + } - public void Update(CollectionTag tag) - { - _context.Entry(tag).State = EntityState.Modified; - } + /// + /// Removes any collection tags without any series + /// + public async Task RemoveTagsWithoutSeries() + { + var tagsToDelete = await _context.CollectionTag + .Include(c => c.SeriesMetadatas) + .Where(c => c.SeriesMetadatas.Count == 0) + .ToListAsync(); + _context.RemoveRange(tagsToDelete); - /// - /// Removes any collection tags without any series - /// - public async Task RemoveTagsWithoutSeries() - { - var tagsToDelete = await _context.CollectionTag - .Include(c => c.SeriesMetadatas) - .Where(c => c.SeriesMetadatas.Count == 0) - .ToListAsync(); - _context.RemoveRange(tagsToDelete); + return await _context.SaveChangesAsync(); + } - return await _context.SaveChangesAsync(); - } + public async Task> GetAllTagsAsync() + { + return await _context.CollectionTag + .OrderBy(c => c.NormalizedTitle) + .ToListAsync(); + } - public async Task> GetAllTagsAsync() - { - return await _context.CollectionTag - .OrderBy(c => c.NormalizedTitle) - .ToListAsync(); - } + public async Task> GetAllCoverImagesAsync() + { + return await _context.CollectionTag + .Select(t => t.CoverImage) + .Where(t => !string.IsNullOrEmpty(t)) + .AsNoTracking() + .ToListAsync(); + } - public async Task> GetAllCoverImagesAsync() - { - return await _context.CollectionTag - .Select(t => t.CoverImage) - .Where(t => !string.IsNullOrEmpty(t)) - .AsNoTracking() - .ToListAsync(); - } + public async Task> GetAllTagDtosAsync() + { + return await _context.CollectionTag + .Select(c => c) + .OrderBy(c => c.NormalizedTitle) + .AsNoTracking() + .ProjectTo(_mapper.ConfigurationProvider) + .ToListAsync(); + } - public async Task> GetAllTagDtosAsync() - { - return await _context.CollectionTag - .Select(c => c) - .OrderBy(c => c.NormalizedTitle) - .AsNoTracking() - .ProjectTo(_mapper.ConfigurationProvider) - .ToListAsync(); - } + public async Task> GetAllPromotedTagDtosAsync() + { + return await _context.CollectionTag + .Where(c => c.Promoted) + .OrderBy(c => c.NormalizedTitle) + .AsNoTracking() + .ProjectTo(_mapper.ConfigurationProvider) + .ToListAsync(); + } - public async Task> GetAllPromotedTagDtosAsync() - { - return await _context.CollectionTag - .Where(c => c.Promoted) - .OrderBy(c => c.NormalizedTitle) - .AsNoTracking() - .ProjectTo(_mapper.ConfigurationProvider) - .ToListAsync(); - } + public async Task GetTagAsync(int tagId) + { + return await _context.CollectionTag + .Where(c => c.Id == tagId) + .SingleOrDefaultAsync(); + } - public async Task GetTagAsync(int tagId) - { - return await _context.CollectionTag - .Where(c => c.Id == tagId) - .SingleOrDefaultAsync(); - } + public async Task GetFullTagAsync(int tagId) + { + return await _context.CollectionTag + .Where(c => c.Id == tagId) + .Include(c => c.SeriesMetadatas) + .SingleOrDefaultAsync(); + } - public async Task GetFullTagAsync(int tagId) - { - return await _context.CollectionTag - .Where(c => c.Id == tagId) - .Include(c => c.SeriesMetadatas) - .SingleOrDefaultAsync(); - } + public async Task> SearchTagDtosAsync(string searchQuery) + { + return await _context.CollectionTag + .Where(s => 
EF.Functions.Like(s.Title, $"%{searchQuery}%") + || EF.Functions.Like(s.NormalizedTitle, $"%{searchQuery}%")) + .OrderBy(s => s.Title) + .AsNoTracking() + .OrderBy(c => c.NormalizedTitle) + .ProjectTo(_mapper.ConfigurationProvider) + .ToListAsync(); + } - public async Task> SearchTagDtosAsync(string searchQuery) - { - return await _context.CollectionTag - .Where(s => EF.Functions.Like(s.Title, $"%{searchQuery}%") - || EF.Functions.Like(s.NormalizedTitle, $"%{searchQuery}%")) - .OrderBy(s => s.Title) - .AsNoTracking() - .OrderBy(c => c.NormalizedTitle) - .ProjectTo(_mapper.ConfigurationProvider) - .ToListAsync(); - } - - public async Task GetCoverImageAsync(int collectionTagId) - { - return await _context.CollectionTag - .Where(c => c.Id == collectionTagId) - .Select(c => c.CoverImage) - .AsNoTracking() - .SingleOrDefaultAsync(); - } + public async Task GetCoverImageAsync(int collectionTagId) + { + return await _context.CollectionTag + .Where(c => c.Id == collectionTagId) + .Select(c => c.CoverImage) + .AsNoTracking() + .SingleOrDefaultAsync(); } } diff --git a/API/Data/Repositories/GenreRepository.cs b/API/Data/Repositories/GenreRepository.cs index 5424bb96e..271524994 100644 --- a/API/Data/Repositories/GenreRepository.cs +++ b/API/Data/Repositories/GenreRepository.cs @@ -2,12 +2,20 @@ using System.Linq; using System.Threading.Tasks; using API.Entities; -using API.Interfaces.Repositories; using AutoMapper; using Microsoft.EntityFrameworkCore; namespace API.Data.Repositories; +public interface IGenreRepository +{ + void Attach(Genre genre); + void Remove(Genre genre); + Task FindByNameAsync(string genreName); + Task> GetAllGenres(); + Task RemoveAllGenreNoLongerAssociated(bool removeExternal = false); +} + public class GenreRepository : IGenreRepository { private readonly DataContext _context; @@ -51,6 +59,6 @@ public class GenreRepository : IGenreRepository public async Task> GetAllGenres() { - return await _context.Genre.ToListAsync();; + return await _context.Genre.ToListAsync(); } } diff --git a/API/Data/Repositories/LibraryRepository.cs b/API/Data/Repositories/LibraryRepository.cs index caae93dd6..26fc517a2 100644 --- a/API/Data/Repositories/LibraryRepository.cs +++ b/API/Data/Repositories/LibraryRepository.cs @@ -5,194 +5,208 @@ using System.Threading.Tasks; using API.DTOs; using API.Entities; using API.Entities.Enums; -using API.Interfaces.Repositories; using AutoMapper; using AutoMapper.QueryableExtensions; using Microsoft.EntityFrameworkCore; -namespace API.Data.Repositories +namespace API.Data.Repositories; + +[Flags] +public enum LibraryIncludes { - - [Flags] - public enum LibraryIncludes - { - None = 1, - Series = 2, - AppUser = 4, - Folders = 8, - // Ratings = 16 - } - - public class LibraryRepository : ILibraryRepository - { - private readonly DataContext _context; - private readonly IMapper _mapper; - - public LibraryRepository(DataContext context, IMapper mapper) - { - _context = context; - _mapper = mapper; - } - - public void Add(Library library) - { - _context.Library.Add(library); - } - - public void Update(Library library) - { - _context.Entry(library).State = EntityState.Modified; - } - - public void Delete(Library library) - { - _context.Library.Remove(library); - } - - public async Task> GetLibraryDtosForUsernameAsync(string userName) - { - return await _context.Library - .Include(l => l.AppUsers) - .Where(library => library.AppUsers.Any(x => x.UserName == userName)) - .OrderBy(l => l.Name) - .ProjectTo(_mapper.ConfigurationProvider) - .AsNoTracking() - 
.AsSingleQuery() - .ToListAsync(); - } - - public async Task> GetLibrariesAsync() - { - return await _context.Library - .Include(l => l.AppUsers) - .ToListAsync(); - } - - public async Task DeleteLibrary(int libraryId) - { - var library = await GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders | LibraryIncludes.Series); - _context.Library.Remove(library); - return await _context.SaveChangesAsync() > 0; - } - - public async Task> GetLibrariesForUserIdAsync(int userId) - { - return await _context.Library - .Include(l => l.AppUsers) - .Where(l => l.AppUsers.Select(ap => ap.Id).Contains(userId)) - .AsNoTracking() - .ToListAsync(); - } - - public async Task GetLibraryTypeAsync(int libraryId) - { - return await _context.Library - .Where(l => l.Id == libraryId) - .AsNoTracking() - .Select(l => l.Type) - .SingleAsync(); - } - - public async Task> GetLibraryDtosAsync() - { - return await _context.Library - .Include(f => f.Folders) - .OrderBy(l => l.Name) - .ProjectTo(_mapper.ConfigurationProvider) - .AsNoTracking() - .ToListAsync(); - } - - public async Task GetLibraryForIdAsync(int libraryId, LibraryIncludes includes) - { - - var query = _context.Library - .Where(x => x.Id == libraryId); - - query = AddIncludesToQuery(query, includes); - return await query.SingleAsync(); - } - - private static IQueryable AddIncludesToQuery(IQueryable query, LibraryIncludes includeFlags) - { - if (includeFlags.HasFlag(LibraryIncludes.Folders)) - { - query = query.Include(l => l.Folders); - } - - if (includeFlags.HasFlag(LibraryIncludes.Series)) - { - query = query.Include(l => l.Series); - } - - if (includeFlags.HasFlag(LibraryIncludes.AppUser)) - { - query = query.Include(l => l.AppUsers); - } - - return query; - } - - - /// - /// This returns a Library with all it's Series -> Volumes -> Chapters. This is expensive. Should only be called when needed. 
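The [Flags] LibraryIncludes enum lets callers compose which navigation properties to load, and AddIncludesToQuery translates each set flag into an EF Core Include. A short sketch of the intended call pattern, mirroring what DeleteLibrary does in this file; the wrapper type is illustrative and GetLibraryForIdAsync is assumed to return Task<Library>:

using System.Threading.Tasks;
using API.Data.Repositories;
using API.Entities;

// Illustrative only: shows how the flags combine at a call site.
public static class LibraryLoadExample
{
    public static Task<Library> LoadWithFoldersAndSeries(ILibraryRepository repo, int libraryId)
    {
        // Folders | Series => both Include(l => l.Folders) and Include(l => l.Series) are applied.
        return repo.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders | LibraryIncludes.Series);
    }
}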
- /// - /// - /// - public async Task GetFullLibraryForIdAsync(int libraryId) - { - return await _context.Library - .Where(x => x.Id == libraryId) - .Include(f => f.Folders) - .Include(l => l.Series) - .ThenInclude(s => s.Metadata) - .Include(l => l.Series) - .ThenInclude(s => s.Volumes) - .ThenInclude(v => v.Chapters) - .ThenInclude(c => c.Files) - .AsSplitQuery() - .SingleAsync(); - } - - /// - /// This is a heavy call, pulls all entities for a Library, except this version only grabs for one series id - /// - /// - /// - /// - public async Task GetFullLibraryForIdAsync(int libraryId, int seriesId) - { - - return await _context.Library - .Where(x => x.Id == libraryId) - .Include(f => f.Folders) - .Include(l => l.Series.Where(s => s.Id == seriesId)) - .ThenInclude(s => s.Metadata) - .Include(l => l.Series.Where(s => s.Id == seriesId)) - .ThenInclude(s => s.Volumes) - .ThenInclude(v => v.Chapters) - .ThenInclude(c => c.Files) - .AsSplitQuery() - .SingleAsync(); - } - - public async Task LibraryExists(string libraryName) - { - return await _context.Library - .AsNoTracking() - .AnyAsync(x => x.Name == libraryName); - } - - public async Task> GetLibrariesForUserAsync(AppUser user) - { - return await _context.Library - .Where(library => library.AppUsers.Contains(user)) - .Include(l => l.Folders) - .AsNoTracking() - .ProjectTo(_mapper.ConfigurationProvider) - .ToListAsync(); - } - - - } + None = 1, + Series = 2, + AppUser = 4, + Folders = 8, + // Ratings = 16 +} + +public interface ILibraryRepository +{ + void Add(Library library); + void Update(Library library); + void Delete(Library library); + Task> GetLibraryDtosAsync(); + Task LibraryExists(string libraryName); + Task GetLibraryForIdAsync(int libraryId, LibraryIncludes includes); + Task GetFullLibraryForIdAsync(int libraryId); + Task GetFullLibraryForIdAsync(int libraryId, int seriesId); + Task> GetLibraryDtosForUsernameAsync(string userName); + Task> GetLibrariesAsync(); + Task DeleteLibrary(int libraryId); + Task> GetLibrariesForUserIdAsync(int userId); + Task GetLibraryTypeAsync(int libraryId); +} + +public class LibraryRepository : ILibraryRepository +{ + private readonly DataContext _context; + private readonly IMapper _mapper; + + public LibraryRepository(DataContext context, IMapper mapper) + { + _context = context; + _mapper = mapper; + } + + public void Add(Library library) + { + _context.Library.Add(library); + } + + public void Update(Library library) + { + _context.Entry(library).State = EntityState.Modified; + } + + public void Delete(Library library) + { + _context.Library.Remove(library); + } + + public async Task> GetLibraryDtosForUsernameAsync(string userName) + { + return await _context.Library + .Include(l => l.AppUsers) + .Where(library => library.AppUsers.Any(x => x.UserName == userName)) + .OrderBy(l => l.Name) + .ProjectTo(_mapper.ConfigurationProvider) + .AsNoTracking() + .AsSingleQuery() + .ToListAsync(); + } + + public async Task> GetLibrariesAsync() + { + return await _context.Library + .Include(l => l.AppUsers) + .ToListAsync(); + } + + public async Task DeleteLibrary(int libraryId) + { + var library = await GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders | LibraryIncludes.Series); + _context.Library.Remove(library); + return await _context.SaveChangesAsync() > 0; + } + + public async Task> GetLibrariesForUserIdAsync(int userId) + { + return await _context.Library + .Include(l => l.AppUsers) + .Where(l => l.AppUsers.Select(ap => ap.Id).Contains(userId)) + .AsNoTracking() + .ToListAsync(); + } + + public 
async Task GetLibraryTypeAsync(int libraryId) + { + return await _context.Library + .Where(l => l.Id == libraryId) + .AsNoTracking() + .Select(l => l.Type) + .SingleAsync(); + } + + public async Task> GetLibraryDtosAsync() + { + return await _context.Library + .Include(f => f.Folders) + .OrderBy(l => l.Name) + .ProjectTo(_mapper.ConfigurationProvider) + .AsNoTracking() + .ToListAsync(); + } + + public async Task GetLibraryForIdAsync(int libraryId, LibraryIncludes includes) + { + + var query = _context.Library + .Where(x => x.Id == libraryId); + + query = AddIncludesToQuery(query, includes); + return await query.SingleAsync(); + } + + private static IQueryable AddIncludesToQuery(IQueryable query, LibraryIncludes includeFlags) + { + if (includeFlags.HasFlag(LibraryIncludes.Folders)) + { + query = query.Include(l => l.Folders); + } + + if (includeFlags.HasFlag(LibraryIncludes.Series)) + { + query = query.Include(l => l.Series); + } + + if (includeFlags.HasFlag(LibraryIncludes.AppUser)) + { + query = query.Include(l => l.AppUsers); + } + + return query; + } + + + /// + /// This returns a Library with all it's Series -> Volumes -> Chapters. This is expensive. Should only be called when needed. + /// + /// + /// + public async Task GetFullLibraryForIdAsync(int libraryId) + { + return await _context.Library + .Where(x => x.Id == libraryId) + .Include(f => f.Folders) + .Include(l => l.Series) + .ThenInclude(s => s.Metadata) + .Include(l => l.Series) + .ThenInclude(s => s.Volumes) + .ThenInclude(v => v.Chapters) + .ThenInclude(c => c.Files) + .AsSplitQuery() + .SingleAsync(); + } + + /// + /// This is a heavy call, pulls all entities for a Library, except this version only grabs for one series id + /// + /// + /// + /// + public async Task GetFullLibraryForIdAsync(int libraryId, int seriesId) + { + + return await _context.Library + .Where(x => x.Id == libraryId) + .Include(f => f.Folders) + .Include(l => l.Series.Where(s => s.Id == seriesId)) + .ThenInclude(s => s.Metadata) + .Include(l => l.Series.Where(s => s.Id == seriesId)) + .ThenInclude(s => s.Volumes) + .ThenInclude(v => v.Chapters) + .ThenInclude(c => c.Files) + .AsSplitQuery() + .SingleAsync(); + } + + public async Task LibraryExists(string libraryName) + { + return await _context.Library + .AsNoTracking() + .AnyAsync(x => x.Name == libraryName); + } + + public async Task> GetLibrariesForUserAsync(AppUser user) + { + return await _context.Library + .Where(library => library.AppUsers.Contains(user)) + .Include(l => l.Folders) + .AsNoTracking() + .ProjectTo(_mapper.ConfigurationProvider) + .ToListAsync(); + } + + } diff --git a/API/Data/Repositories/PersonRepository.cs b/API/Data/Repositories/PersonRepository.cs index ed2d1a178..34ae22d59 100644 --- a/API/Data/Repositories/PersonRepository.cs +++ b/API/Data/Repositories/PersonRepository.cs @@ -2,59 +2,65 @@ using System.Linq; using System.Threading.Tasks; using API.Entities; -using API.Interfaces.Repositories; using AutoMapper; using Microsoft.EntityFrameworkCore; -namespace API.Data.Repositories +namespace API.Data.Repositories; + +public interface IPersonRepository { - public class PersonRepository : IPersonRepository + void Attach(Person person); + void Remove(Person person); + Task> GetAllPeople(); + Task RemoveAllPeopleNoLongerAssociated(bool removeExternal = false); +} + +public class PersonRepository : IPersonRepository +{ + private readonly DataContext _context; + private readonly IMapper _mapper; + + public PersonRepository(DataContext context, IMapper mapper) { - private readonly 
DataContext _context; - private readonly IMapper _mapper; + _context = context; + _mapper = mapper; + } - public PersonRepository(DataContext context, IMapper mapper) - { - _context = context; - _mapper = mapper; - } + public void Attach(Person person) + { + _context.Person.Attach(person); + } - public void Attach(Person person) - { - _context.Person.Attach(person); - } + public void Remove(Person person) + { + _context.Person.Remove(person); + } - public void Remove(Person person) - { - _context.Person.Remove(person); - } + public async Task FindByNameAsync(string name) + { + var normalizedName = Parser.Parser.Normalize(name); + return await _context.Person + .Where(p => normalizedName.Equals(p.NormalizedName)) + .SingleOrDefaultAsync(); + } - public async Task FindByNameAsync(string name) - { - var normalizedName = Parser.Parser.Normalize(name); - return await _context.Person - .Where(p => normalizedName.Equals(p.NormalizedName)) - .SingleOrDefaultAsync(); - } + public async Task RemoveAllPeopleNoLongerAssociated(bool removeExternal = false) + { + var peopleWithNoConnections = await _context.Person + .Include(p => p.SeriesMetadatas) + .Include(p => p.ChapterMetadatas) + .Where(p => p.SeriesMetadatas.Count == 0 && p.ChapterMetadatas.Count == 0) + .ToListAsync(); - public async Task RemoveAllPeopleNoLongerAssociated(bool removeExternal = false) - { - var peopleWithNoConnections = await _context.Person - .Include(p => p.SeriesMetadatas) - .Include(p => p.ChapterMetadatas) - .Where(p => p.SeriesMetadatas.Count == 0 && p.ChapterMetadatas.Count == 0) - .ToListAsync(); + _context.Person.RemoveRange(peopleWithNoConnections); - _context.Person.RemoveRange(peopleWithNoConnections); - - await _context.SaveChangesAsync(); - } + await _context.SaveChangesAsync(); + } - public async Task> GetAllPeople() - { - return await _context.Person - .ToListAsync(); - } + public async Task> GetAllPeople() + { + return await _context.Person + .ToListAsync(); } } diff --git a/API/Data/Repositories/ReadingListRepository.cs b/API/Data/Repositories/ReadingListRepository.cs index fc9199ccb..329ec47a8 100644 --- a/API/Data/Repositories/ReadingListRepository.cs +++ b/API/Data/Repositories/ReadingListRepository.cs @@ -4,175 +4,187 @@ using System.Threading.Tasks; using API.DTOs.ReadingLists; using API.Entities; using API.Helpers; -using API.Interfaces.Repositories; using AutoMapper; using AutoMapper.QueryableExtensions; using Microsoft.EntityFrameworkCore; -namespace API.Data.Repositories +namespace API.Data.Repositories; + +public interface IReadingListRepository { - public class ReadingListRepository : IReadingListRepository + Task> GetReadingListDtosForUserAsync(int userId, bool includePromoted, UserParams userParams); + Task GetReadingListByIdAsync(int readingListId); + Task> GetReadingListItemDtosByIdAsync(int readingListId, int userId); + Task GetReadingListDtoByIdAsync(int readingListId, int userId); + Task> AddReadingProgressModifiers(int userId, IList items); + Task GetReadingListDtoByTitleAsync(string title); + Task> GetReadingListItemsByIdAsync(int readingListId); + void Remove(ReadingListItem item); + void BulkRemove(IEnumerable items); + void Update(ReadingList list); +} + +public class ReadingListRepository : IReadingListRepository +{ + private readonly DataContext _context; + private readonly IMapper _mapper; + + public ReadingListRepository(DataContext context, IMapper mapper) { - private readonly DataContext _context; - private readonly IMapper _mapper; + _context = context; + _mapper = mapper; + } - 
public ReadingListRepository(DataContext context, IMapper mapper) - { - _context = context; - _mapper = mapper; - } + public void Update(ReadingList list) + { + _context.Entry(list).State = EntityState.Modified; + } - public void Update(ReadingList list) - { - _context.Entry(list).State = EntityState.Modified; - } + public void Remove(ReadingListItem item) + { + _context.ReadingListItem.Remove(item); + } - public void Remove(ReadingListItem item) - { - _context.ReadingListItem.Remove(item); - } - - public void BulkRemove(IEnumerable items) - { - _context.ReadingListItem.RemoveRange(items); - } + public void BulkRemove(IEnumerable items) + { + _context.ReadingListItem.RemoveRange(items); + } - public async Task> GetReadingListDtosForUserAsync(int userId, bool includePromoted, UserParams userParams) - { - var query = _context.ReadingList - .Where(l => l.AppUserId == userId || (includePromoted && l.Promoted )) - .OrderBy(l => l.LastModified) - .ProjectTo(_mapper.ConfigurationProvider) - .AsNoTracking(); + public async Task> GetReadingListDtosForUserAsync(int userId, bool includePromoted, UserParams userParams) + { + var query = _context.ReadingList + .Where(l => l.AppUserId == userId || (includePromoted && l.Promoted )) + .OrderBy(l => l.LastModified) + .ProjectTo(_mapper.ConfigurationProvider) + .AsNoTracking(); - return await PagedList.CreateAsync(query, userParams.PageNumber, userParams.PageSize); - } + return await PagedList.CreateAsync(query, userParams.PageNumber, userParams.PageSize); + } - public async Task GetReadingListByIdAsync(int readingListId) - { - return await _context.ReadingList - .Where(r => r.Id == readingListId) - .Include(r => r.Items.OrderBy(item => item.Order)) - .SingleOrDefaultAsync(); - } + public async Task GetReadingListByIdAsync(int readingListId) + { + return await _context.ReadingList + .Where(r => r.Id == readingListId) + .Include(r => r.Items.OrderBy(item => item.Order)) + .SingleOrDefaultAsync(); + } - public async Task> GetReadingListItemDtosByIdAsync(int readingListId, int userId) - { - var userLibraries = _context.Library - .Include(l => l.AppUsers) - .Where(library => library.AppUsers.Any(user => user.Id == userId)) - .AsNoTracking() - .Select(library => library.Id) - .ToList(); + public async Task> GetReadingListItemDtosByIdAsync(int readingListId, int userId) + { + var userLibraries = _context.Library + .Include(l => l.AppUsers) + .Where(library => library.AppUsers.Any(user => user.Id == userId)) + .AsNoTracking() + .Select(library => library.Id) + .ToList(); - var items = await _context.ReadingListItem - .Where(s => s.ReadingListId == readingListId) - .Join(_context.Chapter, s => s.ChapterId, chapter => chapter.Id, (data, chapter) => new - { - TotalPages = chapter.Pages, - ChapterNumber = chapter.Range, - readingListItem = data - }) - .Join(_context.Volume, s => s.readingListItem.VolumeId, volume => volume.Id, (data, volume) => new + var items = await _context.ReadingListItem + .Where(s => s.ReadingListId == readingListId) + .Join(_context.Chapter, s => s.ChapterId, chapter => chapter.Id, (data, chapter) => new + { + TotalPages = chapter.Pages, + ChapterNumber = chapter.Range, + readingListItem = data + }) + .Join(_context.Volume, s => s.readingListItem.VolumeId, volume => volume.Id, (data, volume) => new + { + data.readingListItem, + data.TotalPages, + data.ChapterNumber, + VolumeId = volume.Id, + VolumeNumber = volume.Name, + }) + .Join(_context.Series, s => s.readingListItem.SeriesId, series => series.Id, + (data, s) => new { + SeriesName = s.Name, 
+ SeriesFormat = s.Format, + s.LibraryId, data.readingListItem, data.TotalPages, data.ChapterNumber, - VolumeId = volume.Id, - VolumeNumber = volume.Name, + data.VolumeNumber, + data.VolumeId }) - .Join(_context.Series, s => s.readingListItem.SeriesId, series => series.Id, - (data, s) => new - { - SeriesName = s.Name, - SeriesFormat = s.Format, - s.LibraryId, - data.readingListItem, - data.TotalPages, - data.ChapterNumber, - data.VolumeNumber, - data.VolumeId - }) - .Select(data => new ReadingListItemDto() - { - Id = data.readingListItem.Id, - ChapterId = data.readingListItem.ChapterId, - Order = data.readingListItem.Order, - SeriesId = data.readingListItem.SeriesId, - SeriesName = data.SeriesName, - SeriesFormat = data.SeriesFormat, - PagesTotal = data.TotalPages, - ChapterNumber = data.ChapterNumber, - VolumeNumber = data.VolumeNumber, - LibraryId = data.LibraryId, - VolumeId = data.VolumeId, - ReadingListId = data.readingListItem.ReadingListId - }) - .Where(o => userLibraries.Contains(o.LibraryId)) - .OrderBy(rli => rli.Order) - .AsNoTracking() - .ToListAsync(); - - // Attach progress information - var fetchedChapterIds = items.Select(i => i.ChapterId); - var progresses = await _context.AppUserProgresses - .Where(p => fetchedChapterIds.Contains(p.ChapterId)) - .AsNoTracking() - .ToListAsync(); - - foreach (var progress in progresses) + .Select(data => new ReadingListItemDto() { - var progressItem = items.SingleOrDefault(i => i.ChapterId == progress.ChapterId && i.ReadingListId == readingListId); - if (progressItem == null) continue; + Id = data.readingListItem.Id, + ChapterId = data.readingListItem.ChapterId, + Order = data.readingListItem.Order, + SeriesId = data.readingListItem.SeriesId, + SeriesName = data.SeriesName, + SeriesFormat = data.SeriesFormat, + PagesTotal = data.TotalPages, + ChapterNumber = data.ChapterNumber, + VolumeNumber = data.VolumeNumber, + LibraryId = data.LibraryId, + VolumeId = data.VolumeId, + ReadingListId = data.readingListItem.ReadingListId + }) + .Where(o => userLibraries.Contains(o.LibraryId)) + .OrderBy(rli => rli.Order) + .AsNoTracking() + .ToListAsync(); - progressItem.PagesRead = progress.PagesRead; - } + // Attach progress information + var fetchedChapterIds = items.Select(i => i.ChapterId); + var progresses = await _context.AppUserProgresses + .Where(p => fetchedChapterIds.Contains(p.ChapterId)) + .AsNoTracking() + .ToListAsync(); - return items; - } - - public async Task GetReadingListDtoByIdAsync(int readingListId, int userId) + foreach (var progress in progresses) { - return await _context.ReadingList - .Where(r => r.Id == readingListId && (r.AppUserId == userId || r.Promoted)) - .ProjectTo(_mapper.ConfigurationProvider) - .SingleOrDefaultAsync(); + var progressItem = items.SingleOrDefault(i => i.ChapterId == progress.ChapterId && i.ReadingListId == readingListId); + if (progressItem == null) continue; + + progressItem.PagesRead = progress.PagesRead; } - public async Task> AddReadingProgressModifiers(int userId, IList items) - { - var chapterIds = items.Select(i => i.ChapterId).Distinct().ToList(); - var userProgress = await _context.AppUserProgresses - .Where(p => p.AppUserId == userId && chapterIds.Contains(p.ChapterId)) - .AsNoTracking() - .ToListAsync(); - - foreach (var item in items) - { - var progress = userProgress.Where(p => p.ChapterId == item.ChapterId); - item.PagesRead = progress.Sum(p => p.PagesRead); - } - - return items; - } - - public async Task GetReadingListDtoByTitleAsync(string title) - { - return await _context.ReadingList - 
.Where(r => r.Title.Equals(title)) - .ProjectTo(_mapper.ConfigurationProvider) - .SingleOrDefaultAsync(); - } - - public async Task> GetReadingListItemsByIdAsync(int readingListId) - { - return await _context.ReadingListItem - .Where(r => r.ReadingListId == readingListId) - .OrderBy(r => r.Order) - .ToListAsync(); - } - - + return items; } + + public async Task GetReadingListDtoByIdAsync(int readingListId, int userId) + { + return await _context.ReadingList + .Where(r => r.Id == readingListId && (r.AppUserId == userId || r.Promoted)) + .ProjectTo(_mapper.ConfigurationProvider) + .SingleOrDefaultAsync(); + } + + public async Task> AddReadingProgressModifiers(int userId, IList items) + { + var chapterIds = items.Select(i => i.ChapterId).Distinct().ToList(); + var userProgress = await _context.AppUserProgresses + .Where(p => p.AppUserId == userId && chapterIds.Contains(p.ChapterId)) + .AsNoTracking() + .ToListAsync(); + + foreach (var item in items) + { + var progress = userProgress.Where(p => p.ChapterId == item.ChapterId); + item.PagesRead = progress.Sum(p => p.PagesRead); + } + + return items; + } + + public async Task GetReadingListDtoByTitleAsync(string title) + { + return await _context.ReadingList + .Where(r => r.Title.Equals(title)) + .ProjectTo(_mapper.ConfigurationProvider) + .SingleOrDefaultAsync(); + } + + public async Task> GetReadingListItemsByIdAsync(int readingListId) + { + return await _context.ReadingListItem + .Where(r => r.ReadingListId == readingListId) + .OrderBy(r => r.Order) + .ToListAsync(); + } + + } diff --git a/API/Data/Repositories/SeriesMetadataRepository.cs b/API/Data/Repositories/SeriesMetadataRepository.cs index fea430686..0a3efee26 100644 --- a/API/Data/Repositories/SeriesMetadataRepository.cs +++ b/API/Data/Repositories/SeriesMetadataRepository.cs @@ -1,21 +1,23 @@ -using API.Entities; -using API.Entities.Metadata; -using API.Interfaces.Repositories; +using API.Entities.Metadata; -namespace API.Data.Repositories +namespace API.Data.Repositories; + +public interface ISeriesMetadataRepository { - public class SeriesMetadataRepository : ISeriesMetadataRepository + void Update(SeriesMetadata seriesMetadata); +} + +public class SeriesMetadataRepository : ISeriesMetadataRepository +{ + private readonly DataContext _context; + + public SeriesMetadataRepository(DataContext context) { - private readonly DataContext _context; + _context = context; + } - public SeriesMetadataRepository(DataContext context) - { - _context = context; - } - - public void Update(SeriesMetadata seriesMetadata) - { - _context.SeriesMetadata.Update(seriesMetadata); - } + public void Update(SeriesMetadata seriesMetadata) + { + _context.SeriesMetadata.Update(seriesMetadata); } } diff --git a/API/Data/Repositories/SeriesRepository.cs b/API/Data/Repositories/SeriesRepository.cs index 54a1fc230..2dcddd5da 100644 --- a/API/Data/Repositories/SeriesRepository.cs +++ b/API/Data/Repositories/SeriesRepository.cs @@ -11,405 +11,353 @@ using API.Entities.Enums; using API.Entities.Metadata; using API.Extensions; using API.Helpers; -using API.Interfaces.Repositories; using API.Services.Tasks; using AutoMapper; using AutoMapper.QueryableExtensions; using Microsoft.EntityFrameworkCore; -namespace API.Data.Repositories +namespace API.Data.Repositories; + +public interface ISeriesRepository { - public class SeriesRepository : ISeriesRepository + void Attach(Series series); + void Update(Series series); + void Remove(Series series); + void Remove(IEnumerable series); + Task DoesSeriesNameExistInLibrary(string 
name, MangaFormat format); + /// + /// Adds user information like progress, ratings, etc + /// + /// + /// + /// + /// + Task> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams, FilterDto filter); + /// + /// Does not add user information like progress, ratings, etc. + /// + /// + /// Series name to search for + /// + Task> SearchSeries(int[] libraryIds, string searchQuery); + Task> GetSeriesForLibraryIdAsync(int libraryId); + Task GetSeriesDtoByIdAsync(int seriesId, int userId); + Task DeleteSeriesAsync(int seriesId); + Task GetSeriesByIdAsync(int seriesId); + Task> GetSeriesByIdsAsync(IList seriesIds); + Task GetChapterIdsForSeriesAsync(int[] seriesIds); + Task>> GetChapterIdWithSeriesIdForSeriesAsync(int[] seriesIds); + /// + /// Used to add Progress/Rating information to series list. + /// + /// + /// + /// + Task AddSeriesModifiers(int userId, List series); + Task GetSeriesCoverImageAsync(int seriesId); + Task> GetOnDeck(int userId, int libraryId, UserParams userParams, FilterDto filter); + Task> GetRecentlyAdded(int libraryId, int userId, UserParams userParams, FilterDto filter); // NOTE: Probably put this in LibraryRepo + Task GetSeriesMetadata(int seriesId); + Task> GetSeriesDtoForCollectionAsync(int collectionId, int userId, UserParams userParams); + Task> GetFilesForSeries(int seriesId); + Task> GetSeriesDtoForIdsAsync(IEnumerable seriesIds, int userId); + Task> GetAllCoverImagesAsync(); + Task> GetLockedCoverImagesAsync(); + Task> GetFullSeriesForLibraryIdAsync(int libraryId, UserParams userParams); + Task GetFullSeriesForSeriesIdAsync(int seriesId); + Task GetChunkInfo(int libraryId = 0); + Task> GetSeriesMetadataForIdsAsync(IEnumerable seriesIds); +} + +public class SeriesRepository : ISeriesRepository +{ + private readonly DataContext _context; + private readonly IMapper _mapper; + public SeriesRepository(DataContext context, IMapper mapper) { - private readonly DataContext _context; - private readonly IMapper _mapper; - public SeriesRepository(DataContext context, IMapper mapper) + _context = context; + _mapper = mapper; + } + + public void Attach(Series series) + { + _context.Series.Attach(series); + } + + public void Update(Series series) + { + _context.Entry(series).State = EntityState.Modified; + } + + public void Remove(Series series) + { + _context.Series.Remove(series); + } + + public void Remove(IEnumerable series) + { + _context.Series.RemoveRange(series); + } + + /// + /// Returns if a series name and format exists already in a library + /// + /// Name of series + /// Format of series + /// + public async Task DoesSeriesNameExistInLibrary(string name, MangaFormat format) + { + var libraries = _context.Series + .AsNoTracking() + .Where(x => x.Name.Equals(name) && x.Format == format) + .Select(s => s.LibraryId); + + return await _context.Series + .AsNoTracking() + .Where(s => libraries.Contains(s.LibraryId) && s.Name.Equals(name) && s.Format == format) + .CountAsync() > 1; + } + + public async Task> GetSeriesForLibraryIdAsync(int libraryId) + { + return await _context.Series + .Where(s => s.LibraryId == libraryId) + .OrderBy(s => s.SortName) + .ToListAsync(); + } + + /// + /// Used for to + /// + /// + /// + public async Task> GetFullSeriesForLibraryIdAsync(int libraryId, UserParams userParams) + { + var query = _context.Series + .Where(s => s.LibraryId == libraryId) + .Include(s => s.Metadata) + .ThenInclude(m => m.People) + .Include(s => s.Metadata) + .ThenInclude(m => m.Genres) + .Include(s => s.Volumes) + .ThenInclude(v => v.Chapters) 
+ .ThenInclude(cm => cm.People) + .Include(s => s.Volumes) + .ThenInclude(v => v.Chapters) + .ThenInclude(c => c.Files) + .AsSplitQuery() + .OrderBy(s => s.SortName); + + return await PagedList.CreateAsync(query, userParams.PageNumber, userParams.PageSize); + } + + /// + /// This is a heavy call. Returns all entities down to Files and Library and Series Metadata. + /// + /// + /// + public async Task GetFullSeriesForSeriesIdAsync(int seriesId) + { + return await _context.Series + .Where(s => s.Id == seriesId) + .Include(s => s.Metadata) + .ThenInclude(m => m.People) + .Include(s => s.Metadata) + .ThenInclude(m => m.Genres) + .Include(s => s.Library) + .Include(s => s.Volumes) + .ThenInclude(v => v.Chapters) + .ThenInclude(cm => cm.People) + .Include(s => s.Volumes) + .ThenInclude(v => v.Chapters) + .ThenInclude(c => c.Files) + .AsSplitQuery() + .SingleOrDefaultAsync(); + } + + public async Task> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams, FilterDto filter) + { + var formats = filter.GetSqlFilter(); + var query = _context.Series + .Where(s => s.LibraryId == libraryId && formats.Contains(s.Format)) + .OrderBy(s => s.SortName) + .ProjectTo(_mapper.ConfigurationProvider) + .AsNoTracking(); + + return await PagedList.CreateAsync(query, userParams.PageNumber, userParams.PageSize); + } + + public async Task> SearchSeries(int[] libraryIds, string searchQuery) + { + return await _context.Series + .Where(s => libraryIds.Contains(s.LibraryId)) + .Where(s => EF.Functions.Like(s.Name, $"%{searchQuery}%") + || EF.Functions.Like(s.OriginalName, $"%{searchQuery}%") + || EF.Functions.Like(s.LocalizedName, $"%{searchQuery}%")) + .Include(s => s.Library) + .OrderBy(s => s.SortName) + .AsNoTracking() + .ProjectTo(_mapper.ConfigurationProvider) + .ToListAsync(); + } + + + + + + + + + public async Task GetSeriesDtoByIdAsync(int seriesId, int userId) + { + var series = await _context.Series.Where(x => x.Id == seriesId) + .ProjectTo(_mapper.ConfigurationProvider) + .SingleAsync(); + + var seriesList = new List() {series}; + await AddSeriesModifiers(userId, seriesList); + + return seriesList[0]; + } + + + + + public async Task DeleteSeriesAsync(int seriesId) + { + var series = await _context.Series.Where(s => s.Id == seriesId).SingleOrDefaultAsync(); + _context.Series.Remove(series); + + return await _context.SaveChangesAsync() > 0; + } + + + /// + /// Returns Volumes, Metadata, and Collection Tags + /// + /// + /// + public async Task GetSeriesByIdAsync(int seriesId) + { + return await _context.Series + .Include(s => s.Volumes) + .Include(s => s.Metadata) + .ThenInclude(m => m.CollectionTags) + .Include(s => s.Metadata) + .ThenInclude(m => m.Genres) + .Include(s => s.Metadata) + .ThenInclude(m => m.People) + .Where(s => s.Id == seriesId) + .SingleOrDefaultAsync(); + } + + /// + /// Returns Volumes, Metadata, and Collection Tags + /// + /// + /// + public async Task> GetSeriesByIdsAsync(IList seriesIds) + { + return await _context.Series + .Include(s => s.Volumes) + .Include(s => s.Metadata) + .ThenInclude(m => m.CollectionTags) + .Where(s => seriesIds.Contains(s.Id)) + .ToListAsync(); + } + + public async Task GetChapterIdsForSeriesAsync(int[] seriesIds) + { + var volumes = await _context.Volume + .Where(v => seriesIds.Contains(v.SeriesId)) + .Include(v => v.Chapters) + .ToListAsync(); + + IList chapterIds = new List(); + foreach (var v in volumes) { - _context = context; - _mapper = mapper; - } - - public void Attach(Series series) - { - _context.Series.Attach(series); - } - - 
public void Update(Series series) - { - _context.Entry(series).State = EntityState.Modified; - } - - public void Remove(Series series) - { - _context.Series.Remove(series); - } - - public void Remove(IEnumerable series) - { - _context.Series.RemoveRange(series); - } - - /// - /// Returns if a series name and format exists already in a library - /// - /// Name of series - /// Format of series - /// - public async Task DoesSeriesNameExistInLibrary(string name, MangaFormat format) - { - var libraries = _context.Series - .AsNoTracking() - .Where(x => x.Name.Equals(name) && x.Format == format) - .Select(s => s.LibraryId); - - return await _context.Series - .AsNoTracking() - .Where(s => libraries.Contains(s.LibraryId) && s.Name.Equals(name) && s.Format == format) - .CountAsync() > 1; - } - - public async Task> GetSeriesForLibraryIdAsync(int libraryId) - { - return await _context.Series - .Where(s => s.LibraryId == libraryId) - .OrderBy(s => s.SortName) - .ToListAsync(); - } - - /// - /// Used for to - /// - /// - /// - public async Task> GetFullSeriesForLibraryIdAsync(int libraryId, UserParams userParams) - { - var query = _context.Series - .Where(s => s.LibraryId == libraryId) - .Include(s => s.Metadata) - .ThenInclude(m => m.People) - .Include(s => s.Metadata) - .ThenInclude(m => m.Genres) - .Include(s => s.Volumes) - .ThenInclude(v => v.Chapters) - .ThenInclude(cm => cm.People) - .Include(s => s.Volumes) - .ThenInclude(v => v.Chapters) - .ThenInclude(c => c.Files) - .AsSplitQuery() - .OrderBy(s => s.SortName); - - return await PagedList.CreateAsync(query, userParams.PageNumber, userParams.PageSize); - } - - /// - /// This is a heavy call. Returns all entities down to Files and Library and Series Metadata. - /// - /// - /// - public async Task GetFullSeriesForSeriesIdAsync(int seriesId) - { - return await _context.Series - .Where(s => s.Id == seriesId) - .Include(s => s.Metadata) - .ThenInclude(m => m.People) - .Include(s => s.Metadata) - .ThenInclude(m => m.Genres) - .Include(s => s.Library) - .Include(s => s.Volumes) - .ThenInclude(v => v.Chapters) - .ThenInclude(cm => cm.People) - .Include(s => s.Volumes) - .ThenInclude(v => v.Chapters) - .ThenInclude(c => c.Files) - .AsSplitQuery() - .SingleOrDefaultAsync(); - } - - public async Task> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams, FilterDto filter) - { - var formats = filter.GetSqlFilter(); - var query = _context.Series - .Where(s => s.LibraryId == libraryId && formats.Contains(s.Format)) - .OrderBy(s => s.SortName) - .ProjectTo(_mapper.ConfigurationProvider) - .AsNoTracking(); - - return await PagedList.CreateAsync(query, userParams.PageNumber, userParams.PageSize); - } - - public async Task> SearchSeries(int[] libraryIds, string searchQuery) - { - return await _context.Series - .Where(s => libraryIds.Contains(s.LibraryId)) - .Where(s => EF.Functions.Like(s.Name, $"%{searchQuery}%") - || EF.Functions.Like(s.OriginalName, $"%{searchQuery}%") - || EF.Functions.Like(s.LocalizedName, $"%{searchQuery}%")) - .Include(s => s.Library) - .OrderBy(s => s.SortName) - .AsNoTracking() - .ProjectTo(_mapper.ConfigurationProvider) - .ToListAsync(); - } - - - - - - - - - public async Task GetSeriesDtoByIdAsync(int seriesId, int userId) - { - var series = await _context.Series.Where(x => x.Id == seriesId) - .ProjectTo(_mapper.ConfigurationProvider) - .SingleAsync(); - - var seriesList = new List() {series}; - await AddSeriesModifiers(userId, seriesList); - - return seriesList[0]; - } - - - - - public async Task 
DeleteSeriesAsync(int seriesId) - { - var series = await _context.Series.Where(s => s.Id == seriesId).SingleOrDefaultAsync(); - _context.Series.Remove(series); - - return await _context.SaveChangesAsync() > 0; - } - - - /// - /// Returns Volumes, Metadata, and Collection Tags - /// - /// - /// - public async Task GetSeriesByIdAsync(int seriesId) - { - return await _context.Series - .Include(s => s.Volumes) - .Include(s => s.Metadata) - .ThenInclude(m => m.CollectionTags) - .Include(s => s.Metadata) - .ThenInclude(m => m.Genres) - .Include(s => s.Metadata) - .ThenInclude(m => m.People) - .Where(s => s.Id == seriesId) - .SingleOrDefaultAsync(); - } - - /// - /// Returns Volumes, Metadata, and Collection Tags - /// - /// - /// - public async Task> GetSeriesByIdsAsync(IList seriesIds) - { - return await _context.Series - .Include(s => s.Volumes) - .Include(s => s.Metadata) - .ThenInclude(m => m.CollectionTags) - .Where(s => seriesIds.Contains(s.Id)) - .ToListAsync(); - } - - public async Task GetChapterIdsForSeriesAsync(int[] seriesIds) - { - var volumes = await _context.Volume - .Where(v => seriesIds.Contains(v.SeriesId)) - .Include(v => v.Chapters) - .ToListAsync(); - - IList chapterIds = new List(); - foreach (var v in volumes) + foreach (var c in v.Chapters) { - foreach (var c in v.Chapters) + chapterIds.Add(c.Id); + } + } + + return chapterIds.ToArray(); + } + + /// + /// This returns a dictonary mapping seriesId -> list of chapters back for each series id passed + /// + /// + /// + public async Task>> GetChapterIdWithSeriesIdForSeriesAsync(int[] seriesIds) + { + var volumes = await _context.Volume + .Where(v => seriesIds.Contains(v.SeriesId)) + .Include(v => v.Chapters) + .ToListAsync(); + + var seriesChapters = new Dictionary>(); + foreach (var v in volumes) + { + foreach (var c in v.Chapters) + { + if (!seriesChapters.ContainsKey(v.SeriesId)) { - chapterIds.Add(c.Id); + var list = new List(); + seriesChapters.Add(v.SeriesId, list); } + seriesChapters[v.SeriesId].Add(c.Id); } - - return chapterIds.ToArray(); } - /// - /// This returns a dictonary mapping seriesId -> list of chapters back for each series id passed - /// - /// - /// - public async Task>> GetChapterIdWithSeriesIdForSeriesAsync(int[] seriesIds) + return seriesChapters; + } + + public async Task AddSeriesModifiers(int userId, List series) + { + var userProgress = await _context.AppUserProgresses + .Where(p => p.AppUserId == userId && series.Select(s => s.Id).Contains(p.SeriesId)) + .ToListAsync(); + + var userRatings = await _context.AppUserRating + .Where(r => r.AppUserId == userId && series.Select(s => s.Id).Contains(r.SeriesId)) + .ToListAsync(); + + foreach (var s in series) { - var volumes = await _context.Volume - .Where(v => seriesIds.Contains(v.SeriesId)) - .Include(v => v.Chapters) - .ToListAsync(); - - var seriesChapters = new Dictionary>(); - foreach (var v in volumes) - { - foreach (var c in v.Chapters) - { - if (!seriesChapters.ContainsKey(v.SeriesId)) - { - var list = new List(); - seriesChapters.Add(v.SeriesId, list); - } - seriesChapters[v.SeriesId].Add(c.Id); - } - } - - return seriesChapters; + s.PagesRead = userProgress.Where(p => p.SeriesId == s.Id).Sum(p => p.PagesRead); + var rating = userRatings.SingleOrDefault(r => r.SeriesId == s.Id); + if (rating == null) continue; + s.UserRating = rating.Rating; + s.UserReview = rating.Review; } + } - public async Task AddSeriesModifiers(int userId, List series) - { - var userProgress = await _context.AppUserProgresses - .Where(p => p.AppUserId == userId && 
series.Select(s => s.Id).Contains(p.SeriesId)) - .ToListAsync(); - - var userRatings = await _context.AppUserRating - .Where(r => r.AppUserId == userId && series.Select(s => s.Id).Contains(r.SeriesId)) - .ToListAsync(); - - foreach (var s in series) - { - s.PagesRead = userProgress.Where(p => p.SeriesId == s.Id).Sum(p => p.PagesRead); - var rating = userRatings.SingleOrDefault(r => r.SeriesId == s.Id); - if (rating == null) continue; - s.UserRating = rating.Rating; - s.UserReview = rating.Review; - } - } - - public async Task GetSeriesCoverImageAsync(int seriesId) - { - return await _context.Series - .Where(s => s.Id == seriesId) - .Select(s => s.CoverImage) - .AsNoTracking() - .SingleOrDefaultAsync(); - } + public async Task GetSeriesCoverImageAsync(int seriesId) + { + return await _context.Series + .Where(s => s.Id == seriesId) + .Select(s => s.CoverImage) + .AsNoTracking() + .SingleOrDefaultAsync(); + } - /// - /// Returns a list of Series that were added, ordered by Created desc - /// - /// - /// Library to restrict to, if 0, will apply to all libraries - /// Contains pagination information - /// Optional filter on query - /// - public async Task> GetRecentlyAdded(int libraryId, int userId, UserParams userParams, FilterDto filter) - { - var formats = filter.GetSqlFilter(); + /// + /// Returns a list of Series that were added, ordered by Created desc + /// + /// + /// Library to restrict to, if 0, will apply to all libraries + /// Contains pagination information + /// Optional filter on query + /// + public async Task> GetRecentlyAdded(int libraryId, int userId, UserParams userParams, FilterDto filter) + { + var formats = filter.GetSqlFilter(); - if (libraryId == 0) - { - var userLibraries = _context.Library - .Include(l => l.AppUsers) - .Where(library => library.AppUsers.Any(user => user.Id == userId)) - .AsNoTracking() - .Select(library => library.Id) - .ToList(); - - var allQuery = _context.Series - .Where(s => userLibraries.Contains(s.LibraryId) && formats.Contains(s.Format)) - .OrderByDescending(s => s.Created) - .ProjectTo(_mapper.ConfigurationProvider) - .AsNoTracking(); - - return await PagedList.CreateAsync(allQuery, userParams.PageNumber, userParams.PageSize); - } - - var query = _context.Series - .Where(s => s.LibraryId == libraryId && formats.Contains(s.Format)) - .OrderByDescending(s => s.Created) - .ProjectTo(_mapper.ConfigurationProvider) - .AsSplitQuery() - .AsNoTracking(); - - return await PagedList.CreateAsync(query, userParams.PageNumber, userParams.PageSize); - } - - /// - /// Returns Series that the user has some partial progress on. Sorts based on activity. Sort first by User progress, but if a series - /// has been updated recently, bump it to the front. 
- /// - /// - /// Library to restrict to, if 0, will apply to all libraries - /// Pagination information - /// Optional (default null) filter on query - /// - public async Task> GetOnDeck(int userId, int libraryId, UserParams userParams, FilterDto filter) - { - var formats = filter.GetSqlFilter(); - IList userLibraries; - if (libraryId == 0) - { - userLibraries = _context.Library - .Include(l => l.AppUsers) - .Where(library => library.AppUsers.Any(user => user.Id == userId)) - .AsNoTracking() - .Select(library => library.Id) - .ToList(); - } - else - { - userLibraries = new List() {libraryId}; - } - - var series = _context.Series - .Where(s => formats.Contains(s.Format) && userLibraries.Contains(s.LibraryId)) - .Join(_context.AppUserProgresses, s => s.Id, progress => progress.SeriesId, (s, progress) => new - { - Series = s, - PagesRead = _context.AppUserProgresses.Where(s1 => s1.SeriesId == s.Id && s1.AppUserId == userId).Sum(s1 => s1.PagesRead), - progress.AppUserId, - LastModified = _context.AppUserProgresses.Where(p => p.Id == progress.Id && p.AppUserId == userId).Max(p => p.LastModified) - }) - .AsNoTracking(); - - var retSeries = series.Where(s => s.AppUserId == userId - && s.PagesRead > 0 - && s.PagesRead < s.Series.Pages) - .OrderByDescending(s => s.LastModified) - .ThenByDescending(s => s.Series.LastModified) - .Select(s => s.Series) - .ProjectTo(_mapper.ConfigurationProvider) - .AsSplitQuery() - .AsNoTracking(); - - // Pagination does not work for this query as when we pull the data back, we get multiple rows of the same series. See controller for pagination code - return await retSeries.ToListAsync(); - } - - public async Task GetSeriesMetadata(int seriesId) - { - var metadataDto = await _context.SeriesMetadata - .Where(metadata => metadata.SeriesId == seriesId) - .Include(m => m.Genres) - .AsNoTracking() - .ProjectTo(_mapper.ConfigurationProvider) - .SingleOrDefaultAsync(); - - if (metadataDto != null) - { - metadataDto.Tags = await _context.CollectionTag - .Include(t => t.SeriesMetadatas) - .Where(t => t.SeriesMetadatas.Select(s => s.SeriesId).Contains(seriesId)) - .ProjectTo(_mapper.ConfigurationProvider) - .AsNoTracking() - .ToListAsync(); - } - - return metadataDto; - } - - public async Task> GetSeriesDtoForCollectionAsync(int collectionId, int userId, UserParams userParams) + if (libraryId == 0) { var userLibraries = _context.Library .Include(l => l.AppUsers) @@ -418,118 +366,219 @@ namespace API.Data.Repositories .Select(library => library.Id) .ToList(); - var query = _context.CollectionTag - .Where(s => s.Id == collectionId) - .Include(c => c.SeriesMetadatas) - .ThenInclude(m => m.Series) - .SelectMany(c => c.SeriesMetadatas.Select(sm => sm.Series).Where(s => userLibraries.Contains(s.LibraryId))) - .OrderBy(s => s.LibraryId) - .ThenBy(s => s.SortName) + var allQuery = _context.Series + .Where(s => userLibraries.Contains(s.LibraryId) && formats.Contains(s.Format)) + .OrderByDescending(s => s.Created) .ProjectTo(_mapper.ConfigurationProvider) .AsNoTracking(); - return await PagedList.CreateAsync(query, userParams.PageNumber, userParams.PageSize); + return await PagedList.CreateAsync(allQuery, userParams.PageNumber, userParams.PageSize); } - public async Task> GetFilesForSeries(int seriesId) - { - return await _context.Volume - .Where(v => v.SeriesId == seriesId) - .Include(v => v.Chapters) - .ThenInclude(c => c.Files) - .SelectMany(v => v.Chapters.SelectMany(c => c.Files)) - .AsNoTracking() - .ToListAsync(); - } + var query = _context.Series + .Where(s => s.LibraryId == 
libraryId && formats.Contains(s.Format)) + .OrderByDescending(s => s.Created) + .ProjectTo(_mapper.ConfigurationProvider) + .AsSplitQuery() + .AsNoTracking(); - public async Task> GetSeriesDtoForIdsAsync(IEnumerable seriesIds, int userId) + return await PagedList.CreateAsync(query, userParams.PageNumber, userParams.PageSize); + } + + /// + /// Returns Series that the user has some partial progress on. Sorts based on activity. Sort first by User progress, but if a series + /// has been updated recently, bump it to the front. + /// + /// + /// Library to restrict to, if 0, will apply to all libraries + /// Pagination information + /// Optional (default null) filter on query + /// + public async Task> GetOnDeck(int userId, int libraryId, UserParams userParams, FilterDto filter) + { + var formats = filter.GetSqlFilter(); + IList userLibraries; + if (libraryId == 0) { - var allowedLibraries = _context.Library + userLibraries = _context.Library .Include(l => l.AppUsers) - .Where(library => library.AppUsers.Any(x => x.Id == userId)) - .Select(l => l.Id); - - return await _context.Series - .Where(s => seriesIds.Contains(s.Id) && allowedLibraries.Contains(s.LibraryId)) - .OrderBy(s => s.SortName) - .ProjectTo(_mapper.ConfigurationProvider) + .Where(library => library.AppUsers.Any(user => user.Id == userId)) .AsNoTracking() - .AsSplitQuery() - .ToListAsync(); + .Select(library => library.Id) + .ToList(); + } + else + { + userLibraries = new List() {libraryId}; } - public async Task> GetAllCoverImagesAsync() + var series = _context.Series + .Where(s => formats.Contains(s.Format) && userLibraries.Contains(s.LibraryId)) + .Join(_context.AppUserProgresses, s => s.Id, progress => progress.SeriesId, (s, progress) => new + { + Series = s, + PagesRead = _context.AppUserProgresses.Where(s1 => s1.SeriesId == s.Id && s1.AppUserId == userId).Sum(s1 => s1.PagesRead), + progress.AppUserId, + LastModified = _context.AppUserProgresses.Where(p => p.Id == progress.Id && p.AppUserId == userId).Max(p => p.LastModified) + }) + .AsNoTracking(); + + var retSeries = series.Where(s => s.AppUserId == userId + && s.PagesRead > 0 + && s.PagesRead < s.Series.Pages) + .OrderByDescending(s => s.LastModified) + .ThenByDescending(s => s.Series.LastModified) + .Select(s => s.Series) + .ProjectTo(_mapper.ConfigurationProvider) + .AsSplitQuery() + .AsNoTracking(); + + // Pagination does not work for this query as when we pull the data back, we get multiple rows of the same series. 
See controller for pagination code + return await retSeries.ToListAsync(); + } + + public async Task GetSeriesMetadata(int seriesId) + { + var metadataDto = await _context.SeriesMetadata + .Where(metadata => metadata.SeriesId == seriesId) + .Include(m => m.Genres) + .AsNoTracking() + .ProjectTo(_mapper.ConfigurationProvider) + .SingleOrDefaultAsync(); + + if (metadataDto != null) { - return await _context.Series - .Select(s => s.CoverImage) - .Where(t => !string.IsNullOrEmpty(t)) + metadataDto.Tags = await _context.CollectionTag + .Include(t => t.SeriesMetadatas) + .Where(t => t.SeriesMetadatas.Select(s => s.SeriesId).Contains(seriesId)) + .ProjectTo(_mapper.ConfigurationProvider) .AsNoTracking() .ToListAsync(); } - public async Task> GetLockedCoverImagesAsync() + return metadataDto; + } + + public async Task> GetSeriesDtoForCollectionAsync(int collectionId, int userId, UserParams userParams) + { + var userLibraries = _context.Library + .Include(l => l.AppUsers) + .Where(library => library.AppUsers.Any(user => user.Id == userId)) + .AsNoTracking() + .Select(library => library.Id) + .ToList(); + + var query = _context.CollectionTag + .Where(s => s.Id == collectionId) + .Include(c => c.SeriesMetadatas) + .ThenInclude(m => m.Series) + .SelectMany(c => c.SeriesMetadatas.Select(sm => sm.Series).Where(s => userLibraries.Contains(s.LibraryId))) + .OrderBy(s => s.LibraryId) + .ThenBy(s => s.SortName) + .ProjectTo(_mapper.ConfigurationProvider) + .AsNoTracking(); + + return await PagedList.CreateAsync(query, userParams.PageNumber, userParams.PageSize); + } + + public async Task> GetFilesForSeries(int seriesId) + { + return await _context.Volume + .Where(v => v.SeriesId == seriesId) + .Include(v => v.Chapters) + .ThenInclude(c => c.Files) + .SelectMany(v => v.Chapters.SelectMany(c => c.Files)) + .AsNoTracking() + .ToListAsync(); + } + + public async Task> GetSeriesDtoForIdsAsync(IEnumerable seriesIds, int userId) + { + var allowedLibraries = _context.Library + .Include(l => l.AppUsers) + .Where(library => library.AppUsers.Any(x => x.Id == userId)) + .Select(l => l.Id); + + return await _context.Series + .Where(s => seriesIds.Contains(s.Id) && allowedLibraries.Contains(s.LibraryId)) + .OrderBy(s => s.SortName) + .ProjectTo(_mapper.ConfigurationProvider) + .AsNoTracking() + .AsSplitQuery() + .ToListAsync(); + } + + public async Task> GetAllCoverImagesAsync() + { + return await _context.Series + .Select(s => s.CoverImage) + .Where(t => !string.IsNullOrEmpty(t)) + .AsNoTracking() + .ToListAsync(); + } + + public async Task> GetLockedCoverImagesAsync() + { + return await _context.Series + .Where(s => s.CoverImageLocked && !string.IsNullOrEmpty(s.CoverImage)) + .Select(s => s.CoverImage) + .AsNoTracking() + .ToListAsync(); + } + + /// + /// Returns the number of series for a given library (or all libraries if libraryId is 0) + /// + /// Defaults to 0, library to restrict count to + /// + private async Task GetSeriesCount(int libraryId = 0) + { + if (libraryId > 0) { return await _context.Series - .Where(s => s.CoverImageLocked && !string.IsNullOrEmpty(s.CoverImage)) - .Select(s => s.CoverImage) - .AsNoTracking() - .ToListAsync(); + .Where(s => s.LibraryId == libraryId) + .CountAsync(); } + return await _context.Series.CountAsync(); + } - /// - /// Returns the number of series for a given library (or all libraries if libraryId is 0) - /// - /// Defaults to 0, library to restrict count to - /// - private async Task GetSeriesCount(int libraryId = 0) + /// + /// Returns the number of series that should be 
processed in parallel to optimize speed and memory. Minimum of 50 + /// + /// Defaults to 0 meaning no library + /// + private async Task> GetChunkSize(int libraryId = 0) + { + var totalSeries = await GetSeriesCount(libraryId); + return new Tuple(totalSeries, 50); + } + + public async Task GetChunkInfo(int libraryId = 0) + { + var (totalSeries, chunkSize) = await GetChunkSize(libraryId); + + if (totalSeries == 0) return new Chunk() { - if (libraryId > 0) - { - return await _context.Series - .Where(s => s.LibraryId == libraryId) - .CountAsync(); - } - return await _context.Series.CountAsync(); - } + TotalChunks = 0, + TotalSize = 0, + ChunkSize = 0 + }; - /// - /// Returns the number of series that should be processed in parallel to optimize speed and memory. Minimum of 50 - /// - /// Defaults to 0 meaning no library - /// - private async Task> GetChunkSize(int libraryId = 0) + var totalChunks = Math.Max((int) Math.Ceiling((totalSeries * 1.0) / chunkSize), 1); + + return new Chunk() { - var totalSeries = await GetSeriesCount(libraryId); - return new Tuple(totalSeries, 50); - } + TotalSize = totalSeries, + ChunkSize = chunkSize, + TotalChunks = totalChunks + }; + } - public async Task GetChunkInfo(int libraryId = 0) - { - var (totalSeries, chunkSize) = await GetChunkSize(libraryId); - - if (totalSeries == 0) return new Chunk() - { - TotalChunks = 0, - TotalSize = 0, - ChunkSize = 0 - }; - - var totalChunks = Math.Max((int) Math.Ceiling((totalSeries * 1.0) / chunkSize), 1); - - return new Chunk() - { - TotalSize = totalSeries, - ChunkSize = chunkSize, - TotalChunks = totalChunks - }; - } - - public async Task> GetSeriesMetadataForIdsAsync(IEnumerable seriesIds) - { - return await _context.SeriesMetadata - .Where(sm => seriesIds.Contains(sm.SeriesId)) - .Include(sm => sm.CollectionTags) - .ToListAsync(); - } + public async Task> GetSeriesMetadataForIdsAsync(IEnumerable seriesIds) + { + return await _context.SeriesMetadata + .Where(sm => seriesIds.Contains(sm.SeriesId)) + .Include(sm => sm.CollectionTags) + .ToListAsync(); } } diff --git a/API/Data/Repositories/SettingsRepository.cs b/API/Data/Repositories/SettingsRepository.cs index 4489cf3bd..be66cbe62 100644 --- a/API/Data/Repositories/SettingsRepository.cs +++ b/API/Data/Repositories/SettingsRepository.cs @@ -4,45 +4,50 @@ using System.Threading.Tasks; using API.DTOs.Settings; using API.Entities; using API.Entities.Enums; -using API.Interfaces.Repositories; using AutoMapper; using Microsoft.EntityFrameworkCore; -namespace API.Data.Repositories +namespace API.Data.Repositories; + +public interface ISettingsRepository { - public class SettingsRepository : ISettingsRepository + void Update(ServerSetting settings); + Task GetSettingsDtoAsync(); + Task GetSettingAsync(ServerSettingKey key); + Task> GetSettingsAsync(); +} +public class SettingsRepository : ISettingsRepository +{ + private readonly DataContext _context; + private readonly IMapper _mapper; + + public SettingsRepository(DataContext context, IMapper mapper) { - private readonly DataContext _context; - private readonly IMapper _mapper; + _context = context; + _mapper = mapper; + } - public SettingsRepository(DataContext context, IMapper mapper) - { - _context = context; - _mapper = mapper; - } + public void Update(ServerSetting settings) + { + _context.Entry(settings).State = EntityState.Modified; + } - public void Update(ServerSetting settings) - { - _context.Entry(settings).State = EntityState.Modified; - } + public async Task GetSettingsDtoAsync() + { + var settings = await 
_context.ServerSetting + .Select(x => x) + .AsNoTracking() + .ToListAsync(); + return _mapper.Map(settings); + } - public async Task GetSettingsDtoAsync() - { - var settings = await _context.ServerSetting - .Select(x => x) - .AsNoTracking() - .ToListAsync(); - return _mapper.Map(settings); - } + public Task GetSettingAsync(ServerSettingKey key) + { + return _context.ServerSetting.SingleOrDefaultAsync(x => x.Key == key); + } - public Task GetSettingAsync(ServerSettingKey key) - { - return _context.ServerSetting.SingleOrDefaultAsync(x => x.Key == key); - } - - public async Task> GetSettingsAsync() - { - return await _context.ServerSetting.ToListAsync(); - } + public async Task> GetSettingsAsync() + { + return await _context.ServerSetting.ToListAsync(); } } diff --git a/API/Data/Repositories/UserRepository.cs b/API/Data/Repositories/UserRepository.cs index ece1356fd..7a520cf6a 100644 --- a/API/Data/Repositories/UserRepository.cs +++ b/API/Data/Repositories/UserRepository.cs @@ -6,254 +6,276 @@ using API.Constants; using API.DTOs; using API.DTOs.Reader; using API.Entities; -using API.Interfaces.Repositories; using AutoMapper; using AutoMapper.QueryableExtensions; using Microsoft.AspNetCore.Identity; using Microsoft.EntityFrameworkCore; -namespace API.Data.Repositories +namespace API.Data.Repositories; + +[Flags] +public enum AppUserIncludes { - [Flags] - public enum AppUserIncludes + None = 1, + Progress = 2, + Bookmarks = 4, + ReadingLists = 8, + Ratings = 16 +} + +public interface IUserRepository +{ + void Update(AppUser user); + void Update(AppUserPreferences preferences); + void Update(AppUserBookmark bookmark); + public void Delete(AppUser user); + Task> GetMembersAsync(); + Task> GetAdminUsersAsync(); + Task> GetNonAdminUsersAsync(); + Task IsUserAdmin(AppUser user); + Task GetUserRating(int seriesId, int userId); + Task GetPreferencesAsync(string username); + Task> GetBookmarkDtosForSeries(int userId, int seriesId); + Task> GetBookmarkDtosForVolume(int userId, int volumeId); + Task> GetBookmarkDtosForChapter(int userId, int chapterId); + Task> GetAllBookmarkDtos(int userId); + Task GetBookmarkForPage(int page, int chapterId, int userId); + Task GetUserIdByApiKeyAsync(string apiKey); + Task GetUserByUsernameAsync(string username, AppUserIncludes includeFlags = AppUserIncludes.None); + Task GetUserByIdAsync(int userId, AppUserIncludes includeFlags = AppUserIncludes.None); + Task GetUserIdByUsernameAsync(string username); + Task GetUserWithReadingListsByUsernameAsync(string username); +} + +public class UserRepository : IUserRepository +{ + private readonly DataContext _context; + private readonly UserManager _userManager; + private readonly IMapper _mapper; + + public UserRepository(DataContext context, UserManager userManager, IMapper mapper) { - None = 1, - Progress = 2, - Bookmarks = 4, - ReadingLists = 8, - Ratings = 16 + _context = context; + _userManager = userManager; + _mapper = mapper; } - public class UserRepository : IUserRepository + public void Update(AppUser user) { - private readonly DataContext _context; - private readonly UserManager _userManager; - private readonly IMapper _mapper; + _context.Entry(user).State = EntityState.Modified; + } - public UserRepository(DataContext context, UserManager userManager, IMapper mapper) + public void Update(AppUserPreferences preferences) + { + _context.Entry(preferences).State = EntityState.Modified; + } + + public void Update(AppUserBookmark bookmark) + { + _context.Entry(bookmark).State = EntityState.Modified; + } + + public void 
Delete(AppUser user) + { + _context.AppUser.Remove(user); + } + + /// + /// A one stop shop to get a tracked AppUser instance with any number of JOINs generated by passing bitwise flags. + /// + /// + /// Includes() you want. Pass multiple with flag1 | flag2 + /// + public async Task GetUserByUsernameAsync(string username, AppUserIncludes includeFlags = AppUserIncludes.None) + { + var query = _context.Users + .Where(x => x.UserName == username); + + query = AddIncludesToQuery(query, includeFlags); + + return await query.SingleOrDefaultAsync(); + } + + /// + /// A one stop shop to get a tracked AppUser instance with any number of JOINs generated by passing bitwise flags. + /// + /// + /// Includes() you want. Pass multiple with flag1 | flag2 + /// + public async Task GetUserByIdAsync(int userId, AppUserIncludes includeFlags = AppUserIncludes.None) + { + var query = _context.Users + .Where(x => x.Id == userId); + + query = AddIncludesToQuery(query, includeFlags); + + return await query.SingleOrDefaultAsync(); + } + + public async Task GetBookmarkForPage(int page, int chapterId, int userId) + { + return await _context.AppUserBookmark + .Where(b => b.Page == page && b.ChapterId == chapterId && b.AppUserId == userId) + .SingleOrDefaultAsync(); + } + + private static IQueryable AddIncludesToQuery(IQueryable query, AppUserIncludes includeFlags) + { + if (includeFlags.HasFlag(AppUserIncludes.Bookmarks)) { - _context = context; - _userManager = userManager; - _mapper = mapper; + query = query.Include(u => u.Bookmarks); } - public void Update(AppUser user) + if (includeFlags.HasFlag(AppUserIncludes.Progress)) { - _context.Entry(user).State = EntityState.Modified; + query = query.Include(u => u.Progresses); } - public void Update(AppUserPreferences preferences) + if (includeFlags.HasFlag(AppUserIncludes.ReadingLists)) { - _context.Entry(preferences).State = EntityState.Modified; + query = query.Include(u => u.ReadingLists); } - public void Update(AppUserBookmark bookmark) + if (includeFlags.HasFlag(AppUserIncludes.Ratings)) { - _context.Entry(bookmark).State = EntityState.Modified; + query = query.Include(u => u.Ratings); } - public void Delete(AppUser user) - { - _context.AppUser.Remove(user); - } + return query; + } - /// - /// A one stop shop to get a tracked AppUser instance with any number of JOINs generated by passing bitwise flags. - /// - /// - /// Includes() you want. Pass multiple with flag1 | flag2 - /// - public async Task GetUserByUsernameAsync(string username, AppUserIncludes includeFlags = AppUserIncludes.None) - { - var query = _context.Users - .Where(x => x.UserName == username); - query = AddIncludesToQuery(query, includeFlags); + /// + /// This fetches the Id for a user. Use whenever you just need an ID. + /// + /// + /// + public async Task GetUserIdByUsernameAsync(string username) + { + return await _context.Users + .Where(x => x.UserName == username) + .Select(u => u.Id) + .SingleOrDefaultAsync(); + } - return await query.SingleOrDefaultAsync(); - } + /// + /// Gets an AppUser by username. Returns back Reading List and their Items. + /// + /// + /// + public async Task GetUserWithReadingListsByUsernameAsync(string username) + { + return await _context.Users + .Include(u => u.ReadingLists) + .ThenInclude(l => l.Items) + .SingleOrDefaultAsync(x => x.UserName == username); + } - /// - /// A one stop shop to get a tracked AppUser instance with any number of JOINs generated by passing bitwise flags. - /// - /// - /// Includes() you want. 
Pass multiple with flag1 | flag2 - /// - public async Task GetUserByIdAsync(int userId, AppUserIncludes includeFlags = AppUserIncludes.None) - { - var query = _context.Users - .Where(x => x.Id == userId); + public async Task> GetAdminUsersAsync() + { + return await _userManager.GetUsersInRoleAsync(PolicyConstants.AdminRole); + } - query = AddIncludesToQuery(query, includeFlags); + public async Task> GetNonAdminUsersAsync() + { + return await _userManager.GetUsersInRoleAsync(PolicyConstants.PlebRole); + } - return await query.SingleOrDefaultAsync(); - } + public async Task IsUserAdmin(AppUser user) + { + return await _userManager.IsInRoleAsync(user, PolicyConstants.AdminRole); + } - public async Task GetBookmarkForPage(int page, int chapterId, int userId) - { - return await _context.AppUserBookmark - .Where(b => b.Page == page && b.ChapterId == chapterId && b.AppUserId == userId) - .SingleOrDefaultAsync(); - } + public async Task GetUserRating(int seriesId, int userId) + { + return await _context.AppUserRating.Where(r => r.SeriesId == seriesId && r.AppUserId == userId) + .SingleOrDefaultAsync(); + } - private static IQueryable AddIncludesToQuery(IQueryable query, AppUserIncludes includeFlags) - { - if (includeFlags.HasFlag(AppUserIncludes.Bookmarks)) + public async Task GetPreferencesAsync(string username) + { + return await _context.AppUserPreferences + .Include(p => p.AppUser) + .SingleOrDefaultAsync(p => p.AppUser.UserName == username); + } + + public async Task> GetBookmarkDtosForSeries(int userId, int seriesId) + { + return await _context.AppUserBookmark + .Where(x => x.AppUserId == userId && x.SeriesId == seriesId) + .OrderBy(x => x.Page) + .AsNoTracking() + .ProjectTo(_mapper.ConfigurationProvider) + .ToListAsync(); + } + + public async Task> GetBookmarkDtosForVolume(int userId, int volumeId) + { + return await _context.AppUserBookmark + .Where(x => x.AppUserId == userId && x.VolumeId == volumeId) + .OrderBy(x => x.Page) + .AsNoTracking() + .ProjectTo(_mapper.ConfigurationProvider) + .ToListAsync(); + } + + public async Task> GetBookmarkDtosForChapter(int userId, int chapterId) + { + return await _context.AppUserBookmark + .Where(x => x.AppUserId == userId && x.ChapterId == chapterId) + .OrderBy(x => x.Page) + .AsNoTracking() + .ProjectTo(_mapper.ConfigurationProvider) + .ToListAsync(); + } + + public async Task> GetAllBookmarkDtos(int userId) + { + return await _context.AppUserBookmark + .Where(x => x.AppUserId == userId) + .OrderBy(x => x.Page) + .AsNoTracking() + .ProjectTo(_mapper.ConfigurationProvider) + .ToListAsync(); + } + + /// + /// Fetches the UserId by API Key. This does not include any extra information + /// + /// + /// + public async Task GetUserIdByApiKeyAsync(string apiKey) + { + return await _context.AppUser + .Where(u => u.ApiKey.Equals(apiKey)) + .Select(u => u.Id) + .SingleOrDefaultAsync(); + } + + + public async Task> GetMembersAsync() + { + return await _context.Users + .Include(x => x.Libraries) + .Include(r => r.UserRoles) + .ThenInclude(r => r.Role) + .OrderBy(u => u.UserName) + .Select(u => new MemberDto { - query = query.Include(u => u.Bookmarks); - } - - if (includeFlags.HasFlag(AppUserIncludes.Progress)) - { - query = query.Include(u => u.Progresses); - } - - if (includeFlags.HasFlag(AppUserIncludes.ReadingLists)) - { - query = query.Include(u => u.ReadingLists); - } - - if (includeFlags.HasFlag(AppUserIncludes.Ratings)) - { - query = query.Include(u => u.Ratings); - } - - return query; - } - - - /// - /// This fetches the Id for a user. 
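A hedged usage sketch of the AppUserIncludes flags and the AddIncludesToQuery helper above. The consumer class is hypothetical, and the Task<AppUser> return type is an assumption, since the flattened signatures in this diff drop the generic arguments:

using System.Threading.Tasks;
using API.Data.Repositories;
using API.Entities;

public class ReaderStateLoader // hypothetical consumer, not part of this PR
{
    private readonly IUserRepository _userRepository;

    public ReaderStateLoader(IUserRepository userRepository)
    {
        _userRepository = userRepository;
    }

    // Combine flags with | to request exactly the JOINs this call site needs;
    // the default AppUserIncludes.None keeps the query lean.
    public Task<AppUser> LoadForReading(string username)
        => _userRepository.GetUserByUsernameAsync(username,
            AppUserIncludes.Progress | AppUserIncludes.Bookmarks);
}

Because the enum is marked [Flags], call sites compose values with | and AddIncludesToQuery translates each HasFlag hit into a single Include, so every caller pays only for the navigation properties it asked for.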
Use whenever you just need an ID. - /// - /// - /// - public async Task GetUserIdByUsernameAsync(string username) - { - return await _context.Users - .Where(x => x.UserName == username) - .Select(u => u.Id) - .SingleOrDefaultAsync(); - } - - /// - /// Gets an AppUser by username. Returns back Reading List and their Items. - /// - /// - /// - public async Task GetUserWithReadingListsByUsernameAsync(string username) - { - return await _context.Users - .Include(u => u.ReadingLists) - .ThenInclude(l => l.Items) - .SingleOrDefaultAsync(x => x.UserName == username); - } - - public async Task> GetAdminUsersAsync() - { - return await _userManager.GetUsersInRoleAsync(PolicyConstants.AdminRole); - } - - public async Task> GetNonAdminUsersAsync() - { - return await _userManager.GetUsersInRoleAsync(PolicyConstants.PlebRole); - } - - public async Task IsUserAdmin(AppUser user) - { - return await _userManager.IsInRoleAsync(user, PolicyConstants.AdminRole); - } - - public async Task GetUserRating(int seriesId, int userId) - { - return await _context.AppUserRating.Where(r => r.SeriesId == seriesId && r.AppUserId == userId) - .SingleOrDefaultAsync(); - } - - public async Task GetPreferencesAsync(string username) - { - return await _context.AppUserPreferences - .Include(p => p.AppUser) - .SingleOrDefaultAsync(p => p.AppUser.UserName == username); - } - - public async Task> GetBookmarkDtosForSeries(int userId, int seriesId) - { - return await _context.AppUserBookmark - .Where(x => x.AppUserId == userId && x.SeriesId == seriesId) - .OrderBy(x => x.Page) - .AsNoTracking() - .ProjectTo(_mapper.ConfigurationProvider) - .ToListAsync(); - } - - public async Task> GetBookmarkDtosForVolume(int userId, int volumeId) - { - return await _context.AppUserBookmark - .Where(x => x.AppUserId == userId && x.VolumeId == volumeId) - .OrderBy(x => x.Page) - .AsNoTracking() - .ProjectTo(_mapper.ConfigurationProvider) - .ToListAsync(); - } - - public async Task> GetBookmarkDtosForChapter(int userId, int chapterId) - { - return await _context.AppUserBookmark - .Where(x => x.AppUserId == userId && x.ChapterId == chapterId) - .OrderBy(x => x.Page) - .AsNoTracking() - .ProjectTo(_mapper.ConfigurationProvider) - .ToListAsync(); - } - - public async Task> GetAllBookmarkDtos(int userId) - { - return await _context.AppUserBookmark - .Where(x => x.AppUserId == userId) - .OrderBy(x => x.Page) - .AsNoTracking() - .ProjectTo(_mapper.ConfigurationProvider) - .ToListAsync(); - } - - /// - /// Fetches the UserId by API Key. 
This does not include any extra information - /// - /// - /// - public async Task GetUserIdByApiKeyAsync(string apiKey) - { - return await _context.AppUser - .Where(u => u.ApiKey.Equals(apiKey)) - .Select(u => u.Id) - .SingleOrDefaultAsync(); - } - - - public async Task> GetMembersAsync() - { - return await _context.Users - .Include(x => x.Libraries) - .Include(r => r.UserRoles) - .ThenInclude(r => r.Role) - .OrderBy(u => u.UserName) - .Select(u => new MemberDto + Id = u.Id, + Username = u.UserName, + Created = u.Created, + LastActive = u.LastActive, + Roles = u.UserRoles.Select(r => r.Role.Name).ToList(), + Libraries = u.Libraries.Select(l => new LibraryDto { - Id = u.Id, - Username = u.UserName, - Created = u.Created, - LastActive = u.LastActive, - Roles = u.UserRoles.Select(r => r.Role.Name).ToList(), - Libraries = u.Libraries.Select(l => new LibraryDto - { - Name = l.Name, - Type = l.Type, - LastScanned = l.LastScanned, - Folders = l.Folders.Select(x => x.Path).ToList() - }).ToList() - }) - .AsNoTracking() - .ToListAsync(); - } + Name = l.Name, + Type = l.Type, + LastScanned = l.LastScanned, + Folders = l.Folders.Select(x => x.Path).ToList() + }).ToList() + }) + .AsNoTracking() + .ToListAsync(); } } diff --git a/API/Data/Repositories/VolumeRepository.cs b/API/Data/Repositories/VolumeRepository.cs index e4d0f84a2..7d2d76d2d 100644 --- a/API/Data/Repositories/VolumeRepository.cs +++ b/API/Data/Repositories/VolumeRepository.cs @@ -4,206 +4,220 @@ using System.Threading.Tasks; using API.Comparators; using API.DTOs; using API.Entities; -using API.Interfaces.Repositories; using AutoMapper; using AutoMapper.QueryableExtensions; using Microsoft.EntityFrameworkCore; -namespace API.Data.Repositories +namespace API.Data.Repositories; + +public interface IVolumeRepository { - public class VolumeRepository : IVolumeRepository - { - private readonly DataContext _context; - private readonly IMapper _mapper; - - public VolumeRepository(DataContext context, IMapper mapper) - { - _context = context; - _mapper = mapper; - } - - public void Add(Volume volume) - { - _context.Volume.Add(volume); - } - - public void Update(Volume volume) - { - _context.Entry(volume).State = EntityState.Modified; - } - - public void Remove(Volume volume) - { - _context.Volume.Remove(volume); - } - - /// - /// Returns a list of non-tracked files for a given volume. - /// - /// - /// - public async Task> GetFilesForVolume(int volumeId) - { - return await _context.Chapter - .Where(c => volumeId == c.VolumeId) - .Include(c => c.Files) - .SelectMany(c => c.Files) - .AsNoTracking() - .ToListAsync(); - } - - /// - /// Returns the cover image file for the given volume - /// - /// - /// - public async Task GetVolumeCoverImageAsync(int volumeId) - { - return await _context.Volume - .Where(v => v.Id == volumeId) - .Select(v => v.CoverImage) - .AsNoTracking() - .SingleOrDefaultAsync(); - } - - /// - /// Returns all chapter Ids belonging to a list of Volume Ids - /// - /// - /// - public async Task> GetChapterIdsByVolumeIds(IReadOnlyList volumeIds) - { - return await _context.Chapter - .Where(c => volumeIds.Contains(c.VolumeId)) - .Select(c => c.Id) - .ToListAsync(); - } - - /// - /// Returns all volumes that contain a seriesId in passed array. 
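The GetFilesForVolume and GetFilesForSeries queries in these hunks flatten each chapter's Files collection with SelectMany. A tiny LINQ-to-Objects illustration of that flattening, on made-up data rather than the project's entities:

using System;
using System.Collections.Generic;
using System.Linq;

public static class SelectManyExample
{
    // Each chapter carries a Files collection; SelectMany concatenates them into one flat list,
    // which is what the repository queries do against the Chapter table.
    public static List<string> AllFiles(IEnumerable<(string Chapter, List<string> Files)> chapters)
        => chapters.SelectMany(c => c.Files).ToList();

    public static void Main()
    {
        var chapters = new[]
        {
            (Chapter: "1", Files: new List<string> { "ch1_p01.jpg" }),
            (Chapter: "2", Files: new List<string> { "ch2_p01.jpg", "ch2_p02.jpg" }),
        };

        // Prints: ch1_p01.jpg, ch2_p01.jpg, ch2_p02.jpg
        Console.WriteLine(string.Join(", ", AllFiles(chapters)));
    }
}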
- /// - /// - /// - public async Task> GetVolumesForSeriesAsync(IList seriesIds, bool includeChapters = false) - { - var query = _context.Volume - .Where(v => seriesIds.Contains(v.SeriesId)); - - if (includeChapters) - { - query = query.Include(v => v.Chapters); - } - return await query.ToListAsync(); - } - - /// - /// Returns an individual Volume including Chapters and Files and Reading Progress for a given volumeId - /// - /// - /// - /// - public async Task GetVolumeDtoAsync(int volumeId, int userId) - { - var volume = await _context.Volume - .Where(vol => vol.Id == volumeId) - .Include(vol => vol.Chapters) - .ThenInclude(c => c.Files) - .ProjectTo(_mapper.ConfigurationProvider) - .SingleAsync(vol => vol.Id == volumeId); - - var volumeList = new List() {volume}; - await AddVolumeModifiers(userId, volumeList); - - return volumeList[0]; - } - - /// - /// Returns the full Volumes including Chapters and Files for a given series - /// - /// - /// - public async Task> GetVolumes(int seriesId) - { - return await _context.Volume - .Where(vol => vol.SeriesId == seriesId) - .Include(vol => vol.Chapters) - .ThenInclude(c => c.Files) - .OrderBy(vol => vol.Number) - .ToListAsync(); - } - - /// - /// Returns a single volume with Chapter and Files - /// - /// - /// - public async Task GetVolumeAsync(int volumeId) - { - return await _context.Volume - .Include(vol => vol.Chapters) - .ThenInclude(c => c.Files) - .SingleOrDefaultAsync(vol => vol.Id == volumeId); - } - - - /// - /// Returns all volumes for a given series with progress information attached. Includes all Chapters as well. - /// - /// - /// - /// - public async Task> GetVolumesDtoAsync(int seriesId, int userId) - { - var volumes = await _context.Volume - .Where(vol => vol.SeriesId == seriesId) - .Include(vol => vol.Chapters) - .ThenInclude(c => c.People) // TODO: Measure cost of this - .OrderBy(volume => volume.Number) - .ProjectTo(_mapper.ConfigurationProvider) - .AsNoTracking() - .ToListAsync(); - - await AddVolumeModifiers(userId, volumes); - SortSpecialChapters(volumes); - - return volumes; - } - - public async Task GetVolumeByIdAsync(int volumeId) - { - return await _context.Volume.SingleOrDefaultAsync(x => x.Id == volumeId); - } - - - private static void SortSpecialChapters(IEnumerable volumes) - { - var sorter = new NaturalSortComparer(); - foreach (var v in volumes.Where(vDto => vDto.Number == 0)) - { - v.Chapters = v.Chapters.OrderBy(x => x.Range, sorter).ToList(); - } - } - - - private async Task AddVolumeModifiers(int userId, IReadOnlyCollection volumes) - { - var volIds = volumes.Select(s => s.Id); - var userProgress = await _context.AppUserProgresses - .Where(p => p.AppUserId == userId && volIds.Contains(p.VolumeId)) - .AsNoTracking() - .ToListAsync(); - - foreach (var v in volumes) - { - foreach (var c in v.Chapters) - { - c.PagesRead = userProgress.Where(p => p.ChapterId == c.Id).Sum(p => p.PagesRead); - } - - v.PagesRead = userProgress.Where(p => p.VolumeId == v.Id).Sum(p => p.PagesRead); - } - } - - - } + void Add(Volume volume); + void Update(Volume volume); + void Remove(Volume volume); + Task> GetFilesForVolume(int volumeId); + Task GetVolumeCoverImageAsync(int volumeId); + Task> GetChapterIdsByVolumeIds(IReadOnlyList volumeIds); + Task> GetVolumesDtoAsync(int seriesId, int userId); + Task GetVolumeAsync(int volumeId); + Task GetVolumeDtoAsync(int volumeId, int userId); + Task> GetVolumesForSeriesAsync(IList seriesIds, bool includeChapters = false); + Task> GetVolumes(int seriesId); + Task GetVolumeByIdAsync(int volumeId); 
+} +public class VolumeRepository : IVolumeRepository +{ + private readonly DataContext _context; + private readonly IMapper _mapper; + + public VolumeRepository(DataContext context, IMapper mapper) + { + _context = context; + _mapper = mapper; + } + + public void Add(Volume volume) + { + _context.Volume.Add(volume); + } + + public void Update(Volume volume) + { + _context.Entry(volume).State = EntityState.Modified; + } + + public void Remove(Volume volume) + { + _context.Volume.Remove(volume); + } + + /// + /// Returns a list of non-tracked files for a given volume. + /// + /// + /// + public async Task> GetFilesForVolume(int volumeId) + { + return await _context.Chapter + .Where(c => volumeId == c.VolumeId) + .Include(c => c.Files) + .SelectMany(c => c.Files) + .AsNoTracking() + .ToListAsync(); + } + + /// + /// Returns the cover image file for the given volume + /// + /// + /// + public async Task GetVolumeCoverImageAsync(int volumeId) + { + return await _context.Volume + .Where(v => v.Id == volumeId) + .Select(v => v.CoverImage) + .AsNoTracking() + .SingleOrDefaultAsync(); + } + + /// + /// Returns all chapter Ids belonging to a list of Volume Ids + /// + /// + /// + public async Task> GetChapterIdsByVolumeIds(IReadOnlyList volumeIds) + { + return await _context.Chapter + .Where(c => volumeIds.Contains(c.VolumeId)) + .Select(c => c.Id) + .ToListAsync(); + } + + /// + /// Returns all volumes that contain a seriesId in passed array. + /// + /// + /// Include chapter entities + /// + public async Task> GetVolumesForSeriesAsync(IList seriesIds, bool includeChapters = false) + { + var query = _context.Volume + .Where(v => seriesIds.Contains(v.SeriesId)); + + if (includeChapters) + { + query = query.Include(v => v.Chapters); + } + return await query.ToListAsync(); + } + + /// + /// Returns an individual Volume including Chapters and Files and Reading Progress for a given volumeId + /// + /// + /// + /// + public async Task GetVolumeDtoAsync(int volumeId, int userId) + { + var volume = await _context.Volume + .Where(vol => vol.Id == volumeId) + .Include(vol => vol.Chapters) + .ThenInclude(c => c.Files) + .ProjectTo(_mapper.ConfigurationProvider) + .SingleAsync(vol => vol.Id == volumeId); + + var volumeList = new List() {volume}; + await AddVolumeModifiers(userId, volumeList); + + return volumeList[0]; + } + + /// + /// Returns the full Volumes including Chapters and Files for a given series + /// + /// + /// + public async Task> GetVolumes(int seriesId) + { + return await _context.Volume + .Where(vol => vol.SeriesId == seriesId) + .Include(vol => vol.Chapters) + .ThenInclude(c => c.Files) + .OrderBy(vol => vol.Number) + .ToListAsync(); + } + + /// + /// Returns a single volume with Chapter and Files + /// + /// + /// + public async Task GetVolumeAsync(int volumeId) + { + return await _context.Volume + .Include(vol => vol.Chapters) + .ThenInclude(c => c.Files) + .SingleOrDefaultAsync(vol => vol.Id == volumeId); + } + + + /// + /// Returns all volumes for a given series with progress information attached. Includes all Chapters as well. 
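A hedged usage sketch of the includeChapters switch on GetVolumesForSeriesAsync above. The caller is hypothetical, and the IList<int> parameter and Task<IEnumerable<Volume>> return types are assumptions, since the flattened signatures in this diff drop the generic arguments:

using System.Collections.Generic;
using System.Threading.Tasks;
using API.Data.Repositories;
using API.Entities;

public class SeriesFileScanner // hypothetical consumer, not part of this PR
{
    private readonly IVolumeRepository _volumeRepository;

    public SeriesFileScanner(IVolumeRepository volumeRepository)
    {
        _volumeRepository = volumeRepository;
    }

    // Request the chapter graph only when it is needed; the repository adds the
    // Include(v => v.Chapters) JOIN only when includeChapters is true.
    public Task<IEnumerable<Volume>> LoadVolumesWithChapters(IList<int> seriesIds)
        => _volumeRepository.GetVolumesForSeriesAsync(seriesIds, includeChapters: true);
}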
+ /// + /// + /// + /// + public async Task> GetVolumesDtoAsync(int seriesId, int userId) + { + var volumes = await _context.Volume + .Where(vol => vol.SeriesId == seriesId) + .Include(vol => vol.Chapters) + .ThenInclude(c => c.People) + .OrderBy(volume => volume.Number) + .ProjectTo(_mapper.ConfigurationProvider) + .AsNoTracking() + .ToListAsync(); + + await AddVolumeModifiers(userId, volumes); + SortSpecialChapters(volumes); + + return volumes; + } + + public async Task GetVolumeByIdAsync(int volumeId) + { + return await _context.Volume.SingleOrDefaultAsync(x => x.Id == volumeId); + } + + + private static void SortSpecialChapters(IEnumerable volumes) + { + var sorter = new NaturalSortComparer(); + foreach (var v in volumes.Where(vDto => vDto.Number == 0)) + { + v.Chapters = v.Chapters.OrderBy(x => x.Range, sorter).ToList(); + } + } + + + private async Task AddVolumeModifiers(int userId, IReadOnlyCollection volumes) + { + var volIds = volumes.Select(s => s.Id); + var userProgress = await _context.AppUserProgresses + .Where(p => p.AppUserId == userId && volIds.Contains(p.VolumeId)) + .AsNoTracking() + .ToListAsync(); + + foreach (var v in volumes) + { + foreach (var c in v.Chapters) + { + c.PagesRead = userProgress.Where(p => p.ChapterId == c.Id).Sum(p => p.PagesRead); + } + + v.PagesRead = userProgress.Where(p => p.VolumeId == v.Id).Sum(p => p.PagesRead); + } + } + + } diff --git a/API/Data/Seed.cs b/API/Data/Seed.cs index 9cfbaeaa4..b03f1fec1 100644 --- a/API/Data/Seed.cs +++ b/API/Data/Seed.cs @@ -35,13 +35,13 @@ namespace API.Data } } - public static async Task SeedSettings(DataContext context) + public static async Task SeedSettings(DataContext context, IDirectoryService directoryService) { await context.Database.EnsureCreatedAsync(); IList defaultSettings = new List() { - new () {Key = ServerSettingKey.CacheDirectory, Value = DirectoryService.CacheDirectory}, + new () {Key = ServerSettingKey.CacheDirectory, Value = directoryService.CacheDirectory}, new () {Key = ServerSettingKey.TaskScan, Value = "daily"}, new () {Key = ServerSettingKey.LoggingLevel, Value = "Information"}, // Not used from DB, but DB is sync with appSettings.json new () {Key = ServerSettingKey.TaskBackup, Value = "weekly"}, @@ -71,7 +71,7 @@ namespace API.Data context.ServerSetting.First(s => s.Key == ServerSettingKey.LoggingLevel).Value = Configuration.LogLevel + string.Empty; context.ServerSetting.First(s => s.Key == ServerSettingKey.CacheDirectory).Value = - DirectoryService.CacheDirectory + string.Empty; + directoryService.CacheDirectory + string.Empty; context.ServerSetting.First(s => s.Key == ServerSettingKey.BackupDirectory).Value = DirectoryService.BackupDirectory + string.Empty; diff --git a/API/Data/UnitOfWork.cs b/API/Data/UnitOfWork.cs index 2fde0580f..42ef365ea 100644 --- a/API/Data/UnitOfWork.cs +++ b/API/Data/UnitOfWork.cs @@ -1,85 +1,102 @@ using System.Threading.Tasks; using API.Data.Repositories; using API.Entities; -using API.Interfaces; -using API.Interfaces.Repositories; using AutoMapper; using Microsoft.AspNetCore.Identity; -namespace API.Data +namespace API.Data; + +public interface IUnitOfWork { - public class UnitOfWork : IUnitOfWork + ISeriesRepository SeriesRepository { get; } + IUserRepository UserRepository { get; } + ILibraryRepository LibraryRepository { get; } + IVolumeRepository VolumeRepository { get; } + ISettingsRepository SettingsRepository { get; } + IAppUserProgressRepository AppUserProgressRepository { get; } + ICollectionTagRepository CollectionTagRepository { get; } + 
IChapterRepository ChapterRepository { get; } + IReadingListRepository ReadingListRepository { get; } + ISeriesMetadataRepository SeriesMetadataRepository { get; } + IPersonRepository PersonRepository { get; } + IGenreRepository GenreRepository { get; } + bool Commit(); + Task CommitAsync(); + bool HasChanges(); + bool Rollback(); + Task RollbackAsync(); +} +public class UnitOfWork : IUnitOfWork +{ + private readonly DataContext _context; + private readonly IMapper _mapper; + private readonly UserManager _userManager; + + public UnitOfWork(DataContext context, IMapper mapper, UserManager userManager) { - private readonly DataContext _context; - private readonly IMapper _mapper; - private readonly UserManager _userManager; + _context = context; + _mapper = mapper; + _userManager = userManager; + } - public UnitOfWork(DataContext context, IMapper mapper, UserManager userManager) - { - _context = context; - _mapper = mapper; - _userManager = userManager; - } + public ISeriesRepository SeriesRepository => new SeriesRepository(_context, _mapper); + public IUserRepository UserRepository => new UserRepository(_context, _userManager, _mapper); + public ILibraryRepository LibraryRepository => new LibraryRepository(_context, _mapper); - public ISeriesRepository SeriesRepository => new SeriesRepository(_context, _mapper); - public IUserRepository UserRepository => new UserRepository(_context, _userManager, _mapper); - public ILibraryRepository LibraryRepository => new LibraryRepository(_context, _mapper); + public IVolumeRepository VolumeRepository => new VolumeRepository(_context, _mapper); - public IVolumeRepository VolumeRepository => new VolumeRepository(_context, _mapper); + public ISettingsRepository SettingsRepository => new SettingsRepository(_context, _mapper); - public ISettingsRepository SettingsRepository => new SettingsRepository(_context, _mapper); + public IAppUserProgressRepository AppUserProgressRepository => new AppUserProgressRepository(_context); + public ICollectionTagRepository CollectionTagRepository => new CollectionTagRepository(_context, _mapper); + public IChapterRepository ChapterRepository => new ChapterRepository(_context, _mapper); + public IReadingListRepository ReadingListRepository => new ReadingListRepository(_context, _mapper); + public ISeriesMetadataRepository SeriesMetadataRepository => new SeriesMetadataRepository(_context); + public IPersonRepository PersonRepository => new PersonRepository(_context, _mapper); + public IGenreRepository GenreRepository => new GenreRepository(_context, _mapper); - public IAppUserProgressRepository AppUserProgressRepository => new AppUserProgressRepository(_context); - public ICollectionTagRepository CollectionTagRepository => new CollectionTagRepository(_context, _mapper); - public IChapterRepository ChapterRepository => new ChapterRepository(_context, _mapper); - public IReadingListRepository ReadingListRepository => new ReadingListRepository(_context, _mapper); - public ISeriesMetadataRepository SeriesMetadataRepository => new SeriesMetadataRepository(_context); - public IPersonRepository PersonRepository => new PersonRepository(_context, _mapper); - public IGenreRepository GenreRepository => new GenreRepository(_context, _mapper); + /// + /// Commits changes to the DB. Completes the open transaction. + /// + /// + public bool Commit() + { + return _context.SaveChanges() > 0; + } + /// + /// Commits changes to the DB. Completes the open transaction. 
+ /// + /// + public async Task CommitAsync() + { + return await _context.SaveChangesAsync() > 0; + } - /// - /// Commits changes to the DB. Completes the open transaction. - /// - /// - public bool Commit() - { - return _context.SaveChanges() > 0; - } - /// - /// Commits changes to the DB. Completes the open transaction. - /// - /// - public async Task CommitAsync() - { - return await _context.SaveChangesAsync() > 0; - } + /// + /// Is the DB Context aware of Changes in loaded entities + /// + /// + public bool HasChanges() + { + return _context.ChangeTracker.HasChanges(); + } - /// - /// Is the DB Context aware of Changes in loaded entities - /// - /// - public bool HasChanges() - { - return _context.ChangeTracker.HasChanges(); - } - - /// - /// Rollback transaction - /// - /// - public async Task RollbackAsync() - { - await _context.DisposeAsync(); - return true; - } - /// - /// Rollback transaction - /// - /// - public bool Rollback() - { - _context.Dispose(); - return true; - } + /// + /// Rollback transaction + /// + /// + public async Task RollbackAsync() + { + await _context.DisposeAsync(); + return true; + } + /// + /// Rollback transaction + /// + /// + public bool Rollback() + { + _context.Dispose(); + return true; } } diff --git a/API/Entities/Chapter.cs b/API/Entities/Chapter.cs index 41bc62cd7..84f8f978d 100644 --- a/API/Entities/Chapter.cs +++ b/API/Entities/Chapter.cs @@ -2,7 +2,6 @@ using System.Collections.Generic; using API.Entities.Enums; using API.Entities.Interfaces; -using API.Entities.Metadata; using API.Parser; namespace API.Entities diff --git a/API/Entities/Genre.cs b/API/Entities/Genre.cs index fbd64852e..447f14943 100644 --- a/API/Entities/Genre.cs +++ b/API/Entities/Genre.cs @@ -8,7 +8,7 @@ namespace API.Entities public class Genre { public int Id { get; set; } - public string Title { get; set; } // TODO: Rename this to Title + public string Title { get; set; } public string NormalizedTitle { get; set; } public bool ExternalTag { get; set; } diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs index 9974cad2d..ef2525547 100644 --- a/API/Extensions/ApplicationServiceExtensions.cs +++ b/API/Extensions/ApplicationServiceExtensions.cs @@ -1,8 +1,6 @@ using System.IO.Abstractions; using API.Data; using API.Helpers; -using API.Interfaces; -using API.Interfaces.Services; using API.Services; using API.Services.Tasks; using API.SignalR.Presence; @@ -37,6 +35,7 @@ namespace API.Extensions services.AddScoped(); services.AddScoped(); services.AddScoped(); + services.AddScoped(); services.AddScoped(); diff --git a/API/Extensions/DirectoryInfoExtensions.cs b/API/Extensions/DirectoryInfoExtensions.cs deleted file mode 100644 index b92901046..000000000 --- a/API/Extensions/DirectoryInfoExtensions.cs +++ /dev/null @@ -1,86 +0,0 @@ -using System.IO; -using System.Linq; -using API.Comparators; - -namespace API.Extensions -{ - public static class DirectoryInfoExtensions - { - private static readonly NaturalSortComparer Comparer = new NaturalSortComparer(); - public static void Empty(this DirectoryInfo directory) - { - // NOTE: We have this in DirectoryService.Empty(), do we need this here? 
- foreach(FileInfo file in directory.EnumerateFiles()) file.Delete(); - foreach(DirectoryInfo subDirectory in directory.EnumerateDirectories()) subDirectory.Delete(true); - } - - public static void RemoveNonImages(this DirectoryInfo directory) - { - foreach (var file in directory.EnumerateFiles()) - { - if (!Parser.Parser.IsImage(file.FullName)) - { - file.Delete(); - } - } - } - - /// - /// Flattens all files in subfolders to the passed directory recursively. - /// - /// - /// foo - /// ├── 1.txt - /// ├── 2.txt - /// ├── 3.txt - /// ├── 4.txt - /// └── bar - /// ├── 1.txt - /// ├── 2.txt - /// └── 5.txt - /// - /// becomes: - /// foo - /// ├── 1.txt - /// ├── 2.txt - /// ├── 3.txt - /// ├── 4.txt - /// ├── bar_1.txt - /// ├── bar_2.txt - /// └── bar_5.txt - /// - /// - public static void Flatten(this DirectoryInfo directory) - { - var index = 0; - FlattenDirectory(directory, directory, ref index); - } - - private static void FlattenDirectory(DirectoryInfo root, DirectoryInfo directory, ref int directoryIndex) - { - if (!root.FullName.Equals(directory.FullName)) - { - var fileIndex = 1; - - foreach (var file in directory.EnumerateFiles().OrderBy(file => file.FullName, Comparer)) - { - if (file.Directory == null) continue; - var paddedIndex = Parser.Parser.PadZeros(directoryIndex + ""); - // We need to rename the files so that after flattening, they are in the order we found them - var newName = $"{paddedIndex}_{Parser.Parser.PadZeros(fileIndex + "")}{file.Extension}"; - var newPath = Path.Join(root.FullName, newName); - if (!File.Exists(newPath)) file.MoveTo(newPath); - fileIndex++; - } - - directoryIndex++; - } - - var sort = new NaturalSortComparer(); - foreach (var subDirectory in directory.EnumerateDirectories().OrderBy(d => d.FullName, sort)) - { - FlattenDirectory(root, subDirectory, ref directoryIndex); - } - } - } -} diff --git a/API/Extensions/EnumerableExtensions.cs b/API/Extensions/EnumerableExtensions.cs index 7bc5a378f..7bf24b639 100644 --- a/API/Extensions/EnumerableExtensions.cs +++ b/API/Extensions/EnumerableExtensions.cs @@ -1,10 +1,4 @@ -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; - -namespace API.Extensions +namespace API.Extensions { public static class EnumerableExtensions { diff --git a/API/Extensions/HttpExtensions.cs b/API/Extensions/HttpExtensions.cs index 975cbde5f..68655f43d 100644 --- a/API/Extensions/HttpExtensions.cs +++ b/API/Extensions/HttpExtensions.cs @@ -1,5 +1,7 @@ using System.IO; using System.Linq; +using System.Runtime.Intrinsics.Arm; +using System.Security.Cryptography; using System.Text; using System.Text.Json; using API.Helpers; @@ -30,7 +32,8 @@ namespace API.Extensions public static void AddCacheHeader(this HttpResponse response, byte[] content) { if (content == null || content.Length <= 0) return; - using var sha1 = new System.Security.Cryptography.SHA256CryptoServiceProvider(); + using var sha1 = SHA256.Create(); + response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(content).Select(x => x.ToString("X2")))); } @@ -43,7 +46,7 @@ namespace API.Extensions { if (filename == null || filename.Length <= 0) return; var hashContent = filename + File.GetLastWriteTimeUtc(filename); - using var sha1 = new System.Security.Cryptography.SHA256CryptoServiceProvider(); + using var sha1 = SHA256.Create(); response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(Encoding.UTF8.GetBytes(hashContent)).Select(x => x.ToString("X2")))); } diff --git 
a/API/Helpers/CacheHelper.cs b/API/Helpers/CacheHelper.cs index a5febb76f..382c0dac7 100644 --- a/API/Helpers/CacheHelper.cs +++ b/API/Helpers/CacheHelper.cs @@ -1,5 +1,4 @@ using System; -using System.IO; using API.Entities; using API.Entities.Interfaces; using API.Services; diff --git a/API/Helpers/GenreHelper.cs b/API/Helpers/GenreHelper.cs index 8d897314b..a9dc41782 100644 --- a/API/Helpers/GenreHelper.cs +++ b/API/Helpers/GenreHelper.cs @@ -36,25 +36,13 @@ public static class GenreHelper public static void KeepOnlySameGenreBetweenLists(ICollection existingGenres, ICollection removeAllExcept, Action action = null) { - // var normalizedNames = names.Select(s => Parser.Parser.Normalize(s.Trim())) - // .Where(s => !string.IsNullOrEmpty(s)).ToList(); - // var localNamesNotInComicInfos = seriesGenres.Where(g => - // !normalizedNames.Contains(g.NormalizedName) && g.ExternalTag == isExternal); - // - // foreach (var nonExisting in localNamesNotInComicInfos) - // { - // // TODO: Maybe I need to do a cleanup here - // action(nonExisting); - // } var existing = existingGenres.ToList(); foreach (var genre in existing) { var existingPerson = removeAllExcept.FirstOrDefault(g => g.ExternalTag == genre.ExternalTag && genre.NormalizedTitle.Equals(g.NormalizedTitle)); - if (existingPerson == null) - { - existingGenres.Remove(genre); - action?.Invoke(genre); - } + if (existingPerson != null) continue; + existingGenres.Remove(genre); + action?.Invoke(genre); } } diff --git a/API/Interfaces/ITaskScheduler.cs b/API/Interfaces/ITaskScheduler.cs deleted file mode 100644 index 215cccf80..000000000 --- a/API/Interfaces/ITaskScheduler.cs +++ /dev/null @@ -1,22 +0,0 @@ -using System.Threading.Tasks; - -namespace API.Interfaces -{ - public interface ITaskScheduler - { - /// - /// For use on Server startup - /// - void ScheduleTasks(); - Task ScheduleStatsTasks(); - void ScheduleUpdaterTasks(); - void ScanLibrary(int libraryId, bool forceUpdate = false); - void CleanupChapters(int[] chapterIds); - void RefreshMetadata(int libraryId, bool forceUpdate = true); - void CleanupTemp(); - void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false); - void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false); - void CancelStatsTasks(); - Task RunStatCollection(); - } -} diff --git a/API/Interfaces/IUnitOfWork.cs b/API/Interfaces/IUnitOfWork.cs deleted file mode 100644 index 8fba1c03a..000000000 --- a/API/Interfaces/IUnitOfWork.cs +++ /dev/null @@ -1,26 +0,0 @@ -using System.Threading.Tasks; -using API.Interfaces.Repositories; - -namespace API.Interfaces -{ - public interface IUnitOfWork - { - ISeriesRepository SeriesRepository { get; } - IUserRepository UserRepository { get; } - ILibraryRepository LibraryRepository { get; } - IVolumeRepository VolumeRepository { get; } - ISettingsRepository SettingsRepository { get; } - IAppUserProgressRepository AppUserProgressRepository { get; } - ICollectionTagRepository CollectionTagRepository { get; } - IChapterRepository ChapterRepository { get; } - IReadingListRepository ReadingListRepository { get; } - ISeriesMetadataRepository SeriesMetadataRepository { get; } - IPersonRepository PersonRepository { get; } - IGenreRepository GenreRepository { get; } - bool Commit(); - Task CommitAsync(); - bool HasChanges(); - bool Rollback(); - Task RollbackAsync(); - } -} diff --git a/API/Interfaces/Repositories/IAppUserProgressRepository.cs b/API/Interfaces/Repositories/IAppUserProgressRepository.cs deleted file mode 100644 index d37198fb2..000000000 --- 
a/API/Interfaces/Repositories/IAppUserProgressRepository.cs +++ /dev/null @@ -1,14 +0,0 @@ -using System.Threading.Tasks; -using API.Entities; -using API.Entities.Enums; - -namespace API.Interfaces.Repositories -{ - public interface IAppUserProgressRepository - { - void Update(AppUserProgress userProgress); - Task CleanupAbandonedChapters(); - Task UserHasProgress(LibraryType libraryType, int userId); - Task GetUserProgressAsync(int chapterId, int userId); - } -} diff --git a/API/Interfaces/Repositories/IChapterRepository.cs b/API/Interfaces/Repositories/IChapterRepository.cs deleted file mode 100644 index 9ce145f4c..000000000 --- a/API/Interfaces/Repositories/IChapterRepository.cs +++ /dev/null @@ -1,24 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.DTOs; -using API.DTOs.Reader; -using API.Entities; - -namespace API.Interfaces.Repositories -{ - public interface IChapterRepository - { - void Update(Chapter chapter); - Task> GetChaptersByIdsAsync(IList chapterIds); - Task GetChapterInfoDtoAsync(int chapterId); - Task GetChapterTotalPagesAsync(int chapterId); - Task GetChapterAsync(int chapterId); - Task GetChapterDtoAsync(int chapterId); - Task> GetFilesForChapterAsync(int chapterId); - Task> GetChaptersAsync(int volumeId); - Task> GetFilesForChaptersAsync(IReadOnlyList chapterIds); - Task GetChapterCoverImageAsync(int chapterId); - Task> GetAllCoverImagesAsync(); - Task> GetCoverImagesForLockedChaptersAsync(); - } -} diff --git a/API/Interfaces/Repositories/ICollectionTagRepository.cs b/API/Interfaces/Repositories/ICollectionTagRepository.cs deleted file mode 100644 index 6cb422a00..000000000 --- a/API/Interfaces/Repositories/ICollectionTagRepository.cs +++ /dev/null @@ -1,23 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.DTOs.CollectionTags; -using API.Entities; - -namespace API.Interfaces.Repositories -{ - public interface ICollectionTagRepository - { - void Add(CollectionTag tag); - void Remove(CollectionTag tag); - Task> GetAllTagDtosAsync(); - Task> SearchTagDtosAsync(string searchQuery); - Task GetCoverImageAsync(int collectionTagId); - Task> GetAllPromotedTagDtosAsync(); - Task GetTagAsync(int tagId); - Task GetFullTagAsync(int tagId); - void Update(CollectionTag tag); - Task RemoveTagsWithoutSeries(); - Task> GetAllTagsAsync(); - Task> GetAllCoverImagesAsync(); - } -} diff --git a/API/Interfaces/Repositories/IGenreRepository.cs b/API/Interfaces/Repositories/IGenreRepository.cs deleted file mode 100644 index 72a3cca55..000000000 --- a/API/Interfaces/Repositories/IGenreRepository.cs +++ /dev/null @@ -1,15 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.Entities; - -namespace API.Interfaces.Repositories -{ - public interface IGenreRepository - { - void Attach(Genre genre); - void Remove(Genre genre); - Task FindByNameAsync(string genreName); - Task> GetAllGenres(); - Task RemoveAllGenreNoLongerAssociated(bool removeExternal = false); - } -} diff --git a/API/Interfaces/Repositories/ILibraryRepository.cs b/API/Interfaces/Repositories/ILibraryRepository.cs deleted file mode 100644 index 1ba6ac910..000000000 --- a/API/Interfaces/Repositories/ILibraryRepository.cs +++ /dev/null @@ -1,26 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.Data.Repositories; -using API.DTOs; -using API.Entities; -using API.Entities.Enums; - -namespace API.Interfaces.Repositories -{ - public interface ILibraryRepository - { - void Add(Library library); - void 
Update(Library library); - void Delete(Library library); - Task> GetLibraryDtosAsync(); - Task LibraryExists(string libraryName); - Task GetLibraryForIdAsync(int libraryId, LibraryIncludes includes); - Task GetFullLibraryForIdAsync(int libraryId); - Task GetFullLibraryForIdAsync(int libraryId, int seriesId); - Task> GetLibraryDtosForUsernameAsync(string userName); - Task> GetLibrariesAsync(); - Task DeleteLibrary(int libraryId); - Task> GetLibrariesForUserIdAsync(int userId); - Task GetLibraryTypeAsync(int libraryId); - } -} diff --git a/API/Interfaces/Repositories/IPersonRepository.cs b/API/Interfaces/Repositories/IPersonRepository.cs deleted file mode 100644 index dc83bd14f..000000000 --- a/API/Interfaces/Repositories/IPersonRepository.cs +++ /dev/null @@ -1,14 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.Entities; - -namespace API.Interfaces.Repositories -{ - public interface IPersonRepository - { - void Attach(Person person); - void Remove(Person person); - Task> GetAllPeople(); - Task RemoveAllPeopleNoLongerAssociated(bool removeExternal = false); - } -} diff --git a/API/Interfaces/Repositories/IReadingListRepository.cs b/API/Interfaces/Repositories/IReadingListRepository.cs deleted file mode 100644 index 8b5ab085d..000000000 --- a/API/Interfaces/Repositories/IReadingListRepository.cs +++ /dev/null @@ -1,22 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.DTOs.ReadingLists; -using API.Entities; -using API.Helpers; - -namespace API.Interfaces.Repositories -{ - public interface IReadingListRepository - { - Task> GetReadingListDtosForUserAsync(int userId, bool includePromoted, UserParams userParams); - Task GetReadingListByIdAsync(int readingListId); - Task> GetReadingListItemDtosByIdAsync(int readingListId, int userId); - Task GetReadingListDtoByIdAsync(int readingListId, int userId); - Task> AddReadingProgressModifiers(int userId, IList items); - Task GetReadingListDtoByTitleAsync(string title); - Task> GetReadingListItemsByIdAsync(int readingListId); - void Remove(ReadingListItem item); - void BulkRemove(IEnumerable items); - void Update(ReadingList list); - } -} diff --git a/API/Interfaces/Repositories/ISeriesMetadataRepository.cs b/API/Interfaces/Repositories/ISeriesMetadataRepository.cs deleted file mode 100644 index 6d6d09f50..000000000 --- a/API/Interfaces/Repositories/ISeriesMetadataRepository.cs +++ /dev/null @@ -1,10 +0,0 @@ -using API.Entities; -using API.Entities.Metadata; - -namespace API.Interfaces.Repositories -{ - public interface ISeriesMetadataRepository - { - void Update(SeriesMetadata seriesMetadata); - } -} diff --git a/API/Interfaces/Repositories/ISeriesRepository.cs b/API/Interfaces/Repositories/ISeriesRepository.cs deleted file mode 100644 index e4271b247..000000000 --- a/API/Interfaces/Repositories/ISeriesRepository.cs +++ /dev/null @@ -1,63 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.Data.Scanner; -using API.DTOs; -using API.DTOs.Filtering; -using API.Entities; -using API.Entities.Enums; -using API.Entities.Metadata; -using API.Helpers; - -namespace API.Interfaces.Repositories -{ - public interface ISeriesRepository - { - void Attach(Series series); - void Update(Series series); - void Remove(Series series); - void Remove(IEnumerable series); - Task DoesSeriesNameExistInLibrary(string name, MangaFormat format); - /// - /// Adds user information like progress, ratings, etc - /// - /// - /// - /// - /// - Task> GetSeriesDtoForLibraryIdAsync(int 
libraryId, int userId, UserParams userParams, FilterDto filter); - /// - /// Does not add user information like progress, ratings, etc. - /// - /// - /// Series name to search for - /// - Task> SearchSeries(int[] libraryIds, string searchQuery); - Task> GetSeriesForLibraryIdAsync(int libraryId); - Task GetSeriesDtoByIdAsync(int seriesId, int userId); - Task DeleteSeriesAsync(int seriesId); - Task GetSeriesByIdAsync(int seriesId); - Task> GetSeriesByIdsAsync(IList seriesIds); - Task GetChapterIdsForSeriesAsync(int[] seriesIds); - Task>> GetChapterIdWithSeriesIdForSeriesAsync(int[] seriesIds); - /// - /// Used to add Progress/Rating information to series list. - /// - /// - /// - /// - Task AddSeriesModifiers(int userId, List series); - Task GetSeriesCoverImageAsync(int seriesId); - Task> GetOnDeck(int userId, int libraryId, UserParams userParams, FilterDto filter); - Task> GetRecentlyAdded(int libraryId, int userId, UserParams userParams, FilterDto filter); // NOTE: Probably put this in LibraryRepo - Task GetSeriesMetadata(int seriesId); - Task> GetSeriesDtoForCollectionAsync(int collectionId, int userId, UserParams userParams); - Task> GetFilesForSeries(int seriesId); - Task> GetSeriesDtoForIdsAsync(IEnumerable seriesIds, int userId); - Task> GetAllCoverImagesAsync(); - Task> GetLockedCoverImagesAsync(); - Task> GetFullSeriesForLibraryIdAsync(int libraryId, UserParams userParams); - Task GetFullSeriesForSeriesIdAsync(int seriesId); - Task GetChunkInfo(int libraryId = 0); - Task> GetSeriesMetadataForIdsAsync(IEnumerable seriesIds); - } -} diff --git a/API/Interfaces/Repositories/ISettingsRepository.cs b/API/Interfaces/Repositories/ISettingsRepository.cs deleted file mode 100644 index 95178ea79..000000000 --- a/API/Interfaces/Repositories/ISettingsRepository.cs +++ /dev/null @@ -1,17 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.DTOs.Settings; -using API.Entities; -using API.Entities.Enums; - -namespace API.Interfaces.Repositories -{ - public interface ISettingsRepository - { - void Update(ServerSetting settings); - Task GetSettingsDtoAsync(); - Task GetSettingAsync(ServerSettingKey key); - Task> GetSettingsAsync(); - - } -} diff --git a/API/Interfaces/Repositories/IUserRepository.cs b/API/Interfaces/Repositories/IUserRepository.cs deleted file mode 100644 index 65d943623..000000000 --- a/API/Interfaces/Repositories/IUserRepository.cs +++ /dev/null @@ -1,33 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.Data.Repositories; -using API.DTOs; -using API.DTOs.Reader; -using API.Entities; - -namespace API.Interfaces.Repositories -{ - public interface IUserRepository - { - void Update(AppUser user); - void Update(AppUserPreferences preferences); - void Update(AppUserBookmark bookmark); - public void Delete(AppUser user); - Task> GetMembersAsync(); - Task> GetAdminUsersAsync(); - Task> GetNonAdminUsersAsync(); - Task IsUserAdmin(AppUser user); - Task GetUserRating(int seriesId, int userId); - Task GetPreferencesAsync(string username); - Task> GetBookmarkDtosForSeries(int userId, int seriesId); - Task> GetBookmarkDtosForVolume(int userId, int volumeId); - Task> GetBookmarkDtosForChapter(int userId, int chapterId); - Task> GetAllBookmarkDtos(int userId); - Task GetBookmarkForPage(int page, int chapterId, int userId); - Task GetUserIdByApiKeyAsync(string apiKey); - Task GetUserByUsernameAsync(string username, AppUserIncludes includeFlags = AppUserIncludes.None); - Task GetUserByIdAsync(int userId, AppUserIncludes includeFlags 
= AppUserIncludes.None); - Task GetUserIdByUsernameAsync(string username); - Task GetUserWithReadingListsByUsernameAsync(string username); - } -} diff --git a/API/Interfaces/Repositories/IVolumeRepository.cs b/API/Interfaces/Repositories/IVolumeRepository.cs deleted file mode 100644 index 63045a38d..000000000 --- a/API/Interfaces/Repositories/IVolumeRepository.cs +++ /dev/null @@ -1,25 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.DTOs; -using API.Entities; - -namespace API.Interfaces.Repositories -{ - public interface IVolumeRepository - { - void Add(Volume volume); - void Update(Volume volume); - void Remove(Volume volume); - Task> GetFilesForVolume(int volumeId); - Task GetVolumeCoverImageAsync(int volumeId); - Task> GetChapterIdsByVolumeIds(IReadOnlyList volumeIds); - - // From Series Repo - Task> GetVolumesDtoAsync(int seriesId, int userId); - Task GetVolumeAsync(int volumeId); - Task GetVolumeDtoAsync(int volumeId, int userId); - Task> GetVolumesForSeriesAsync(IList seriesIds, bool includeChapters = false); - Task> GetVolumes(int seriesId); - Task GetVolumeByIdAsync(int volumeId); - } -} diff --git a/API/Interfaces/Services/IAccountService.cs b/API/Interfaces/Services/IAccountService.cs deleted file mode 100644 index e07ce2f79..000000000 --- a/API/Interfaces/Services/IAccountService.cs +++ /dev/null @@ -1,12 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.Entities; -using API.Errors; - -namespace API.Interfaces.Services -{ - public interface IAccountService - { - Task> ChangeUserPassword(AppUser user, string newPassword); - } -} diff --git a/API/Interfaces/Services/IArchiveService.cs b/API/Interfaces/Services/IArchiveService.cs deleted file mode 100644 index f2567341a..000000000 --- a/API/Interfaces/Services/IArchiveService.cs +++ /dev/null @@ -1,21 +0,0 @@ -using System; -using System.Collections.Generic; -using System.IO.Compression; -using System.Threading.Tasks; -using API.Archive; -using API.Data.Metadata; - -namespace API.Interfaces.Services -{ - public interface IArchiveService - { - void ExtractArchive(string archivePath, string extractPath); - int GetNumberOfPagesFromArchive(string archivePath); - string GetCoverImage(string archivePath, string fileName); - bool IsValidArchive(string archivePath); - ComicInfo GetComicInfo(string archivePath); - ArchiveLibrary CanOpen(string archivePath); - bool ArchiveNeedsFlattening(ZipArchive archive); - Task> CreateZipForDownload(IEnumerable files, string tempFolder); - } -} diff --git a/API/Interfaces/Services/IBackupService.cs b/API/Interfaces/Services/IBackupService.cs deleted file mode 100644 index 315b852f0..000000000 --- a/API/Interfaces/Services/IBackupService.cs +++ /dev/null @@ -1,20 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; - -namespace API.Interfaces.Services -{ - public interface IBackupService - { - Task BackupDatabase(); - /// - /// Returns a list of full paths of the logs files detailed in . 
- /// - /// - /// - /// - IEnumerable LogFiles(int maxRollingFiles, string logFileName); - - void CleanupBackups(); - } -} \ No newline at end of file diff --git a/API/Interfaces/Services/IBookService.cs b/API/Interfaces/Services/IBookService.cs deleted file mode 100644 index e78669755..000000000 --- a/API/Interfaces/Services/IBookService.cs +++ /dev/null @@ -1,33 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.Data.Metadata; -using API.Parser; -using VersOne.Epub; - -namespace API.Interfaces.Services -{ - public interface IBookService - { - int GetNumberOfPages(string filePath); - string GetCoverImage(string fileFilePath, string fileName); - Task> CreateKeyToPageMappingAsync(EpubBookRef book); - - /// - /// Scopes styles to .reading-section and replaces img src to the passed apiBase - /// - /// - /// - /// If the stylesheetHtml contains Import statements, when scoping the filename, scope needs to be wrt filepath. - /// Book Reference, needed for if you expect Import statements - /// - Task ScopeStyles(string stylesheetHtml, string apiBase, string filename, EpubBookRef book); - ComicInfo GetComicInfo(string filePath); - ParserInfo ParseInfo(string filePath); - /// - /// Extracts a PDF file's pages as images to an target directory - /// - /// - /// Where the files will be extracted to. If doesn't exist, will be created. - void ExtractPdfImages(string fileFilePath, string targetDirectory); - } -} diff --git a/API/Interfaces/Services/ICacheService.cs b/API/Interfaces/Services/ICacheService.cs deleted file mode 100644 index 395898dc2..000000000 --- a/API/Interfaces/Services/ICacheService.cs +++ /dev/null @@ -1,41 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.Entities; - -namespace API.Interfaces.Services -{ - public interface ICacheService - { - /// - /// Ensures the cache is created for the given chapter and if not, will create it. Should be called before any other - /// cache operations (except cleanup). - /// - /// - /// Chapter for the passed chapterId. Side-effect from ensuring cache. - Task Ensure(int chapterId); - - /// - /// Clears cache directory of all folders and files. - /// - void Cleanup(); - - /// - /// Clears cache directory of all volumes. This can be invoked from deleting a library or a series. - /// - /// Volumes that belong to that library. Assume the library might have been deleted before this invocation. - void CleanupChapters(IEnumerable chapterIds); - - - /// - /// Returns the absolute path of a cached page. - /// - /// Chapter entity with Files populated. 
- /// Page number to look for - /// - Task<(string path, MangaFile file)> GetCachedPagePath(Chapter chapter, int page); - - void EnsureCacheDirectory(); - string GetCachedEpubFile(int chapterId, Chapter chapter); - public void ExtractChapterFiles(string extractPath, IReadOnlyList files); - } -} diff --git a/API/Interfaces/Services/ICleanupService.cs b/API/Interfaces/Services/ICleanupService.cs deleted file mode 100644 index afabb9900..000000000 --- a/API/Interfaces/Services/ICleanupService.cs +++ /dev/null @@ -1,10 +0,0 @@ -using System.Threading.Tasks; - -namespace API.Interfaces.Services -{ - public interface ICleanupService - { - Task Cleanup(); - void CleanupCacheDirectory(); - } -} diff --git a/API/Interfaces/Services/IDirectoryService.cs b/API/Interfaces/Services/IDirectoryService.cs deleted file mode 100644 index 9237b9fc3..000000000 --- a/API/Interfaces/Services/IDirectoryService.cs +++ /dev/null @@ -1,22 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; - -namespace API.Interfaces.Services -{ - public interface IDirectoryService - { - /// - /// Lists out top-level folders for a given directory. Filters out System and Hidden folders. - /// - /// Absolute path of directory to scan. - /// List of folder names - IEnumerable ListDirectory(string rootPath); - Task ReadFileAsync(string path); - bool CopyFilesToDirectory(IEnumerable filePaths, string directoryPath, string prepend = ""); - bool Exists(string directory); - void CopyFileToDirectory(string fullFilePath, string targetDirectory); - int TraverseTreeParallelForEach(string root, Action action, string searchPattern, ILogger logger); - } -} diff --git a/API/Interfaces/Services/IImageService.cs b/API/Interfaces/Services/IImageService.cs deleted file mode 100644 index 0aba07f39..000000000 --- a/API/Interfaces/Services/IImageService.cs +++ /dev/null @@ -1,23 +0,0 @@ -using API.Entities; -using API.Services; - -namespace API.Interfaces.Services -{ - public interface IImageService - { - string GetCoverImage(string path, string fileName); - string GetCoverFile(MangaFile file); - /// - /// Creates a Thumbnail version of an image - /// - /// Path to the image file - /// File name with extension of the file. This will always write to - public string CreateThumbnail(string path, string fileName); - /// - /// Creates a Thumbnail version of a base64 image - /// - /// base64 encoded image - /// File name with extension of the file. This will always write to - public string CreateThumbnailFromBase64(string encodedImage, string fileName); - } -} diff --git a/API/Interfaces/Services/IMetadataService.cs b/API/Interfaces/Services/IMetadataService.cs deleted file mode 100644 index 53f3a2757..000000000 --- a/API/Interfaces/Services/IMetadataService.cs +++ /dev/null @@ -1,20 +0,0 @@ -using System.Threading.Tasks; - -namespace API.Interfaces.Services -{ - public interface IMetadataService - { - /// - /// Recalculates metadata for all entities in a library. 
- /// - /// - /// - Task RefreshMetadata(int libraryId, bool forceUpdate = false); - /// - /// Performs a forced refresh of metatdata just for a series and it's nested entities - /// - /// - /// - Task RefreshMetadataForSeries(int libraryId, int seriesId, bool forceUpdate = false); - } -} diff --git a/API/Interfaces/Services/IReaderService.cs b/API/Interfaces/Services/IReaderService.cs deleted file mode 100644 index a72b90699..000000000 --- a/API/Interfaces/Services/IReaderService.cs +++ /dev/null @@ -1,17 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.DTOs; -using API.Entities; - -namespace API.Interfaces.Services -{ - public interface IReaderService - { - void MarkChaptersAsRead(AppUser user, int seriesId, IEnumerable chapters); - void MarkChaptersAsUnread(AppUser user, int seriesId, IEnumerable chapters); - Task SaveReadingProgress(ProgressDto progressDto, int userId); - Task CapPageToChapter(int chapterId, int page); - Task GetNextChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId); - Task GetPrevChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId); - } -} diff --git a/API/Interfaces/Services/IScannerService.cs b/API/Interfaces/Services/IScannerService.cs deleted file mode 100644 index bab0ca588..000000000 --- a/API/Interfaces/Services/IScannerService.cs +++ /dev/null @@ -1,18 +0,0 @@ - -using System.Threading; -using System.Threading.Tasks; - -namespace API.Interfaces.Services -{ - public interface IScannerService - { - /// - /// Given a library id, scans folders for said library. Parses files and generates DB updates. Will overwrite - /// cover images if forceUpdate is true. - /// - /// Library to scan against - Task ScanLibrary(int libraryId); - Task ScanLibraries(); - Task ScanSeries(int libraryId, int seriesId, CancellationToken token); - } -} diff --git a/API/Interfaces/Services/IStartupTask.cs b/API/Interfaces/Services/IStartupTask.cs deleted file mode 100644 index e2a99ecad..000000000 --- a/API/Interfaces/Services/IStartupTask.cs +++ /dev/null @@ -1,10 +0,0 @@ -using System.Threading; -using System.Threading.Tasks; - -namespace API.Interfaces.Services -{ - public interface IStartupTask - { - Task ExecuteAsync(CancellationToken cancellationToken = default); - } -} \ No newline at end of file diff --git a/API/Interfaces/Services/IStatsService.cs b/API/Interfaces/Services/IStatsService.cs deleted file mode 100644 index 685c3057d..000000000 --- a/API/Interfaces/Services/IStatsService.cs +++ /dev/null @@ -1,11 +0,0 @@ -using System.Threading.Tasks; -using API.DTOs.Stats; - -namespace API.Interfaces.Services -{ - public interface IStatsService - { - Task Send(); - Task GetServerInfo(); - } -} diff --git a/API/Interfaces/Services/ITokenService.cs b/API/Interfaces/Services/ITokenService.cs deleted file mode 100644 index 14765f2f0..000000000 --- a/API/Interfaces/Services/ITokenService.cs +++ /dev/null @@ -1,10 +0,0 @@ -using System.Threading.Tasks; -using API.Entities; - -namespace API.Interfaces.Services -{ - public interface ITokenService - { - Task CreateToken(AppUser user); - } -} \ No newline at end of file diff --git a/API/Interfaces/Services/IVersionUpdaterService.cs b/API/Interfaces/Services/IVersionUpdaterService.cs deleted file mode 100644 index ddde09960..000000000 --- a/API/Interfaces/Services/IVersionUpdaterService.cs +++ /dev/null @@ -1,13 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using API.DTOs.Update; - -namespace API.Interfaces.Services -{ - public 
interface IVersionUpdaterService - { - Task CheckForUpdate(); - Task PushUpdate(UpdateNotificationDto update); - Task> GetAllReleases(); - } -} diff --git a/API/Interfaces/Services/ReaderService.cs b/API/Interfaces/Services/ReaderService.cs deleted file mode 100644 index 7eb2e1118..000000000 --- a/API/Interfaces/Services/ReaderService.cs +++ /dev/null @@ -1,310 +0,0 @@ - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; -using API.Comparators; -using API.Data.Repositories; -using API.DTOs; -using API.Entities; -using Microsoft.Extensions.Logging; - -namespace API.Interfaces.Services -{ - public class ReaderService : IReaderService - { - private readonly IUnitOfWork _unitOfWork; - private readonly ILogger _logger; - private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer(); - private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst(); - - public ReaderService(IUnitOfWork unitOfWork, ILogger logger) - { - _unitOfWork = unitOfWork; - _logger = logger; - } - - /// - /// Marks all Chapters as Read by creating or updating UserProgress rows. Does not commit. - /// - /// - /// - /// - public void MarkChaptersAsRead(AppUser user, int seriesId, IEnumerable chapters) - { - foreach (var chapter in chapters) - { - var userProgress = GetUserProgressForChapter(user, chapter); - - if (userProgress == null) - { - user.Progresses.Add(new AppUserProgress - { - PagesRead = chapter.Pages, - VolumeId = chapter.VolumeId, - SeriesId = seriesId, - ChapterId = chapter.Id - }); - } - else - { - userProgress.PagesRead = chapter.Pages; - userProgress.SeriesId = seriesId; - userProgress.VolumeId = chapter.VolumeId; - } - } - } - - /// - /// Marks all Chapters as Unread by creating or updating UserProgress rows. Does not commit. - /// - /// - /// - /// - public void MarkChaptersAsUnread(AppUser user, int seriesId, IEnumerable chapters) - { - foreach (var chapter in chapters) - { - var userProgress = GetUserProgressForChapter(user, chapter); - - if (userProgress == null) - { - user.Progresses.Add(new AppUserProgress - { - PagesRead = 0, - VolumeId = chapter.VolumeId, - SeriesId = seriesId, - ChapterId = chapter.Id - }); - } - else - { - userProgress.PagesRead = 0; - userProgress.SeriesId = seriesId; - userProgress.VolumeId = chapter.VolumeId; - } - } - } - - /// - /// Gets the User Progress for a given Chapter. This will handle any duplicates that might have occured in past versions and will delete them. Does not commit. - /// - /// - /// - /// - public static AppUserProgress GetUserProgressForChapter(AppUser user, Chapter chapter) - { - AppUserProgress userProgress = null; - try - { - userProgress = - user.Progresses.SingleOrDefault(x => x.ChapterId == chapter.Id && x.AppUserId == user.Id); - } - catch (Exception) - { - // There is a very rare chance that user progress will duplicate current row. If that happens delete one with less pages - var progresses = user.Progresses.Where(x => x.ChapterId == chapter.Id && x.AppUserId == user.Id).ToList(); - if (progresses.Count > 1) - { - user.Progresses = new List() - { - user.Progresses.First() - }; - userProgress = user.Progresses.First(); - } - } - - return userProgress; - } - - /// - /// Saves progress to DB - /// - /// - /// - /// - public async Task SaveReadingProgress(ProgressDto progressDto, int userId) - { - // Don't let user save past total pages. 
- progressDto.PageNum = await CapPageToChapter(progressDto.ChapterId, progressDto.PageNum); - - try - { - var userProgress = - await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(progressDto.ChapterId, userId); - - if (userProgress == null) - { - // Create a user object - var userWithProgress = - await _unitOfWork.UserRepository.GetUserByIdAsync(userId, AppUserIncludes.Progress); - userWithProgress.Progresses ??= new List(); - userWithProgress.Progresses.Add(new AppUserProgress - { - PagesRead = progressDto.PageNum, - VolumeId = progressDto.VolumeId, - SeriesId = progressDto.SeriesId, - ChapterId = progressDto.ChapterId, - BookScrollId = progressDto.BookScrollId, - LastModified = DateTime.Now - }); - _unitOfWork.UserRepository.Update(userWithProgress); - } - else - { - userProgress.PagesRead = progressDto.PageNum; - userProgress.SeriesId = progressDto.SeriesId; - userProgress.VolumeId = progressDto.VolumeId; - userProgress.BookScrollId = progressDto.BookScrollId; - userProgress.LastModified = DateTime.Now; - _unitOfWork.AppUserProgressRepository.Update(userProgress); - } - - if (await _unitOfWork.CommitAsync()) - { - return true; - } - } - catch (Exception exception) - { - _logger.LogError(exception, "Could not save progress"); - await _unitOfWork.RollbackAsync(); - } - - return false; - } - - /// - /// Ensures that the page is within 0 and total pages for a chapter. Makes one DB call. - /// - /// - /// - /// - public async Task CapPageToChapter(int chapterId, int page) - { - var totalPages = await _unitOfWork.ChapterRepository.GetChapterTotalPagesAsync(chapterId); - if (page > totalPages) - { - page = totalPages; - } - - if (page < 0) - { - page = 0; - } - - return page; - } - - /// - /// Tries to find the next logical Chapter - /// - /// - /// V1 → V2 → V3 chapter 0 → V3 chapter 10 → SP 01 → SP 02 - /// - /// - /// - /// - /// - /// -1 if nothing can be found - public async Task GetNextChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId) - { - var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).ToList(); - var currentVolume = volumes.Single(v => v.Id == volumeId); - var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId); - - if (currentVolume.Number == 0) - { - // Handle specials by sorting on their Filename aka Range - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, new NaturalSortComparer()), currentChapter.Number); - if (chapterId > 0) return chapterId; - } - - foreach (var volume in volumes) - { - if (volume.Number == currentVolume.Number && volume.Chapters.Count > 1) - { - // Handle Chapters within current Volume - // In this case, i need 0 first because 0 represents a full volume file. - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting), currentChapter.Number); - if (chapterId > 0) return chapterId; - } - - if (volume.Number != currentVolume.Number + 1) continue; - - // Handle Chapters within next Volume - // ! 
When selecting the chapter for the next volume, we need to make sure a c0 comes before a c1+ - var chapters = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).ToList(); - if (currentChapter.Number.Equals("0") && chapters.Last().Number.Equals("0")) - { - return chapters.Last().Id; - } - - var firstChapter = chapters.FirstOrDefault(); - if (firstChapter == null) return -1; - return firstChapter.Id; - - } - - return -1; - } - /// - /// Tries to find the prev logical Chapter - /// - /// - /// V1 ← V2 ← V3 chapter 0 ← V3 chapter 10 ← SP 01 ← SP 02 - /// - /// - /// - /// - /// - /// -1 if nothing can be found - public async Task GetPrevChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId) - { - var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).Reverse().ToList(); - var currentVolume = volumes.Single(v => v.Id == volumeId); - var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId); - - if (currentVolume.Number == 0) - { - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, new NaturalSortComparer()).Reverse(), currentChapter.Number); - if (chapterId > 0) return chapterId; - } - - foreach (var volume in volumes) - { - if (volume.Number == currentVolume.Number) - { - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).Reverse(), currentChapter.Number); - if (chapterId > 0) return chapterId; - } - if (volume.Number == currentVolume.Number - 1) - { - var lastChapter = volume.Chapters - .OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).LastOrDefault(); - if (lastChapter == null) return -1; - return lastChapter.Id; - } - } - return -1; - } - - private static int GetNextChapterId(IEnumerable chapters, string currentChapterNumber) - { - var next = false; - var chaptersList = chapters.ToList(); - foreach (var chapter in chaptersList) - { - if (next) - { - return chapter.Id; - } - if (currentChapterNumber.Equals(chapter.Number)) next = true; - } - - return -1; - } - - - } -} diff --git a/API/Parser/DefaultParser.cs b/API/Parser/DefaultParser.cs new file mode 100644 index 000000000..23b5c1d58 --- /dev/null +++ b/API/Parser/DefaultParser.cs @@ -0,0 +1,160 @@ +using System.IO; +using System.Linq; +using API.Entities.Enums; +using API.Services; + +namespace API.Parser; + +/// +/// This is an implementation of the Parser that is the basis for everything +/// +public class DefaultParser +{ + private readonly IDirectoryService _directoryService; + + public DefaultParser(IDirectoryService directoryService) + { + _directoryService = directoryService; + } + + /// + /// Parses information out of a file path. Will fallback to using directory name if Series couldn't be parsed + /// from filename. + /// + /// + /// Root folder + /// Defaults to Manga. Allows different Regex to be used for parsing. + /// or null if Series was empty + public ParserInfo Parse(string filePath, string rootPath, LibraryType type = LibraryType.Manga) + { + var fileName = _directoryService.FileSystem.Path.GetFileNameWithoutExtension(filePath); + ParserInfo ret; + + if (Parser.IsEpub(filePath)) + { + ret = new ParserInfo() + { + Chapters = Parser.ParseChapter(fileName) ?? Parser.ParseComicChapter(fileName), + Series = Parser.ParseSeries(fileName) ?? Parser.ParseComicSeries(fileName), + Volumes = Parser.ParseVolume(fileName) ?? 
Parser.ParseComicVolume(fileName), + Filename = Path.GetFileName(filePath), + Format = Parser.ParseFormat(filePath), + FullFilePath = filePath + }; + } + else + { + ret = new ParserInfo() + { + Chapters = type == LibraryType.Manga ? Parser.ParseChapter(fileName) : Parser.ParseComicChapter(fileName), + Series = type == LibraryType.Manga ? Parser.ParseSeries(fileName) : Parser.ParseComicSeries(fileName), + Volumes = type == LibraryType.Manga ? Parser.ParseVolume(fileName) : Parser.ParseComicVolume(fileName), + Filename = Path.GetFileName(filePath), + Format = Parser.ParseFormat(filePath), + Title = Path.GetFileNameWithoutExtension(fileName), + FullFilePath = filePath + }; + } + + if (Parser.IsImage(filePath) && Parser.IsCoverImage(filePath)) return null; + + if (Parser.IsImage(filePath)) + { + // Reset Chapters, Volumes, and Series as images are not good to parse information out of. Better to use folders. + ret.Volumes = Parser.DefaultVolume; + ret.Chapters = Parser.DefaultChapter; + ret.Series = string.Empty; + } + + if (ret.Series == string.Empty || Parser.IsImage(filePath)) + { + // Try to parse information out of each folder all the way to rootPath + ParseFromFallbackFolders(filePath, rootPath, type, ref ret); + } + + var edition = Parser.ParseEdition(fileName); + if (!string.IsNullOrEmpty(edition)) + { + ret.Series = Parser.CleanTitle(ret.Series.Replace(edition, ""), type is LibraryType.Comic); + ret.Edition = edition; + } + + var isSpecial = type == LibraryType.Comic ? Parser.ParseComicSpecial(fileName) : Parser.ParseMangaSpecial(fileName); + // We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that + // could cause a problem as Omake is a special term, but there is valid volume/chapter information. + if (ret.Chapters == Parser.DefaultChapter && ret.Volumes == Parser.DefaultVolume && !string.IsNullOrEmpty(isSpecial)) + { + ret.IsSpecial = true; + ParseFromFallbackFolders(filePath, rootPath, type, ref ret); + } + + // If we are a special with marker, we need to ensure we use the correct series name. we can do this by falling back to Folder name + if (Parser.HasSpecialMarker(fileName)) + { + ret.IsSpecial = true; + ret.Chapters = Parser.DefaultChapter; + ret.Volumes = Parser.DefaultVolume; + + ParseFromFallbackFolders(filePath, rootPath, type, ref ret); + } + + if (string.IsNullOrEmpty(ret.Series)) + { + ret.Series = Parser.CleanTitle(fileName, type is LibraryType.Comic); + } + + // Pdfs may have .pdf in the series name, remove that + if (Parser.IsPdf(filePath) && ret.Series.ToLower().EndsWith(".pdf")) + { + ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length); + } + + return ret.Series == string.Empty ? null : ret; + } + + /// + /// Fills out by trying to parse volume, chapters, and series from folders + /// + /// + /// + /// + /// Expects a non-null ParserInfo which this method will populate + public void ParseFromFallbackFolders(string filePath, string rootPath, LibraryType type, ref ParserInfo ret) + { + var fallbackFolders = _directoryService.GetFoldersTillRoot(rootPath, filePath).ToList(); + for (var i = 0; i < fallbackFolders.Count; i++) + { + var folder = fallbackFolders[i]; + if (!string.IsNullOrEmpty(Parser.ParseMangaSpecial(folder))) continue; + + var parsedVolume = type is LibraryType.Manga ? Parser.ParseVolume(folder) : Parser.ParseComicVolume(folder); + var parsedChapter = type is LibraryType.Manga ? 
Parser.ParseChapter(folder) : Parser.ParseComicChapter(folder); + + if (!parsedVolume.Equals(Parser.DefaultVolume) || !parsedChapter.Equals(Parser.DefaultChapter)) + { + if ((ret.Volumes.Equals(Parser.DefaultVolume) || string.IsNullOrEmpty(ret.Volumes)) && !parsedVolume.Equals(Parser.DefaultVolume)) + { + ret.Volumes = parsedVolume; + } + if ((ret.Chapters.Equals(Parser.DefaultChapter) || string.IsNullOrEmpty(ret.Chapters)) && !parsedChapter.Equals(Parser.DefaultChapter)) + { + ret.Chapters = parsedChapter; + } + } + + var series = Parser.ParseSeries(folder); + + if ((string.IsNullOrEmpty(series) && i == fallbackFolders.Count - 1)) + { + ret.Series = Parser.CleanTitle(folder, type is LibraryType.Comic); + break; + } + + if (!string.IsNullOrEmpty(series)) + { + ret.Series = series; + break; + } + } + } +} diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index 8183505ff..826371ae5 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -491,146 +491,146 @@ namespace API.Parser ); - /// - /// Parses information out of a file path. Will fallback to using directory name if Series couldn't be parsed - /// from filename. - /// - /// - /// Root folder - /// Defaults to Manga. Allows different Regex to be used for parsing. - /// or null if Series was empty - public static ParserInfo Parse(string filePath, string rootPath, LibraryType type = LibraryType.Manga) - { - var fileName = Path.GetFileNameWithoutExtension(filePath); - ParserInfo ret; - - if (IsEpub(filePath)) - { - ret = new ParserInfo() - { - Chapters = ParseChapter(fileName) ?? ParseComicChapter(fileName), - Series = ParseSeries(fileName) ?? ParseComicSeries(fileName), - Volumes = ParseVolume(fileName) ?? ParseComicVolume(fileName), - Filename = Path.GetFileName(filePath), - Format = ParseFormat(filePath), - FullFilePath = filePath - }; - } - else - { - ret = new ParserInfo() - { - Chapters = type == LibraryType.Manga ? ParseChapter(fileName) : ParseComicChapter(fileName), - Series = type == LibraryType.Manga ? ParseSeries(fileName) : ParseComicSeries(fileName), - Volumes = type == LibraryType.Manga ? ParseVolume(fileName) : ParseComicVolume(fileName), - Filename = Path.GetFileName(filePath), - Format = ParseFormat(filePath), - Title = Path.GetFileNameWithoutExtension(fileName), - FullFilePath = filePath - }; - } - - if (IsImage(filePath) && IsCoverImage(filePath)) return null; - - if (IsImage(filePath)) - { - // Reset Chapters, Volumes, and Series as images are not good to parse information out of. Better to use folders. - ret.Volumes = DefaultVolume; - ret.Chapters = DefaultChapter; - ret.Series = string.Empty; - } - - if (ret.Series == string.Empty || IsImage(filePath)) - { - // Try to parse information out of each folder all the way to rootPath - ParseFromFallbackFolders(filePath, rootPath, type, ref ret); - } - - var edition = ParseEdition(fileName); - if (!string.IsNullOrEmpty(edition)) - { - ret.Series = CleanTitle(ret.Series.Replace(edition, ""), type is LibraryType.Comic); - ret.Edition = edition; - } - - var isSpecial = type == LibraryType.Comic ? ParseComicSpecial(fileName) : ParseMangaSpecial(fileName); - // We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that - // could cause a problem as Omake is a special term, but there is valid volume/chapter information. 
- if (ret.Chapters == DefaultChapter && ret.Volumes == DefaultVolume && !string.IsNullOrEmpty(isSpecial)) - { - ret.IsSpecial = true; - ParseFromFallbackFolders(filePath, rootPath, type, ref ret); - } - - // If we are a special with marker, we need to ensure we use the correct series name. we can do this by falling back to Folder name - if (HasSpecialMarker(fileName)) - { - ret.IsSpecial = true; - ret.Chapters = DefaultChapter; - ret.Volumes = DefaultVolume; - - ParseFromFallbackFolders(filePath, rootPath, type, ref ret); - } - - if (string.IsNullOrEmpty(ret.Series)) - { - ret.Series = CleanTitle(fileName, type is LibraryType.Comic); - } - - // Pdfs may have .pdf in the series name, remove that - if (IsPdf(filePath) && ret.Series.ToLower().EndsWith(".pdf")) - { - ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length); - } - - return ret.Series == string.Empty ? null : ret; - } - - /// - /// - /// - /// - /// - /// - /// Expects a non-null ParserInfo which this method will populate - public static void ParseFromFallbackFolders(string filePath, string rootPath, LibraryType type, ref ParserInfo ret) - { - var fallbackFolders = DirectoryService.GetFoldersTillRoot(rootPath, filePath).ToList(); - for (var i = 0; i < fallbackFolders.Count; i++) - { - var folder = fallbackFolders[i]; - if (!string.IsNullOrEmpty(ParseMangaSpecial(folder))) continue; - - var parsedVolume = type is LibraryType.Manga ? ParseVolume(folder) : ParseComicVolume(folder); - var parsedChapter = type is LibraryType.Manga ? ParseChapter(folder) : ParseComicChapter(folder); - - if (!parsedVolume.Equals(DefaultVolume) || !parsedChapter.Equals(DefaultChapter)) - { - if ((ret.Volumes.Equals(DefaultVolume) || string.IsNullOrEmpty(ret.Volumes)) && !parsedVolume.Equals(DefaultVolume)) - { - ret.Volumes = parsedVolume; - } - if ((ret.Chapters.Equals(DefaultChapter) || string.IsNullOrEmpty(ret.Chapters)) && !parsedChapter.Equals(DefaultChapter)) - { - ret.Chapters = parsedChapter; - } - } - - var series = ParseSeries(folder); - - if ((string.IsNullOrEmpty(series) && i == fallbackFolders.Count - 1)) - { - ret.Series = CleanTitle(folder, type is LibraryType.Comic); - break; - } - - if (!string.IsNullOrEmpty(series)) - { - ret.Series = series; - break; - } - } - } + // /// + // /// Parses information out of a file path. Will fallback to using directory name if Series couldn't be parsed + // /// from filename. + // /// + // /// + // /// Root folder + // /// Defaults to Manga. Allows different Regex to be used for parsing. + // /// or null if Series was empty + // public static ParserInfo Parse(string filePath, string rootPath, IDirectoryService directoryService, LibraryType type = LibraryType.Manga) + // { + // var fileName = directoryService.FileSystem.Path.GetFileNameWithoutExtension(filePath); + // ParserInfo ret; + // + // if (IsEpub(filePath)) + // { + // ret = new ParserInfo() + // { + // Chapters = ParseChapter(fileName) ?? ParseComicChapter(fileName), + // Series = ParseSeries(fileName) ?? ParseComicSeries(fileName), + // Volumes = ParseVolume(fileName) ?? ParseComicVolume(fileName), + // Filename = Path.GetFileName(filePath), + // Format = ParseFormat(filePath), + // FullFilePath = filePath + // }; + // } + // else + // { + // ret = new ParserInfo() + // { + // Chapters = type == LibraryType.Manga ? ParseChapter(fileName) : ParseComicChapter(fileName), + // Series = type == LibraryType.Manga ? ParseSeries(fileName) : ParseComicSeries(fileName), + // Volumes = type == LibraryType.Manga ? 
ParseVolume(fileName) : ParseComicVolume(fileName), + // Filename = Path.GetFileName(filePath), + // Format = ParseFormat(filePath), + // Title = Path.GetFileNameWithoutExtension(fileName), + // FullFilePath = filePath + // }; + // } + // + // if (IsImage(filePath) && IsCoverImage(filePath)) return null; + // + // if (IsImage(filePath)) + // { + // // Reset Chapters, Volumes, and Series as images are not good to parse information out of. Better to use folders. + // ret.Volumes = DefaultVolume; + // ret.Chapters = DefaultChapter; + // ret.Series = string.Empty; + // } + // + // if (ret.Series == string.Empty || IsImage(filePath)) + // { + // // Try to parse information out of each folder all the way to rootPath + // ParseFromFallbackFolders(filePath, rootPath, type, directoryService, ref ret); + // } + // + // var edition = ParseEdition(fileName); + // if (!string.IsNullOrEmpty(edition)) + // { + // ret.Series = CleanTitle(ret.Series.Replace(edition, ""), type is LibraryType.Comic); + // ret.Edition = edition; + // } + // + // var isSpecial = type == LibraryType.Comic ? ParseComicSpecial(fileName) : ParseMangaSpecial(fileName); + // // We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that + // // could cause a problem as Omake is a special term, but there is valid volume/chapter information. + // if (ret.Chapters == DefaultChapter && ret.Volumes == DefaultVolume && !string.IsNullOrEmpty(isSpecial)) + // { + // ret.IsSpecial = true; + // ParseFromFallbackFolders(filePath, rootPath, type, directoryService, ref ret); + // } + // + // // If we are a special with marker, we need to ensure we use the correct series name. we can do this by falling back to Folder name + // if (HasSpecialMarker(fileName)) + // { + // ret.IsSpecial = true; + // ret.Chapters = DefaultChapter; + // ret.Volumes = DefaultVolume; + // + // ParseFromFallbackFolders(filePath, rootPath, type, directoryService, ref ret); + // } + // + // if (string.IsNullOrEmpty(ret.Series)) + // { + // ret.Series = CleanTitle(fileName, type is LibraryType.Comic); + // } + // + // // Pdfs may have .pdf in the series name, remove that + // if (IsPdf(filePath) && ret.Series.ToLower().EndsWith(".pdf")) + // { + // ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length); + // } + // + // return ret.Series == string.Empty ? null : ret; + // } + // + // /// + // /// + // /// + // /// + // /// + // /// + // /// Expects a non-null ParserInfo which this method will populate + // public static void ParseFromFallbackFolders(string filePath, string rootPath, LibraryType type, IDirectoryService directoryService, ref ParserInfo ret) + // { + // var fallbackFolders = directoryService.GetFoldersTillRoot(rootPath, filePath).ToList(); + // for (var i = 0; i < fallbackFolders.Count; i++) + // { + // var folder = fallbackFolders[i]; + // if (!string.IsNullOrEmpty(ParseMangaSpecial(folder))) continue; + // + // var parsedVolume = type is LibraryType.Manga ? ParseVolume(folder) : ParseComicVolume(folder); + // var parsedChapter = type is LibraryType.Manga ? 
ParseChapter(folder) : ParseComicChapter(folder); + // + // if (!parsedVolume.Equals(DefaultVolume) || !parsedChapter.Equals(DefaultChapter)) + // { + // if ((ret.Volumes.Equals(DefaultVolume) || string.IsNullOrEmpty(ret.Volumes)) && !parsedVolume.Equals(DefaultVolume)) + // { + // ret.Volumes = parsedVolume; + // } + // if ((ret.Chapters.Equals(DefaultChapter) || string.IsNullOrEmpty(ret.Chapters)) && !parsedChapter.Equals(DefaultChapter)) + // { + // ret.Chapters = parsedChapter; + // } + // } + // + // var series = ParseSeries(folder); + // + // if ((string.IsNullOrEmpty(series) && i == fallbackFolders.Count - 1)) + // { + // ret.Series = CleanTitle(folder, type is LibraryType.Comic); + // break; + // } + // + // if (!string.IsNullOrEmpty(series)) + // { + // ret.Series = series; + // break; + // } + // } + // } public static MangaFormat ParseFormat(string filePath) { diff --git a/API/Program.cs b/API/Program.cs index 33c8420c8..18ac62dd6 100644 --- a/API/Program.cs +++ b/API/Program.cs @@ -1,5 +1,6 @@ using System; using System.IO; +using System.IO.Abstractions; using System.Security.Cryptography; using System.Threading.Tasks; using API.Data; @@ -15,6 +16,7 @@ using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; +using NetVips; namespace API { @@ -31,7 +33,19 @@ namespace API Console.OutputEncoding = System.Text.Encoding.UTF8; var isDocker = new OsInfo(Array.Empty()).IsDocker; - MigrateConfigFiles.Migrate(isDocker); + var migrateLogger = LoggerFactory.Create(builder => + { + builder + //.AddConfiguration(Configuration.GetSection("Logging")) + .AddFilter("Microsoft", LogLevel.Warning) + .AddFilter("System", LogLevel.Warning) + .AddFilter("SampleApp.Program", LogLevel.Debug) + .AddConsole() + .AddEventLog(); + }); + var mLogger = migrateLogger.CreateLogger(); + + MigrateConfigFiles.Migrate(isDocker, new DirectoryService(mLogger, new FileSystem())); // Before anything, check if JWT has been generated properly or if user still has default if (!Configuration.CheckIfJwtTokenSet() && @@ -60,14 +74,16 @@ namespace API return; } + var directoryService = services.GetRequiredService(); - var requiresCoverImageMigration = !Directory.Exists(DirectoryService.CoverImageDirectory); + + var requiresCoverImageMigration = !Directory.Exists(directoryService.CoverImageDirectory); try { // If this is a new install, tables wont exist yet if (requiresCoverImageMigration) { - MigrateCoverImages.ExtractToImages(context); + MigrateCoverImages.ExtractToImages(context, directoryService, services.GetRequiredService()); } } catch (Exception) @@ -80,11 +96,11 @@ namespace API if (requiresCoverImageMigration) { - await MigrateCoverImages.UpdateDatabaseWithImages(context); + await MigrateCoverImages.UpdateDatabaseWithImages(context, directoryService); } await Seed.SeedRoles(roleManager); - await Seed.SeedSettings(context); + await Seed.SeedSettings(context, directoryService); await Seed.SeedUserApiKeys(context); } catch (Exception ex) diff --git a/API/Services/AccountService.cs b/API/Services/AccountService.cs index 0cc720bb6..0591770ec 100644 --- a/API/Services/AccountService.cs +++ b/API/Services/AccountService.cs @@ -3,12 +3,16 @@ using System.Linq; using System.Threading.Tasks; using API.Entities; using API.Errors; -using API.Interfaces.Services; using Microsoft.AspNetCore.Identity; using Microsoft.Extensions.Logging; namespace API.Services { + public interface IAccountService + { + Task> 
ChangeUserPassword(AppUser user, string newPassword); + } + public class AccountService : IAccountService { private readonly UserManager _userManager; diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs index 9d75adc55..17f88238f 100644 --- a/API/Services/ArchiveService.cs +++ b/API/Services/ArchiveService.cs @@ -10,7 +10,6 @@ using API.Archive; using API.Comparators; using API.Data.Metadata; using API.Extensions; -using API.Interfaces.Services; using API.Services.Tasks; using Kavita.Common; using Microsoft.Extensions.Logging; @@ -19,6 +18,18 @@ using SharpCompress.Common; namespace API.Services { + public interface IArchiveService + { + void ExtractArchive(string archivePath, string extractPath); + int GetNumberOfPagesFromArchive(string archivePath); + string GetCoverImage(string archivePath, string fileName); + bool IsValidArchive(string archivePath); + ComicInfo GetComicInfo(string archivePath); + ArchiveLibrary CanOpen(string archivePath); + bool ArchiveNeedsFlattening(ZipArchive archive); + Task> CreateZipForDownload(IEnumerable files, string tempFolder); + } + /// /// Responsible for manipulating Archive files. Used by and /// @@ -27,12 +38,14 @@ namespace API.Services { private readonly ILogger _logger; private readonly IDirectoryService _directoryService; + private readonly IImageService _imageService; private const string ComicInfoFilename = "comicinfo"; - public ArchiveService(ILogger logger, IDirectoryService directoryService) + public ArchiveService(ILogger logger, IDirectoryService directoryService, IImageService imageService) { _logger = logger; _directoryService = directoryService; + _imageService = imageService; } /// @@ -42,7 +55,7 @@ namespace API.Services /// public virtual ArchiveLibrary CanOpen(string archivePath) { - if (!(File.Exists(archivePath) && Parser.Parser.IsArchive(archivePath) || Parser.Parser.IsEpub(archivePath))) return ArchiveLibrary.NotSupported; + if (string.IsNullOrEmpty(archivePath) || !(File.Exists(archivePath) && Parser.Parser.IsArchive(archivePath) || Parser.Parser.IsEpub(archivePath))) return ArchiveLibrary.NotSupported; try { @@ -239,14 +252,14 @@ namespace API.Services { var dateString = DateTime.Now.ToShortDateString().Replace("/", "_"); - var tempLocation = Path.Join(DirectoryService.TempDirectory, $"{tempFolder}_{dateString}"); - DirectoryService.ExistOrCreate(tempLocation); + var tempLocation = Path.Join(_directoryService.TempDirectory, $"{tempFolder}_{dateString}"); + _directoryService.ExistOrCreate(tempLocation); if (!_directoryService.CopyFilesToDirectory(files, tempLocation)) { throw new KavitaException("Unable to copy files to temp directory archive download."); } - var zipPath = Path.Join(DirectoryService.TempDirectory, $"kavita_{tempFolder}_{dateString}.zip"); + var zipPath = Path.Join(_directoryService.TempDirectory, $"kavita_{tempFolder}_{dateString}.zip"); try { ZipFile.CreateFromDirectory(tempLocation, zipPath); @@ -260,7 +273,7 @@ namespace API.Services var fileBytes = await _directoryService.ReadFileAsync(zipPath); - DirectoryService.ClearAndDeleteDirectory(tempLocation); + _directoryService.ClearAndDeleteDirectory(tempLocation); // NOTE: For sending back just zip, just schedule this to be called after the file is returned or let next temp storage cleanup take care of it (new FileInfo(zipPath)).Delete(); return Tuple.Create(fileBytes, zipPath); @@ -270,7 +283,7 @@ namespace API.Services { try { - return ImageService.WriteCoverThumbnail(stream, fileName); + return _imageService.WriteCoverThumbnail(stream, 
fileName); } catch (Exception ex) { @@ -413,9 +426,9 @@ namespace API.Services } - private static void ExtractArchiveEntities(IEnumerable entries, string extractPath) + private void ExtractArchiveEntities(IEnumerable entries, string extractPath) { - DirectoryService.ExistOrCreate(extractPath); + _directoryService.ExistOrCreate(extractPath); foreach (var entry in entries) { entry.WriteToDirectory(extractPath, new ExtractionOptions() @@ -428,7 +441,7 @@ namespace API.Services private void ExtractArchiveEntries(ZipArchive archive, string extractPath) { - // NOTE: In cases where we try to extract, but there are InvalidPathChars, we need to inform the user + // TODO: In cases where we try to extract, but there are InvalidPathChars, we need to inform the user var needsFlattening = ArchiveNeedsFlattening(archive); if (!archive.HasFiles() && !needsFlattening) return; @@ -436,7 +449,7 @@ namespace API.Services if (!needsFlattening) return; _logger.LogDebug("Extracted archive is nested in root folder, flattening..."); - new DirectoryInfo(extractPath).Flatten(); + _directoryService.Flatten(extractPath); } /// diff --git a/API/Services/BookService.cs b/API/Services/BookService.cs index 84a70fdef..6af4de9f9 100644 --- a/API/Services/BookService.cs +++ b/API/Services/BookService.cs @@ -11,7 +11,6 @@ using System.Threading.Tasks; using System.Web; using API.Data.Metadata; using API.Entities.Enums; -using API.Interfaces.Services; using API.Parser; using Docnet.Core; using Docnet.Core.Converters; @@ -25,17 +24,45 @@ using VersOne.Epub; namespace API.Services { + public interface IBookService + { + int GetNumberOfPages(string filePath); + string GetCoverImage(string fileFilePath, string fileName); + Task> CreateKeyToPageMappingAsync(EpubBookRef book); + + /// + /// Scopes styles to .reading-section and replaces img src with the passed apiBase + /// + /// + /// + /// If the stylesheetHtml contains Import statements, when scoping the filename, scope needs to be with respect to the filepath. + /// Book Reference, needed if you expect Import statements + /// + Task ScopeStyles(string stylesheetHtml, string apiBase, string filename, EpubBookRef book); + ComicInfo GetComicInfo(string filePath); + ParserInfo ParseInfo(string filePath); + /// + /// Extracts a PDF file's pages as images to a target directory + /// + /// + /// Where the files will be extracted to. If doesn't exist, will be created.
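// Illustrative sketch (not part of this patch): how a caller might pair ExtractPdfImages with
// IDirectoryService, assuming both services come from DI and that API.Parser.Parser.ImageFileExtensions
// is the extension regex used elsewhere in this diff. Class name and paths here are examples only.
public class PdfPreviewSketch
{
    private readonly IBookService _bookService;
    private readonly IDirectoryService _directoryService;

    public PdfPreviewSketch(IBookService bookService, IDirectoryService directoryService)
    {
        _bookService = bookService;
        _directoryService = directoryService;
    }

    public string[] ExtractPages(string pdfPath)
    {
        // ExtractPdfImages creates the target directory if it does not exist.
        var target = _directoryService.FileSystem.Path.Join(_directoryService.TempDirectory, "pdf_preview");
        _bookService.ExtractPdfImages(pdfPath, target);
        return _directoryService.GetFilesWithExtension(target, API.Parser.Parser.ImageFileExtensions);
    }
}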
+ void ExtractPdfImages(string fileFilePath, string targetDirectory); + } + public class BookService : IBookService { private readonly ILogger _logger; + private readonly IDirectoryService _directoryService; + private readonly IImageService _imageService; private readonly StylesheetParser _cssParser = new (); private static readonly RecyclableMemoryStreamManager StreamManager = new (); private const string CssScopeClass = ".book-content"; - public BookService(ILogger logger) + public BookService(ILogger logger, IDirectoryService directoryService, IImageService imageService) { _logger = logger; - + _directoryService = directoryService; + _imageService = imageService; } private static bool HasClickableHrefPart(HtmlNode anchor) @@ -431,7 +458,7 @@ namespace API.Services public void ExtractPdfImages(string fileFilePath, string targetDirectory) { - DirectoryService.ExistOrCreate(targetDirectory); + _directoryService.ExistOrCreate(targetDirectory); using var docReader = DocLib.Instance.GetDocReader(fileFilePath, new PageDimensions(1080, 1920)); var pages = docReader.GetPageCount(); @@ -473,7 +500,7 @@ namespace API.Services if (coverImageContent == null) return string.Empty; using var stream = coverImageContent.GetContentStream(); - return ImageService.WriteCoverThumbnail(stream, fileName); + return _imageService.WriteCoverThumbnail(stream, fileName); } catch (Exception ex) { @@ -494,7 +521,7 @@ namespace API.Services using var stream = StreamManager.GetStream("BookService.GetPdfPage"); GetPdfPage(docReader, 0, stream); - return ImageService.WriteCoverThumbnail(stream, fileName); + return _imageService.WriteCoverThumbnail(stream, fileName); } catch (Exception ex) diff --git a/API/Services/CacheService.cs b/API/Services/CacheService.cs index a64bde675..8cdd88510 100644 --- a/API/Services/CacheService.cs +++ b/API/Services/CacheService.cs @@ -4,43 +4,50 @@ using System.IO; using System.Linq; using System.Threading.Tasks; using API.Comparators; +using API.Data; using API.Entities; using API.Entities.Enums; using API.Extensions; -using API.Interfaces; -using API.Interfaces.Services; using Microsoft.Extensions.Logging; namespace API.Services { + public interface ICacheService + { + /// + /// Ensures the cache is created for the given chapter and if not, will create it. Should be called before any other + /// cache operations (except cleanup). + /// + /// + /// Chapter for the passed chapterId. Side-effect from ensuring cache. + Task Ensure(int chapterId); + /// + /// Clears cache directory of all volumes. This can be invoked from deleting a library or a series. + /// + /// Volumes that belong to that library. Assume the library might have been deleted before this invocation. 
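// Illustrative sketch (not part of this patch): a typical consumer of ICacheService, assuming
// Ensure returns Task<Chapter> (generic type arguments are not visible in this rendering).
public class ReaderPageSketch
{
    private readonly ICacheService _cacheService;

    public ReaderPageSketch(ICacheService cacheService)
    {
        _cacheService = cacheService;
    }

    public async Task<string> GetPagePathAsync(int chapterId, int page)
    {
        // Ensure extracts the chapter's files into the cache directory as a side effect.
        var chapter = await _cacheService.Ensure(chapterId);
        if (chapter == null) return string.Empty;

        // Empty string means nothing was extracted for this chapter.
        return _cacheService.GetCachedPagePath(chapter, page);
    }
}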
+ void CleanupChapters(IEnumerable chapterIds); + string GetCachedPagePath(Chapter chapter, int page); + string GetCachedEpubFile(int chapterId, Chapter chapter); + public void ExtractChapterFiles(string extractPath, IReadOnlyList files); + } public class CacheService : ICacheService { private readonly ILogger _logger; private readonly IUnitOfWork _unitOfWork; - private readonly IArchiveService _archiveService; private readonly IDirectoryService _directoryService; - private readonly IBookService _bookService; + private readonly IReadingItemService _readingItemService; private readonly NumericComparer _numericComparer; - public CacheService(ILogger logger, IUnitOfWork unitOfWork, IArchiveService archiveService, - IDirectoryService directoryService, IBookService bookService) + public CacheService(ILogger logger, IUnitOfWork unitOfWork, + IDirectoryService directoryService, IReadingItemService readingItemService) { _logger = logger; _unitOfWork = unitOfWork; - _archiveService = archiveService; _directoryService = directoryService; - _bookService = bookService; + _readingItemService = readingItemService; _numericComparer = new NumericComparer(); } - public void EnsureCacheDirectory() - { - if (!DirectoryService.ExistOrCreate(DirectoryService.CacheDirectory)) - { - _logger.LogError("Cache directory {CacheDirectory} is not accessible or does not exist. Creating...", DirectoryService.CacheDirectory); - } - } - /// /// Returns the full path to the cached epub file. If the file does not exist, will fallback to the original. /// @@ -50,8 +57,8 @@ namespace API.Services public string GetCachedEpubFile(int chapterId, Chapter chapter) { var extractPath = GetCachePath(chapterId); - var path = Path.Join(extractPath, Path.GetFileName(chapter.Files.First().FilePath)); - if (!(new FileInfo(path).Exists)) + var path = Path.Join(extractPath, _directoryService.FileSystem.Path.GetFileName(chapter.Files.First().FilePath)); + if (!(_directoryService.FileSystem.FileInfo.FromFileName(path).Exists)) { path = chapter.Files.First().FilePath; } @@ -62,14 +69,14 @@ namespace API.Services /// Caches the files for the given chapter to CacheDirectory /// /// - /// This will always return the Chapter for the chpaterId + /// This will always return the Chapter for the chapterId public async Task Ensure(int chapterId) { - EnsureCacheDirectory(); + _directoryService.ExistOrCreate(_directoryService.CacheDirectory); var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId); var extractPath = GetCachePath(chapterId); - if (!Directory.Exists(extractPath)) + if (!_directoryService.Exists(extractPath)) { var files = chapter.Files.ToList(); ExtractChapterFiles(extractPath, files); @@ -90,22 +97,12 @@ namespace API.Services var removeNonImages = true; var fileCount = files.Count; var extraPath = ""; - var extractDi = new DirectoryInfo(extractPath); + var extractDi = _directoryService.FileSystem.DirectoryInfo.FromDirectoryName(extractPath); if (files.Count > 0 && files[0].Format == MangaFormat.Image) { - DirectoryService.ExistOrCreate(extractPath); - if (files.Count == 1) - { - _directoryService.CopyFileToDirectory(files[0].FilePath, extractPath); - } - else - { - DirectoryService.CopyDirectoryToDirectory(Path.GetDirectoryName(files[0].FilePath), extractPath, - Parser.Parser.ImageFileExtensions); - } - - extractDi.Flatten(); + _readingItemService.Extract(files[0].FilePath, extractPath, MangaFormat.Image, files.Count); + _directoryService.Flatten(extractDi.FullName); } foreach (var file in files) @@ -117,63 +114,37 @@ 
namespace API.Services if (file.Format == MangaFormat.Archive) { - _archiveService.ExtractArchive(file.FilePath, Path.Join(extractPath, extraPath)); + _readingItemService.Extract(file.FilePath, Path.Join(extractPath, extraPath), file.Format); } else if (file.Format == MangaFormat.Pdf) { - _bookService.ExtractPdfImages(file.FilePath, Path.Join(extractPath, extraPath)); + _readingItemService.Extract(file.FilePath, Path.Join(extractPath, extraPath), file.Format); } else if (file.Format == MangaFormat.Epub) { removeNonImages = false; - DirectoryService.ExistOrCreate(extractPath); + _directoryService.ExistOrCreate(extractPath); _directoryService.CopyFileToDirectory(files[0].FilePath, extractPath); } } - extractDi.Flatten(); + _directoryService.Flatten(extractDi.FullName); if (removeNonImages) { - extractDi.RemoveNonImages(); + _directoryService.RemoveNonImages(extractDi.FullName); } } - - public void Cleanup() - { - _logger.LogInformation("Performing cleanup of Cache directory"); - EnsureCacheDirectory(); - - try - { - DirectoryService.ClearDirectory(DirectoryService.CacheDirectory); - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup"); - } - - _logger.LogInformation("Cache directory purged"); - } - /// /// Removes the cached files and folders for a set of chapterIds /// /// public void CleanupChapters(IEnumerable chapterIds) { - _logger.LogInformation("Running Cache cleanup on Chapters"); - foreach (var chapter in chapterIds) { - var di = new DirectoryInfo(GetCachePath(chapter)); - if (di.Exists) - { - di.Delete(true); - } - + _directoryService.ClearDirectory(GetCachePath(chapter)); } - _logger.LogInformation("Cache directory purged"); } @@ -184,46 +155,29 @@ namespace API.Services /// private string GetCachePath(int chapterId) { - return Path.GetFullPath(Path.Join(DirectoryService.CacheDirectory, $"{chapterId}/")); + return _directoryService.FileSystem.Path.GetFullPath(_directoryService.FileSystem.Path.Join(_directoryService.CacheDirectory, $"{chapterId}/")); } - public async Task<(string path, MangaFile file)> GetCachedPagePath(Chapter chapter, int page) + /// + /// Returns the absolute path of a cached page. + /// + /// Chapter entity with Files populated. + /// Page number to look for + /// Page filepath or empty if no files found. + public string GetCachedPagePath(Chapter chapter, int page) { // Calculate what chapter the page belongs to - var pagesSoFar = 0; - var chapterFiles = chapter.Files ?? 
await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapter.Id); - foreach (var mangaFile in chapterFiles) + var path = GetCachePath(chapter.Id); + var files = _directoryService.GetFilesWithExtension(path, Parser.Parser.ImageFileExtensions); + Array.Sort(files, _numericComparer); + + if (files.Length == 0) { - if (page <= (mangaFile.Pages + pagesSoFar)) - { - var path = GetCachePath(chapter.Id); - var files = DirectoryService.GetFilesWithExtension(path, Parser.Parser.ImageFileExtensions); - Array.Sort(files, _numericComparer); - - if (files.Length == 0) - { - return (files.ElementAt(0), mangaFile); - } - - // Since array is 0 based, we need to keep that in account (only affects last image) - if (page == files.Length) - { - return (files.ElementAt(page - 1 - pagesSoFar), mangaFile); - } - - if (mangaFile.Format == MangaFormat.Image && mangaFile.Pages == 1) - { - // Each file is one page, meaning we should just get element at page - return (files.ElementAt(page), mangaFile); - } - - return (files.ElementAt(page - pagesSoFar), mangaFile); - } - - pagesSoFar += mangaFile.Pages; + return string.Empty; } - return (string.Empty, null); + // Since array is 0 based, we need to keep that in account (only affects last image) + return page == files.Length ? files.ElementAt(page - 1) : files.ElementAt(page); } } } diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs index 0d13a2ad2..69e3456f8 100644 --- a/API/Services/DirectoryService.cs +++ b/API/Services/DirectoryService.cs @@ -1,4 +1,5 @@ using System; +using System.Collections; using System.Collections.Generic; using System.Collections.Immutable; using System.IO; @@ -6,30 +7,74 @@ using System.IO.Abstractions; using System.Linq; using System.Text.RegularExpressions; using System.Threading.Tasks; -using API.Interfaces.Services; +using API.Comparators; using Microsoft.Extensions.Logging; namespace API.Services { + public interface IDirectoryService + { + IFileSystem FileSystem { get; } + string CacheDirectory { get; } + string CoverImageDirectory { get; } + string LogDirectory { get; } + string TempDirectory { get; } + string ConfigDirectory { get; } + /// + /// Lists out top-level folders for a given directory. Filters out System and Hidden folders. + /// + /// Absolute path of directory to scan. 
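// Illustrative note (not part of this patch): the page-to-file mapping in GetCachedPagePath above,
// restated as a standalone helper. With a cache folder of 0.jpg .. 4.jpg (5 files): page 0 -> 0.jpg,
// page 3 -> 3.jpg, and page 5 (== files.Length) falls back to the last image, 4.jpg.
public static class PageIndexSketch
{
    public static string PickPage(string[] sortedFiles, int page)
    {
        if (sortedFiles.Length == 0) return string.Empty;
        // Pages are zero-based; a request for page == Length is clamped to the final image.
        return page == sortedFiles.Length ? sortedFiles[^1] : sortedFiles[page];
    }
}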
+ /// List of folder names + IEnumerable ListDirectory(string rootPath); + Task ReadFileAsync(string path); + bool CopyFilesToDirectory(IEnumerable filePaths, string directoryPath, string prepend = ""); + bool Exists(string directory); + void CopyFileToDirectory(string fullFilePath, string targetDirectory); + int TraverseTreeParallelForEach(string root, Action action, string searchPattern, ILogger logger); + bool IsDriveMounted(string path); + long GetTotalSize(IEnumerable paths); + void ClearDirectory(string directoryPath); + void ClearAndDeleteDirectory(string directoryPath); + string[] GetFilesWithExtension(string path, string searchPatternExpression = ""); + bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = ""); + + Dictionary FindHighestDirectoriesFromFiles(IEnumerable libraryFolders, + IList filePaths); + + IEnumerable GetFoldersTillRoot(string rootPath, string fullPath); + + IEnumerable GetFiles(string path, string fileNameRegex = "", SearchOption searchOption = SearchOption.TopDirectoryOnly); + + bool ExistOrCreate(string directoryPath); + void DeleteFiles(IEnumerable files); + void RemoveNonImages(string directoryName); + void Flatten(string directoryName); + + } public class DirectoryService : IDirectoryService { - private readonly ILogger _logger; - private readonly IFileSystem _fileSystem; + public IFileSystem FileSystem { get; } + public string CacheDirectory { get; } + public string CoverImageDirectory { get; } + public string LogDirectory { get; } + public string TempDirectory { get; } + public string ConfigDirectory { get; } + private readonly ILogger _logger; private static readonly Regex ExcludeDirectories = new Regex( @"@eaDir|\.DS_Store", RegexOptions.Compiled | RegexOptions.IgnoreCase); - public static readonly string TempDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "temp"); - public static readonly string LogDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "logs"); - public static readonly string CacheDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "cache"); - public static readonly string CoverImageDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "covers"); public static readonly string BackupDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "backups"); - public static readonly string ConfigDirectory = Path.Join(Directory.GetCurrentDirectory(), "config"); public DirectoryService(ILogger logger, IFileSystem fileSystem) { _logger = logger; - _fileSystem = fileSystem; + FileSystem = fileSystem; + CoverImageDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "covers"); + CacheDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "cache"); + LogDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "logs"); + TempDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "temp"); + ConfigDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config"); } /// @@ -40,16 +85,16 @@ namespace API.Services /// Regex version of search pattern (ie \.mp3|\.mp4). Defaults to * meaning all files. 
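// Illustrative sketch (not part of this patch): because the config paths are now derived from the
// injected IFileSystem in the constructor above, DirectoryService can be exercised against an
// in-memory file system. Assumes System.IO.Abstractions.TestingHelpers and NSubstitute, as used in API.Tests.
public static class DirectoryServiceTestSketch
{
    public static DirectoryService CreateInMemory()
    {
        var fileSystem = new MockFileSystem();
        // CacheDirectory, TempDirectory, etc. resolve under fileSystem's current directory.
        return new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
    }
}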
/// SearchOption to use, defaults to TopDirectoryOnly /// List of file paths - private static IEnumerable GetFilesWithCertainExtensions(string path, + private IEnumerable GetFilesWithCertainExtensions(string path, string searchPatternExpression = "", SearchOption searchOption = SearchOption.TopDirectoryOnly) { - if (!Directory.Exists(path)) return ImmutableList.Empty; + if (!FileSystem.Directory.Exists(path)) return ImmutableList.Empty; var reSearchPattern = new Regex(searchPatternExpression, RegexOptions.IgnoreCase); - return Directory.EnumerateFiles(path, "*", searchOption) + return FileSystem.Directory.EnumerateFiles(path, "*", searchOption) .Where(file => - reSearchPattern.IsMatch(Path.GetExtension(file)) && !Path.GetFileName(file).StartsWith(Parser.Parser.MacOsMetadataFileStartsWith)); + reSearchPattern.IsMatch(FileSystem.Path.GetExtension(file)) && !FileSystem.Path.GetFileName(file).StartsWith(Parser.Parser.MacOsMetadataFileStartsWith)); } @@ -61,17 +106,17 @@ namespace API.Services /// /// /// - public static IEnumerable GetFoldersTillRoot(string rootPath, string fullPath) + public IEnumerable GetFoldersTillRoot(string rootPath, string fullPath) { - var separator = Path.AltDirectorySeparatorChar; - if (fullPath.Contains(Path.DirectorySeparatorChar)) + var separator = FileSystem.Path.AltDirectorySeparatorChar; + if (fullPath.Contains(FileSystem.Path.DirectorySeparatorChar)) { - fullPath = fullPath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + fullPath = fullPath.Replace(FileSystem.Path.DirectorySeparatorChar, FileSystem.Path.AltDirectorySeparatorChar); } if (rootPath.Contains(Path.DirectorySeparatorChar)) { - rootPath = rootPath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + rootPath = rootPath.Replace(FileSystem.Path.DirectorySeparatorChar, FileSystem.Path.AltDirectorySeparatorChar); } @@ -80,14 +125,15 @@ namespace API.Services var root = rootPath.EndsWith(separator) ? rootPath.Substring(0, rootPath.Length - 1) : rootPath; var paths = new List(); // If a file is at the end of the path, remove it before we start processing folders - if (Path.GetExtension(path) != string.Empty) + if (FileSystem.Path.GetExtension(path) != string.Empty) { path = path.Substring(0, path.LastIndexOf(separator)); } - while (Path.GetDirectoryName(path) != Path.GetDirectoryName(root)) + while (FileSystem.Path.GetDirectoryName(path) != Path.GetDirectoryName(root)) { - var folder = new DirectoryInfo(path).Name; + //var folder = new DirectoryInfo(path).Name; + var folder = FileSystem.DirectoryInfo.FromDirectoryName(path).Name; paths.Add(folder); path = path.Substring(0, path.LastIndexOf(separator)); } @@ -102,33 +148,54 @@ namespace API.Services /// public bool Exists(string directory) { - var di = new DirectoryInfo(directory); - return di.Exists; + var di = FileSystem.DirectoryInfo.FromDirectoryName(directory); + return di.Exists; } - public static IEnumerable GetFiles(string path, string searchPatternExpression = "", - SearchOption searchOption = SearchOption.TopDirectoryOnly) + /// + /// Get files given a path. + /// + /// This will automatically filter out restricted files, like MacOsMetadata files + /// + /// An optional regex string to search against. Will use file path to match against. 
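// Illustrative sketch (not part of this patch): the GetFiles implementation below matches the
// supplied regex against the file name and always skips MacOS "._" metadata files. The pattern
// here is an example only.
public static class GetFilesSketch
{
    public static IEnumerable<string> FindCbzRecursively(IDirectoryService directoryService, string libraryPath)
    {
        // Recursive search for .cbz files anywhere under the library path.
        return directoryService.GetFiles(libraryPath, @"\.cbz$", SearchOption.AllDirectories);
    }
}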
+ /// Defaults to top level directory only, can be given all to provide recursive searching + /// + public IEnumerable GetFiles(string path, string fileNameRegex = "", SearchOption searchOption = SearchOption.TopDirectoryOnly) { - if (searchPatternExpression != string.Empty) + // TODO: Refactor this and GetFilesWithCertainExtensions to use same implementation + if (!FileSystem.Directory.Exists(path)) return ImmutableList.Empty; + + if (fileNameRegex != string.Empty) { - if (!Directory.Exists(path)) return ImmutableList.Empty; - var reSearchPattern = new Regex(searchPatternExpression, RegexOptions.IgnoreCase); - return Directory.EnumerateFiles(path, "*", searchOption) + var reSearchPattern = new Regex(fileNameRegex, RegexOptions.IgnoreCase); + return FileSystem.Directory.EnumerateFiles(path, "*", searchOption) .Where(file => - reSearchPattern.IsMatch(file) && !file.StartsWith(Parser.Parser.MacOsMetadataFileStartsWith)); + { + var fileName = FileSystem.Path.GetFileName(file); + return reSearchPattern.IsMatch(fileName) && + !fileName.StartsWith(Parser.Parser.MacOsMetadataFileStartsWith); + }); } - return !Directory.Exists(path) ? Array.Empty() : Directory.GetFiles(path); + return FileSystem.Directory.EnumerateFiles(path, "*", searchOption).Where(file => + !FileSystem.Path.GetFileName(file).StartsWith(Parser.Parser.MacOsMetadataFileStartsWith)); } + /// + /// Copies a file into a directory. Does not maintain parent folder of file. + /// Will create target directory if doesn't exist. Automatically overwrites what is there. + /// + /// + /// public void CopyFileToDirectory(string fullFilePath, string targetDirectory) { try { - var fileInfo = new FileInfo(fullFilePath); + var fileInfo = FileSystem.FileInfo.FromFileName(fullFilePath); if (fileInfo.Exists) { - fileInfo.CopyTo(Path.Join(targetDirectory, fileInfo.Name), true); + ExistOrCreate(targetDirectory); + fileInfo.CopyTo(FileSystem.Path.Join(targetDirectory, fileInfo.Name), true); } } catch (Exception ex) @@ -138,19 +205,19 @@ namespace API.Services } /// - /// Copies a Directory with all files and subdirectories to a target location + /// Copies all files and subdirectories within a directory to a target location /// - /// - /// - /// Defaults to *, meaning all files - /// - /// - public static bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = "") + /// Directory to copy from. Does not copy the parent folder + /// Destination to copy to. Will be created if doesn't exist + /// Defaults to all files + /// If was successful + /// Thrown when source directory does not exist + public bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = "") { if (string.IsNullOrEmpty(sourceDirName)) return false; // Get the subdirectories for the specified directory. - var dir = new DirectoryInfo(sourceDirName); + var dir = FileSystem.DirectoryInfo.FromDirectoryName(sourceDirName); if (!dir.Exists) { @@ -165,17 +232,17 @@ namespace API.Services ExistOrCreate(destDirName); // Get the files in the directory and copy them to the new location. 
- var files = GetFilesWithExtension(dir.FullName, searchPattern).Select(n => new FileInfo(n)); + var files = GetFilesWithExtension(dir.FullName, searchPattern).Select(n => FileSystem.FileInfo.FromFileName(n)); foreach (var file in files) { - var tempPath = Path.Combine(destDirName, file.Name); + var tempPath = FileSystem.Path.Combine(destDirName, file.Name); file.CopyTo(tempPath, false); } // If copying subdirectories, copy them and their contents to new location. foreach (var subDir in dirs) { - var tempPath = Path.Combine(destDirName, subDir.Name); + var tempPath = FileSystem.Path.Combine(destDirName, subDir.Name); CopyDirectoryToDirectory(subDir.FullName, tempPath); } @@ -187,19 +254,20 @@ namespace API.Services /// /// /// - public static bool IsDriveMounted(string path) + public bool IsDriveMounted(string path) { - return new DirectoryInfo(Path.GetPathRoot(path) ?? string.Empty).Exists; + return FileSystem.DirectoryInfo.FromDirectoryName(FileSystem.Path.GetPathRoot(path) ?? string.Empty).Exists; } - public static string[] GetFilesWithExtension(string path, string searchPatternExpression = "") + public string[] GetFilesWithExtension(string path, string searchPatternExpression = "") { + // TODO: Use GitFiles instead if (searchPatternExpression != string.Empty) { return GetFilesWithCertainExtensions(path, searchPatternExpression).ToArray(); } - return !Directory.Exists(path) ? Array.Empty() : Directory.GetFiles(path); + return !FileSystem.Directory.Exists(path) ? Array.Empty() : FileSystem.Directory.GetFiles(path); } /// @@ -207,9 +275,9 @@ namespace API.Services /// /// /// Total bytes - public static long GetTotalSize(IEnumerable paths) + public long GetTotalSize(IEnumerable paths) { - return paths.Sum(path => new FileInfo(path).Length); + return paths.Sum(path => FileSystem.FileInfo.FromFileName(path).Length); } /// @@ -217,13 +285,13 @@ namespace API.Services /// /// /// - public static bool ExistOrCreate(string directoryPath) + public bool ExistOrCreate(string directoryPath) { - var di = new DirectoryInfo(directoryPath); + var di = FileSystem.DirectoryInfo.FromDirectoryName(directoryPath); if (di.Exists) return true; try { - Directory.CreateDirectory(directoryPath); + FileSystem.Directory.CreateDirectory(directoryPath); } catch (Exception) { @@ -236,11 +304,11 @@ namespace API.Services /// Deletes all files within the directory, then the directory itself. /// /// - public static void ClearAndDeleteDirectory(string directoryPath) + public void ClearAndDeleteDirectory(string directoryPath) { - if (!Directory.Exists(directoryPath)) return; + if (!FileSystem.Directory.Exists(directoryPath)) return; - DirectoryInfo di = new DirectoryInfo(directoryPath); + var di = FileSystem.DirectoryInfo.FromDirectoryName(directoryPath); ClearDirectory(directoryPath); @@ -248,13 +316,13 @@ namespace API.Services } /// - /// Deletes all files within the directory. 
+ /// Deletes all files and folders within the directory path /// /// /// - public static void ClearDirectory(string directoryPath) + public void ClearDirectory(string directoryPath) { - var di = new DirectoryInfo(directoryPath); + var di = FileSystem.DirectoryInfo.FromDirectoryName(directoryPath); if (!di.Exists) return; foreach (var file in di.EnumerateFiles()) @@ -274,7 +342,7 @@ namespace API.Services /// /// An optional string to prepend to the target file's name /// - public static bool CopyFilesToDirectory(IEnumerable filePaths, string directoryPath, string prepend = "", ILogger logger = null) + public bool CopyFilesToDirectory(IEnumerable filePaths, string directoryPath, string prepend = "") { ExistOrCreate(directoryPath); string currentFile = null; @@ -283,36 +351,36 @@ namespace API.Services foreach (var file in filePaths) { currentFile = file; - var fileInfo = new FileInfo(file); + var fileInfo = FileSystem.FileInfo.FromFileName(file); if (fileInfo.Exists) { - fileInfo.CopyTo(Path.Join(directoryPath, prepend + fileInfo.Name)); + fileInfo.CopyTo(FileSystem.Path.Join(directoryPath, prepend + fileInfo.Name)); } else { - logger?.LogWarning("Tried to copy {File} but it doesn't exist", file); + _logger.LogWarning("Tried to copy {File} but it doesn't exist", file); } } } catch (Exception ex) { - logger?.LogError(ex, "Unable to copy {File} to {DirectoryPath}", currentFile, directoryPath); + _logger.LogError(ex, "Unable to copy {File} to {DirectoryPath}", currentFile, directoryPath); return false; } return true; } - public bool CopyFilesToDirectory(IEnumerable filePaths, string directoryPath, string prepend = "") - { - return CopyFilesToDirectory(filePaths, directoryPath, prepend, _logger); - } - + /// + /// Lists all directories in a root path. Will exclude Hidden or System directories. + /// + /// + /// public IEnumerable ListDirectory(string rootPath) { - if (!Directory.Exists(rootPath)) return ImmutableList.Empty; + if (!FileSystem.Directory.Exists(rootPath)) return ImmutableList.Empty; - var di = new DirectoryInfo(rootPath); + var di = FileSystem.DirectoryInfo.FromDirectoryName(rootPath); var dirs = di.GetDirectories() .Where(dir => !(dir.Attributes.HasFlag(FileAttributes.Hidden) || dir.Attributes.HasFlag(FileAttributes.System))) .Select(d => d.Name).ToImmutableList(); @@ -320,20 +388,26 @@ namespace API.Services return dirs; } + /// + /// Reads a file's contents into byte[]. Returns empty array if file doesn't exist. + /// + /// + /// public async Task ReadFileAsync(string path) { - if (!File.Exists(path)) return Array.Empty(); - return await File.ReadAllBytesAsync(path); + if (!FileSystem.File.Exists(path)) return Array.Empty(); + return await FileSystem.File.ReadAllBytesAsync(path); } /// - /// Finds the highest directories from a set of MangaFiles + /// Finds the highest directories from a set of file paths. Does not return the root path; it will always select the highest non-root path. /// + /// If the file paths do not contain anything from libraryFolders, this returns an empty dictionary + /// List of top level folders which files belong to /// List of file paths that belong to libraryFolders /// - public static Dictionary FindHighestDirectoriesFromFiles(IEnumerable libraryFolders, IList filePaths) + public Dictionary FindHighestDirectoriesFromFiles(IEnumerable libraryFolders, IList filePaths) { var stopLookingForDirectories = false; var dirs = new Dictionary(); @@ -385,9 +459,10 @@ namespace API.Services // Data structure to hold names of subfolders to be examined for files.
var dirs = new Stack(); - if (!Directory.Exists(root)) { - throw new ArgumentException("The directory doesn't exist"); + if (!FileSystem.Directory.Exists(root)) { + throw new ArgumentException("The directory doesn't exist"); } + dirs.Push(root); while (dirs.Count > 0) { @@ -396,7 +471,7 @@ namespace API.Services string[] files; try { - subDirs = Directory.GetDirectories(currentDir).Where(path => ExcludeDirectories.Matches(path).Count == 0); + subDirs = FileSystem.Directory.GetDirectories(currentDir).Where(path => ExcludeDirectories.Matches(path).Count == 0); } // Thrown if we do not have discovery permission on the directory. catch (UnauthorizedAccessException e) { @@ -412,6 +487,7 @@ namespace API.Services } try { + // TODO: Replace this with GetFiles - It's the same code files = GetFilesWithCertainExtensions(currentDir, searchPattern) .ToArray(); } @@ -457,6 +533,7 @@ namespace API.Services if (ex is UnauthorizedAccessException) { // Here we just output a message and go on. Console.WriteLine(ex.Message); + _logger.LogError(ex, "Unauthorized access on file"); return true; } // Handle other exceptions here if necessary... @@ -478,13 +555,13 @@ namespace API.Services /// Attempts to delete the files passed to it. Swallows exceptions. /// /// Full path of files to delete - public static void DeleteFiles(IEnumerable files) + public void DeleteFiles(IEnumerable files) { foreach (var file in files) { try { - new FileInfo(file).Delete(); + FileSystem.FileInfo.FromFileName(file).Delete(); } catch (Exception) { @@ -547,5 +624,78 @@ namespace API.Services // Return formatted number with suffix return readable.ToString("0.## ") + suffix; } + + /// + /// Removes all files except images from the directory. Includes sub directories. + /// + /// Fully qualified directory + public void RemoveNonImages(string directoryName) + { + DeleteFiles(GetFiles(directoryName, searchOption:SearchOption.AllDirectories).Where(file => !Parser.Parser.IsImage(file))); + } + + + /// + /// Flattens all files in subfolders to the passed directory recursively. 
+ /// + /// + /// foo + /// ├── 1.txt + /// ├── 2.txt + /// ├── 3.txt + /// ├── 4.txt + /// └── bar + /// ├── 1.txt + /// ├── 2.txt + /// └── 5.txt + /// + /// becomes: + /// foo + /// ├── 1.txt + /// ├── 2.txt + /// ├── 3.txt + /// ├── 4.txt + /// ├── bar_1.txt + /// ├── bar_2.txt + /// └── bar_5.txt + /// + /// Fully qualified Directory name + public void Flatten(string directoryName) + { + if (string.IsNullOrEmpty(directoryName) || !FileSystem.Directory.Exists(directoryName)) return; + + var directory = FileSystem.DirectoryInfo.FromDirectoryName(directoryName); + + var index = 0; + FlattenDirectory(directory, directory, ref index); + } + + + private void FlattenDirectory(IDirectoryInfo root, IDirectoryInfo directory, ref int directoryIndex) + { + if (!root.FullName.Equals(directory.FullName)) + { + var fileIndex = 1; + + foreach (var file in directory.EnumerateFiles().OrderBy(file => file.FullName, new NaturalSortComparer())) + { + if (file.Directory == null) continue; + var paddedIndex = Parser.Parser.PadZeros(directoryIndex + ""); + // We need to rename the files so that after flattening, they are in the order we found them + var newName = $"{paddedIndex}_{Parser.Parser.PadZeros(fileIndex + "")}{file.Extension}"; + var newPath = Path.Join(root.FullName, newName); + if (!File.Exists(newPath)) file.MoveTo(newPath); + fileIndex++; + } + + directoryIndex++; + } + + var sort = new NaturalSortComparer(); + foreach (var subDirectory in directory.EnumerateDirectories().OrderBy(d => d.FullName, sort)) + { + FlattenDirectory(root, subDirectory, ref directoryIndex); + } + } } } diff --git a/API/Services/DownloadService.cs b/API/Services/DownloadService.cs index 7d0f56b3d..51830f0ab 100644 --- a/API/Services/DownloadService.cs +++ b/API/Services/DownloadService.cs @@ -3,56 +3,54 @@ using System.IO; using System.Linq; using System.Threading.Tasks; using API.Entities; -using API.Interfaces.Services; using Microsoft.AspNetCore.StaticFiles; -namespace API.Services +namespace API.Services; + +public interface IDownloadService { - public interface IDownloadService + Task<(byte[], string, string)> GetFirstFileDownload(IEnumerable files); + string GetContentTypeFromFile(string filepath); +} +public class DownloadService : IDownloadService +{ + private readonly IDirectoryService _directoryService; + private readonly FileExtensionContentTypeProvider _fileTypeProvider = new FileExtensionContentTypeProvider(); + + public DownloadService(IDirectoryService directoryService) { - Task<(byte[], string, string)> GetFirstFileDownload(IEnumerable files); - string GetContentTypeFromFile(string filepath); + _directoryService = directoryService; } - public class DownloadService : IDownloadService + + /// + /// Downloads the first file in the file enumerable for download + /// + /// + /// + public async Task<(byte[], string, string)> GetFirstFileDownload(IEnumerable files) { - private readonly IDirectoryService _directoryService; - private readonly FileExtensionContentTypeProvider _fileTypeProvider = new FileExtensionContentTypeProvider(); + var firstFile = files.Select(c => c.FilePath).First(); + return (await _directoryService.ReadFileAsync(firstFile), GetContentTypeFromFile(firstFile), Path.GetFileName(firstFile)); + } - public DownloadService(IDirectoryService directoryService) + public string GetContentTypeFromFile(string filepath) + { + // Figures out what the content type should be based on the file name. 
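// Illustrative expectations (not part of this patch) for the fallback mapping below, written in
// the xunit/NSubstitute style used by API.Tests:
public class DownloadServiceContentTypeSketch
{
    [Theory]
    [InlineData("file.cbz", "application/zip")]
    [InlineData("file.cbr", "application/vnd.rar")]
    [InlineData("file.epub", "application/epub+zip")]
    [InlineData("file.pdf", "application/pdf")]
    public void MapsComicExtensions(string fileName, string expected)
    {
        var service = new DownloadService(Substitute.For<IDirectoryService>());
        Assert.Equal(expected, service.GetContentTypeFromFile(fileName));
    }
}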
+ if (!_fileTypeProvider.TryGetContentType(filepath, out var contentType)) { - _directoryService = directoryService; - } - - /// - /// Downloads the first file in the file enumerable for download - /// - /// - /// - public async Task<(byte[], string, string)> GetFirstFileDownload(IEnumerable files) - { - var firstFile = files.Select(c => c.FilePath).First(); - return (await _directoryService.ReadFileAsync(firstFile), GetContentTypeFromFile(firstFile), Path.GetFileName(firstFile)); - } - - public string GetContentTypeFromFile(string filepath) - { - // Figures out what the content type should be based on the file name. - if (!_fileTypeProvider.TryGetContentType(filepath, out var contentType)) + contentType = Path.GetExtension(filepath).ToLowerInvariant() switch { - contentType = Path.GetExtension(filepath).ToLowerInvariant() switch - { - ".cbz" => "application/zip", - ".cbr" => "application/vnd.rar", - ".cb7" => "application/x-compressed", - ".epub" => "application/epub+zip", - ".7z" => "application/x-7z-compressed", - ".7zip" => "application/x-7z-compressed", - ".pdf" => "application/pdf", - _ => contentType - }; - } - - return contentType; + ".cbz" => "application/zip", + ".cbr" => "application/vnd.rar", + ".cb7" => "application/x-compressed", + ".epub" => "application/epub+zip", + ".7z" => "application/x-7z-compressed", + ".7zip" => "application/x-7z-compressed", + ".pdf" => "application/pdf", + _ => contentType + }; } + + return contentType; } } diff --git a/API/Services/HostedServices/StartupTasksHostedService.cs b/API/Services/HostedServices/StartupTasksHostedService.cs index 486b45513..866a90332 100644 --- a/API/Services/HostedServices/StartupTasksHostedService.cs +++ b/API/Services/HostedServices/StartupTasksHostedService.cs @@ -1,7 +1,6 @@ using System; using System.Threading; using System.Threading.Tasks; -using API.Interfaces; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; diff --git a/API/Services/ImageService.cs b/API/Services/ImageService.cs index 7f663c37d..e2884fbf0 100644 --- a/API/Services/ImageService.cs +++ b/API/Services/ImageService.cs @@ -3,19 +3,40 @@ using System.IO; using System.Linq; using API.Comparators; using API.Entities; -using API.Interfaces.Services; using Microsoft.Extensions.Logging; using NetVips; -namespace API.Services -{ +namespace API.Services; - public class ImageService : IImageService - { +public interface IImageService +{ + void ExtractImages(string fileFilePath, string targetDirectory, int fileCount); + string GetCoverImage(string path, string fileName); + string GetCoverFile(MangaFile file); + + /// + /// Creates a Thumbnail version of an image + /// + /// Path to the image file + /// File name with extension of the file. This will always write to + //string CreateThumbnail(string path, string fileName); + /// + /// Creates a Thumbnail version of a base64 image + /// + /// base64 encoded image + /// File name with extension of the file. 
This will always write to + string CreateThumbnailFromBase64(string encodedImage, string fileName); + + string WriteCoverThumbnail(Stream stream, string fileName); +} + +public class ImageService : IImageService +{ private readonly ILogger _logger; + private readonly IDirectoryService _directoryService; public const string ChapterCoverImageRegex = @"v\d+_c\d+"; - public const string SeriesCoverImageRegex = @"seres\d+"; - public const string CollectionTagCoverImageRegex = @"tag\d+"; + public const string SeriesCoverImageRegex = @"series_\d+"; + public const string CollectionTagCoverImageRegex = @"tag_\d+"; /// @@ -23,9 +44,24 @@ namespace API.Services /// private const int ThumbnailWidth = 320; - public ImageService(ILogger logger) + public ImageService(ILogger logger, IDirectoryService directoryService) { - _logger = logger; + _logger = logger; + _directoryService = directoryService; + } + + public void ExtractImages(string fileFilePath, string targetDirectory, int fileCount = 1) + { + _directoryService.ExistOrCreate(targetDirectory); + if (fileCount == 1) + { + _directoryService.CopyFileToDirectory(fileFilePath, targetDirectory); + } + else + { + _directoryService.CopyDirectoryToDirectory(Path.GetDirectoryName(fileFilePath), targetDirectory, + Parser.Parser.ImageFileExtensions); + } } /// @@ -35,53 +71,57 @@ namespace API.Services /// public string GetCoverFile(MangaFile file) { - var directory = Path.GetDirectoryName(file.FilePath); - if (string.IsNullOrEmpty(directory)) - { - _logger.LogError("Could not find Directory for {File}", file.FilePath); - return null; - } + var directory = Path.GetDirectoryName(file.FilePath); + if (string.IsNullOrEmpty(directory)) + { + _logger.LogError("Could not find Directory for {File}", file.FilePath); + return null; + } - var firstImage = DirectoryService.GetFilesWithExtension(directory, Parser.Parser.ImageFileExtensions) - .OrderBy(f => f, new NaturalSortComparer()).FirstOrDefault(); + var firstImage = _directoryService.GetFilesWithExtension(directory, Parser.Parser.ImageFileExtensions) + .OrderBy(f => f, new NaturalSortComparer()).FirstOrDefault(); - return firstImage; + return firstImage; } public string GetCoverImage(string path, string fileName) { - if (string.IsNullOrEmpty(path)) return string.Empty; + if (string.IsNullOrEmpty(path)) return string.Empty; - try - { - return CreateThumbnail(path, fileName); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "[GetCoverImage] There was an error and prevented thumbnail generation on {ImageFile}. Defaulting to no cover image", path); - } - - return string.Empty; - } - - /// - public string CreateThumbnail(string path, string fileName) - { try { + //return CreateThumbnail(path, fileName); using var thumbnail = Image.Thumbnail(path, ThumbnailWidth); var filename = fileName + ".png"; - thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, filename)); + thumbnail.WriteToFile(_directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, filename)); return filename; } - catch (Exception e) + catch (Exception ex) { - _logger.LogError(e, "Error creating thumbnail from url"); + _logger.LogWarning(ex, "[GetCoverImage] There was an error and prevented thumbnail generation on {ImageFile}. 
Defaulting to no cover image", path); } return string.Empty; } + /// + // public string CreateThumbnail(string path, string fileName) + // { + // try + // { + // using var thumbnail = Image.Thumbnail(path, ThumbnailWidth); + // var filename = fileName + ".png"; + // thumbnail.WriteToFile(_directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, filename)); + // return filename; + // } + // catch (Exception e) + // { + // _logger.LogError(e, "Error creating thumbnail from url"); + // } + // + // return string.Empty; + // } + /// /// Creates a thumbnail out of a memory stream and saves to with the passed /// fileName and .png extension. @@ -89,11 +129,11 @@ namespace API.Services /// Stream to write to disk. Ensure this is rewinded. /// filename to save as without extension /// File name with extension of the file. This will always write to - public static string WriteCoverThumbnail(Stream stream, string fileName) + public string WriteCoverThumbnail(Stream stream, string fileName) { using var thumbnail = Image.ThumbnailStream(stream, ThumbnailWidth); var filename = fileName + ".png"; - thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, fileName + ".png")); + thumbnail.WriteToFile(_directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, fileName + ".png")); return filename; } @@ -105,7 +145,7 @@ namespace API.Services { using var thumbnail = Image.ThumbnailBuffer(Convert.FromBase64String(encodedImage), ThumbnailWidth); var filename = fileName + ".png"; - thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, fileName + ".png")); + thumbnail.WriteToFile(_directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, fileName + ".png")); return filename; } catch (Exception e) @@ -146,5 +186,4 @@ namespace API.Services { return $"tag{tagId}"; } - } } diff --git a/API/Services/MetadataService.cs b/API/Services/MetadataService.cs index 81c1c753e..df1fe17d4 100644 --- a/API/Services/MetadataService.cs +++ b/API/Services/MetadataService.cs @@ -6,70 +6,53 @@ using System.Linq; using System.Threading.Tasks; using API.Comparators; using API.Data; -using API.Data.Metadata; using API.Data.Repositories; using API.Data.Scanner; using API.Entities; using API.Entities.Enums; using API.Extensions; using API.Helpers; -using API.Interfaces; -using API.Interfaces.Services; using API.SignalR; using Microsoft.AspNetCore.SignalR; using Microsoft.Extensions.Logging; namespace API.Services; +public interface IMetadataService +{ + /// + /// Recalculates metadata for all entities in a library. 
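// Illustrative sketch (not part of this patch): a minimal caller of the interface above, assuming
// IMetadataService is resolved from DI. Class and method names are examples only.
public class MetadataRefreshSketch
{
    private readonly IMetadataService _metadataService;

    public MetadataRefreshSketch(IMetadataService metadataService) => _metadataService = metadataService;

    public Task RefreshLibrary(int libraryId) => _metadataService.RefreshMetadata(libraryId, forceUpdate: false);

    // Forces cover/metadata regeneration for one series and its volumes/chapters.
    public Task ForceRefreshSeries(int libraryId, int seriesId) =>
        _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate: true);
}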
+ /// + /// + /// + Task RefreshMetadata(int libraryId, bool forceUpdate = false); + /// + /// Performs a forced refresh of metadata just for a series and its nested entities + /// + /// + /// + Task RefreshMetadataForSeries(int libraryId, int seriesId, bool forceUpdate = false); +} + public class MetadataService : IMetadataService { private readonly IUnitOfWork _unitOfWork; private readonly ILogger _logger; - private readonly IArchiveService _archiveService; - private readonly IBookService _bookService; - private readonly IImageService _imageService; private readonly IHubContext _messageHub; private readonly ICacheHelper _cacheHelper; + private readonly IReadingItemService _readingItemService; + private readonly IDirectoryService _directoryService; private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst(); public MetadataService(IUnitOfWork unitOfWork, ILogger logger, - IArchiveService archiveService, IBookService bookService, IImageService imageService, - IHubContext messageHub, ICacheHelper cacheHelper) + IHubContext messageHub, ICacheHelper cacheHelper, + IReadingItemService readingItemService, IDirectoryService directoryService) { _unitOfWork = unitOfWork; _logger = logger; - _archiveService = archiveService; - _bookService = bookService; - _imageService = imageService; _messageHub = messageHub; _cacheHelper = cacheHelper; - } - - /// - /// Gets the cover image for the file - /// - /// Has side effect of marking the file as updated - /// - /// - /// - /// - private string GetCoverImage(MangaFile file, int volumeId, int chapterId) - { - //file.UpdateLastModified(); - switch (file.Format) - { - case MangaFormat.Pdf: - case MangaFormat.Epub: - return _bookService.GetCoverImage(file.FilePath, ImageService.GetChapterFormat(chapterId, volumeId)); - case MangaFormat.Image: - var coverImage = _imageService.GetCoverFile(file); - return _imageService.GetCoverImage(coverImage, ImageService.GetChapterFormat(chapterId, volumeId)); - case MangaFormat.Archive: - return _archiveService.GetCoverImage(file.FilePath, ImageService.GetChapterFormat(chapterId, volumeId)); - case MangaFormat.Unknown: - default: - return string.Empty; - } - + _readingItemService = readingItemService; + _directoryService = directoryService; } /// @@ -81,11 +64,13 @@ public class MetadataService : IMetadataService { var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault(); - if (!_cacheHelper.ShouldUpdateCoverImage(Path.Join(DirectoryService.CoverImageDirectory, chapter.CoverImage), firstFile, chapter.Created, forceUpdate, chapter.CoverImageLocked)) + if (!_cacheHelper.ShouldUpdateCoverImage(_directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, chapter.CoverImage), firstFile, chapter.Created, forceUpdate, chapter.CoverImageLocked)) return false; + if (firstFile == null) return false; + _logger.LogDebug("[MetadataService] Generating cover image for {File}", firstFile?.FilePath); - chapter.CoverImage = GetCoverImage(firstFile, chapter.VolumeId, chapter.Id); + chapter.CoverImage = _readingItemService.GetCoverImage(firstFile.FilePath, ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId), firstFile.Format); return true; } @@ -101,7 +86,8 @@ public class MetadataService : IMetadataService private void UpdateChapterFromComicInfo(Chapter chapter, ICollection allPeople, MangaFile firstFile) { - var comicInfo = GetComicInfo(firstFile); // TODO: Think about letting the higher level loop have access for series to avoid duplicate
IO operations + // TODO: Think about letting the higher level loop have access for series to avoid duplicate IO operations + var comicInfo = _readingItemService.GetComicInfo(firstFile.FilePath, firstFile.Format); if (comicInfo == null) return; if (!string.IsNullOrEmpty(comicInfo.Title)) @@ -183,7 +169,7 @@ public class MetadataService : IMetadataService private bool UpdateVolumeCoverImage(Volume volume, bool forceUpdate) { // We need to check if Volume coverImage matches first chapters if forceUpdate is false - if (volume == null || !_cacheHelper.ShouldUpdateCoverImage(Path.Join(DirectoryService.CoverImageDirectory, volume.CoverImage), null, volume.Created, forceUpdate)) return false; + if (volume == null || !_cacheHelper.ShouldUpdateCoverImage(_directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, volume.CoverImage), null, volume.Created, forceUpdate)) return false; volume.Chapters ??= new List(); var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).FirstOrDefault(); @@ -203,7 +189,7 @@ public class MetadataService : IMetadataService if (series == null) return; // NOTE: This will fail if we replace the cover of the first volume on a first scan. Because the series will already have a cover image - if (!_cacheHelper.ShouldUpdateCoverImage(Path.Join(DirectoryService.CoverImageDirectory, series.CoverImage), null, series.Created, forceUpdate, series.CoverImageLocked)) + if (!_cacheHelper.ShouldUpdateCoverImage(_directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, series.CoverImage), null, series.Created, forceUpdate, series.CoverImageLocked)) return; series.Volumes ??= new List(); @@ -237,7 +223,7 @@ public class MetadataService : IMetadataService if (firstFile == null || _cacheHelper.HasFileNotChangedSinceCreationOrLastScan(firstChapter, forceUpdate, firstFile)) return; if (Parser.Parser.IsPdf(firstFile.FilePath)) return; - var comicInfo = GetComicInfo(firstFile); + var comicInfo = _readingItemService.GetComicInfo(firstFile.FilePath, firstFile.Format); if (comicInfo == null) return; @@ -280,7 +266,7 @@ public class MetadataService : IMetadataService var comicInfos = series.Volumes .SelectMany(volume => volume.Chapters) .SelectMany(c => c.Files) - .Select(GetComicInfo) + .Select(file => _readingItemService.GetComicInfo(file.FilePath, file.Format)) .Where(ci => ci != null) .ToList(); @@ -297,23 +283,12 @@ public class MetadataService : IMetadataService } - private ComicInfo GetComicInfo(MangaFile firstFile) - { - if (firstFile?.Format is MangaFormat.Archive or MangaFormat.Epub) - { - return Parser.Parser.IsEpub(firstFile.FilePath) ? _bookService.GetComicInfo(firstFile.FilePath) : _archiveService.GetComicInfo(firstFile.FilePath); - } - - return null; - } - /// /// /// - /// This cannot have any Async code within. 
It is used within Parallel.ForEach /// /// - private void ProcessSeriesMetadataUpdate(Series series, IDictionary> chapterIds, ICollection allPeople, ICollection allGenres, bool forceUpdate) + private void ProcessSeriesMetadataUpdate(Series series, ICollection allPeople, ICollection allGenres, bool forceUpdate) { _logger.LogDebug("[MetadataService] Processing series {SeriesName}", series.OriginalName); try @@ -376,7 +351,6 @@ public class MetadataService : IMetadataService }); _logger.LogDebug("[MetadataService] Fetched {SeriesCount} series for refresh", nonLibrarySeries.Count); - var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdWithSeriesIdForSeriesAsync(nonLibrarySeries.Select(s => s.Id).ToArray()); var allPeople = await _unitOfWork.PersonRepository.GetAllPeople(); var allGenres = await _unitOfWork.GenreRepository.GetAllGenres(); @@ -386,7 +360,7 @@ public class MetadataService : IMetadataService { try { - ProcessSeriesMetadataUpdate(series, chapterIds, allPeople, allGenres, forceUpdate); + ProcessSeriesMetadataUpdate(series, allPeople, allGenres, forceUpdate); } catch (Exception ex) { @@ -497,11 +471,10 @@ public class MetadataService : IMetadataService await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress, MessageFactory.RefreshMetadataProgressEvent(libraryId, 0F)); - var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdWithSeriesIdForSeriesAsync(new [] { seriesId }); var allPeople = await _unitOfWork.PersonRepository.GetAllPeople(); var allGenres = await _unitOfWork.GenreRepository.GetAllGenres(); - ProcessSeriesMetadataUpdate(series, chapterIds, allPeople, allGenres, forceUpdate); + ProcessSeriesMetadataUpdate(series, allPeople, allGenres, forceUpdate); await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadataProgress, MessageFactory.RefreshMetadataProgressEvent(libraryId, 1F)); diff --git a/API/Services/ReaderService.cs b/API/Services/ReaderService.cs new file mode 100644 index 000000000..c39ac3239 --- /dev/null +++ b/API/Services/ReaderService.cs @@ -0,0 +1,319 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using API.Comparators; +using API.Data; +using API.Data.Repositories; +using API.DTOs; +using API.Entities; +using Microsoft.Extensions.Logging; + +namespace API.Services; + +public interface IReaderService +{ + void MarkChaptersAsRead(AppUser user, int seriesId, IEnumerable chapters); + void MarkChaptersAsUnread(AppUser user, int seriesId, IEnumerable chapters); + Task SaveReadingProgress(ProgressDto progressDto, int userId); + Task CapPageToChapter(int chapterId, int page); + Task GetNextChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId); + Task GetPrevChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId); +} + +public class ReaderService : IReaderService +{ + private readonly IUnitOfWork _unitOfWork; + private readonly ILogger _logger; + private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer(); + private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst(); + + public ReaderService(IUnitOfWork unitOfWork, ILogger logger) + { + _unitOfWork = unitOfWork; + _logger = logger; + } + + /// + /// Marks all Chapters as Read by creating or updating UserProgress rows. Does not commit. 
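// Illustrative sketch (not part of this patch): because MarkChaptersAsRead does not commit, a caller
// is expected to persist through IUnitOfWork, roughly as below. Assumes the user was loaded with
// AppUserIncludes.Progress so user.Progresses is populated; names are examples only.
public class MarkReadSketch
{
    private readonly IUnitOfWork _unitOfWork;
    private readonly IReaderService _readerService;

    public MarkReadSketch(IUnitOfWork unitOfWork, IReaderService readerService)
    {
        _unitOfWork = unitOfWork;
        _readerService = readerService;
    }

    public async Task<bool> MarkVolumeReadAsync(AppUser user, int seriesId, Volume volume)
    {
        // Creates or updates AppUserProgress rows in memory only.
        _readerService.MarkChaptersAsRead(user, seriesId, volume.Chapters);
        _unitOfWork.UserRepository.Update(user);
        // The caller owns the commit.
        return await _unitOfWork.CommitAsync();
    }
}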
+ /// + /// + /// + /// + public void MarkChaptersAsRead(AppUser user, int seriesId, IEnumerable chapters) + { + foreach (var chapter in chapters) + { + var userProgress = GetUserProgressForChapter(user, chapter); + + if (userProgress == null) + { + user.Progresses.Add(new AppUserProgress + { + PagesRead = chapter.Pages, + VolumeId = chapter.VolumeId, + SeriesId = seriesId, + ChapterId = chapter.Id + }); + } + else + { + userProgress.PagesRead = chapter.Pages; + userProgress.SeriesId = seriesId; + userProgress.VolumeId = chapter.VolumeId; + } + } + } + + /// + /// Marks all Chapters as Unread by creating or updating UserProgress rows. Does not commit. + /// + /// + /// + /// + public void MarkChaptersAsUnread(AppUser user, int seriesId, IEnumerable chapters) + { + foreach (var chapter in chapters) + { + var userProgress = GetUserProgressForChapter(user, chapter); + + if (userProgress == null) + { + user.Progresses.Add(new AppUserProgress + { + PagesRead = 0, + VolumeId = chapter.VolumeId, + SeriesId = seriesId, + ChapterId = chapter.Id + }); + } + else + { + userProgress.PagesRead = 0; + userProgress.SeriesId = seriesId; + userProgress.VolumeId = chapter.VolumeId; + } + } + } + + /// + /// Gets the User Progress for a given Chapter. This will handle any duplicates that might have occurred in past versions and will delete them. Does not commit. + /// + /// + /// + /// + public static AppUserProgress GetUserProgressForChapter(AppUser user, Chapter chapter) + { + AppUserProgress userProgress = null; + try + { + userProgress = + user.Progresses.SingleOrDefault(x => x.ChapterId == chapter.Id && x.AppUserId == user.Id); + } + catch (Exception) + { + // There is a very rare chance that user progress will duplicate current row. If that happens, delete the one with fewer pages + var progresses = user.Progresses.Where(x => x.ChapterId == chapter.Id && x.AppUserId == user.Id).ToList(); + if (progresses.Count > 1) + { + user.Progresses = new List() + { + user.Progresses.First() + }; + userProgress = user.Progresses.First(); + } + } + + return userProgress; + } + + /// + /// Saves progress to DB + /// + /// + /// + /// + public async Task SaveReadingProgress(ProgressDto progressDto, int userId) + { + // Don't let user save past total pages.
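// Illustrative note (not part of this patch): CapPageToChapter, defined later in this file, clamps
// like Math.Clamp(page, 0, totalPages) after one DB call to fetch totalPages. For example, with
// totalPages = 24: a PageNum of -3 becomes 0, 10 stays 10, and 99 becomes 24.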
+ progressDto.PageNum = await CapPageToChapter(progressDto.ChapterId, progressDto.PageNum); + + try + { + var userProgress = + await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(progressDto.ChapterId, userId); + + if (userProgress == null) + { + // Create a user object + var userWithProgress = + await _unitOfWork.UserRepository.GetUserByIdAsync(userId, AppUserIncludes.Progress); + userWithProgress.Progresses ??= new List(); + userWithProgress.Progresses.Add(new AppUserProgress + { + PagesRead = progressDto.PageNum, + VolumeId = progressDto.VolumeId, + SeriesId = progressDto.SeriesId, + ChapterId = progressDto.ChapterId, + BookScrollId = progressDto.BookScrollId, + LastModified = DateTime.Now + }); + _unitOfWork.UserRepository.Update(userWithProgress); + } + else + { + userProgress.PagesRead = progressDto.PageNum; + userProgress.SeriesId = progressDto.SeriesId; + userProgress.VolumeId = progressDto.VolumeId; + userProgress.BookScrollId = progressDto.BookScrollId; + userProgress.LastModified = DateTime.Now; + _unitOfWork.AppUserProgressRepository.Update(userProgress); + } + + if (await _unitOfWork.CommitAsync()) + { + return true; + } + } + catch (Exception exception) + { + _logger.LogError(exception, "Could not save progress"); + await _unitOfWork.RollbackAsync(); + } + + return false; + } + + /// + /// Ensures that the page is within 0 and total pages for a chapter. Makes one DB call. + /// + /// + /// + /// + public async Task CapPageToChapter(int chapterId, int page) + { + var totalPages = await _unitOfWork.ChapterRepository.GetChapterTotalPagesAsync(chapterId); + if (page > totalPages) + { + page = totalPages; + } + + if (page < 0) + { + page = 0; + } + + return page; + } + + /// + /// Tries to find the next logical Chapter + /// + /// + /// V1 → V2 → V3 chapter 0 → V3 chapter 10 → SP 01 → SP 02 + /// + /// + /// + /// + /// + /// -1 if nothing can be found + public async Task GetNextChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId) + { + var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).ToList(); + var currentVolume = volumes.Single(v => v.Id == volumeId); + var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId); + + if (currentVolume.Number == 0) + { + // Handle specials by sorting on their Filename aka Range + var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, new NaturalSortComparer()), currentChapter.Number); + if (chapterId > 0) return chapterId; + } + + foreach (var volume in volumes) + { + if (volume.Number == currentVolume.Number && volume.Chapters.Count > 1) + { + // Handle Chapters within current Volume + // In this case, i need 0 first because 0 represents a full volume file. + var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting), currentChapter.Number); + if (chapterId > 0) return chapterId; + } + + if (volume.Number != currentVolume.Number + 1) continue; + + // Handle Chapters within next Volume + // ! 
When selecting the chapter for the next volume, we need to make sure a c0 comes before a c1+ + var chapters = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).ToList(); + if (currentChapter.Number.Equals("0") && chapters.Last().Number.Equals("0")) + { + return chapters.Last().Id; + } + + var firstChapter = chapters.FirstOrDefault(); + if (firstChapter == null) return -1; + return firstChapter.Id; + + } + + return -1; + } + /// + /// Tries to find the prev logical Chapter + /// + /// + /// V1 ← V2 ← V3 chapter 0 ← V3 chapter 10 ← SP 01 ← SP 02 + /// + /// + /// + /// + /// + /// -1 if nothing can be found + public async Task GetPrevChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId) + { + var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).Reverse().ToList(); + var currentVolume = volumes.Single(v => v.Id == volumeId); + var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId); + + if (currentVolume.Number == 0) + { + var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, new NaturalSortComparer()).Reverse(), currentChapter.Number); + if (chapterId > 0) return chapterId; + } + + foreach (var volume in volumes) + { + if (volume.Number == currentVolume.Number) + { + var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).Reverse(), currentChapter.Number); + if (chapterId > 0) return chapterId; + } + if (volume.Number == currentVolume.Number - 1) + { + var lastChapter = volume.Chapters + .OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).LastOrDefault(); + if (lastChapter == null) return -1; + return lastChapter.Id; + } + } + return -1; + } + + private static int GetNextChapterId(IEnumerable chapters, string currentChapterNumber) + { + var next = false; + var chaptersList = chapters.ToList(); + foreach (var chapter in chaptersList) + { + if (next) + { + return chapter.Id; + } + if (currentChapterNumber.Equals(chapter.Number)) next = true; + } + + return -1; + } + + +} diff --git a/API/Services/ReadingItemService.cs b/API/Services/ReadingItemService.cs new file mode 100644 index 000000000..07b5083af --- /dev/null +++ b/API/Services/ReadingItemService.cs @@ -0,0 +1,126 @@ +using System; +using API.Data.Metadata; +using API.Entities.Enums; +using API.Parser; + +namespace API.Services; + +public interface IReadingItemService +{ + ComicInfo GetComicInfo(string filePath, MangaFormat format); + int GetNumberOfPages(string filePath, MangaFormat format); + string GetCoverImage(string fileFilePath, string fileName, MangaFormat format); + void Extract(string fileFilePath, string targetDirectory, MangaFormat format, int imageCount = 1); + ParserInfo Parse(string path, string rootPath, LibraryType type); +} + +public class ReadingItemService : IReadingItemService +{ + private readonly IArchiveService _archiveService; + private readonly IBookService _bookService; + private readonly IImageService _imageService; + private readonly DefaultParser _defaultParser; + + public ReadingItemService(IArchiveService archiveService, IBookService bookService, IImageService imageService, IDirectoryService directoryService) + { + _archiveService = archiveService; + _bookService = bookService; + _imageService = imageService; + + _defaultParser = new DefaultParser(directoryService); + } + + /// + /// Gets the ComicInfo for the file if it exists. Null otherewise. 
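The next/previous-chapter lookups that end just above both reduce to the same helper: walk an ordered chapter sequence and return the id of the element that follows the current chapter number, or -1 when nothing follows. A standalone sketch of that traversal over plain value tuples (the real code operates on chapter DTOs):

using System.Collections.Generic;

public static class ChapterTraversalSketch
{
    // Returns the Id that comes after currentNumber in the given order, or -1 if there is none.
    public static int NextChapterId(IEnumerable<(int Id, string Number)> orderedChapters, string currentNumber)
    {
        var takeNext = false;
        foreach (var chapter in orderedChapters)
        {
            if (takeNext) return chapter.Id;
            if (chapter.Number == currentNumber) takeNext = true;
        }
        return -1;
    }
}

Callers vary only in how they order the sequence: specials sort naturally on Range, regular chapters sort numerically, and the previous-chapter path reverses the ordering before calling the same helper.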
+ /// + /// Fully qualified path of file + /// Format of the file determines how we open it (epub vs comicinfo.xml) + /// + public ComicInfo? GetComicInfo(string filePath, MangaFormat format) + { + if (format is MangaFormat.Archive or MangaFormat.Epub) + { + return Parser.Parser.IsEpub(filePath) ? _bookService.GetComicInfo(filePath) : _archiveService.GetComicInfo(filePath); + } + + return null; + } + + /// + /// + /// + /// + /// + /// + public int GetNumberOfPages(string filePath, MangaFormat format) + { + switch (format) + { + case MangaFormat.Archive: + { + return _archiveService.GetNumberOfPagesFromArchive(filePath); + } + case MangaFormat.Pdf: + case MangaFormat.Epub: + { + return _bookService.GetNumberOfPages(filePath); + } + case MangaFormat.Image: + { + return 1; + } + case MangaFormat.Unknown: + default: + return 0; + } + } + + public string GetCoverImage(string filePath, string fileName, MangaFormat format) + { + if (string.IsNullOrEmpty(filePath) || string.IsNullOrEmpty(fileName)) + { + return string.Empty; + } + return format switch + { + MangaFormat.Epub => _bookService.GetCoverImage(filePath, fileName), + MangaFormat.Archive => _archiveService.GetCoverImage(filePath, fileName), + MangaFormat.Image => _imageService.GetCoverImage(filePath, fileName), + _ => string.Empty + }; + } + + /// + /// Extracts the reading item to the target directory using the appropriate method + /// + /// File to extract + /// Where to extract to. Will be created if does not exist + /// Format of the File + /// If the file is of type image, pass number of files needed. If > 0, will copy the whole directory. + /// + public void Extract(string fileFilePath, string targetDirectory, MangaFormat format, int imageCount = 1) + { + switch (format) + { + case MangaFormat.Pdf: + _bookService.ExtractPdfImages(fileFilePath, targetDirectory); + break; + case MangaFormat.Archive: + _archiveService.ExtractArchive(fileFilePath, targetDirectory); + break; + case MangaFormat.Image: + _imageService.ExtractImages(fileFilePath, targetDirectory, imageCount); + break; + case MangaFormat.Unknown: + case MangaFormat.Epub: + break; + default: + throw new ArgumentOutOfRangeException(nameof(format), format, null); + } + } + + public ParserInfo Parse(string path, string rootPath, LibraryType type) + { + return Parser.Parser.IsEpub(path) ? 
_bookService.ParseInfo(path) : _defaultParser.Parse(path, rootPath, type); + } +} diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs index f042e83de..ec355eeed 100644 --- a/API/Services/TaskScheduler.cs +++ b/API/Services/TaskScheduler.cs @@ -1,177 +1,187 @@ using System; using System.Threading; using System.Threading.Tasks; +using API.Data; using API.Entities.Enums; using API.Helpers.Converters; -using API.Interfaces; -using API.Interfaces.Services; +using API.Services.Tasks; using Hangfire; using Microsoft.Extensions.Logging; -namespace API.Services +namespace API.Services; + +public interface ITaskScheduler { - public class TaskScheduler : ITaskScheduler + void ScheduleTasks(); + Task ScheduleStatsTasks(); + void ScheduleUpdaterTasks(); + void ScanLibrary(int libraryId, bool forceUpdate = false); + void CleanupChapters(int[] chapterIds); + void RefreshMetadata(int libraryId, bool forceUpdate = true); + void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false); + void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false); + void CancelStatsTasks(); + Task RunStatCollection(); +} +public class TaskScheduler : ITaskScheduler +{ + private readonly ICacheService _cacheService; + private readonly ILogger _logger; + private readonly IScannerService _scannerService; + private readonly IUnitOfWork _unitOfWork; + private readonly IMetadataService _metadataService; + private readonly IBackupService _backupService; + private readonly ICleanupService _cleanupService; + + private readonly IStatsService _statsService; + private readonly IVersionUpdaterService _versionUpdaterService; + private readonly IDirectoryService _directoryService; + + public static BackgroundJobServer Client => new BackgroundJobServer(); + private static readonly Random Rnd = new Random(); + + + public TaskScheduler(ICacheService cacheService, ILogger logger, IScannerService scannerService, + IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, + ICleanupService cleanupService, IStatsService statsService, IVersionUpdaterService versionUpdaterService, + IDirectoryService directoryService) { - private readonly ICacheService _cacheService; - private readonly ILogger _logger; - private readonly IScannerService _scannerService; - private readonly IUnitOfWork _unitOfWork; - private readonly IMetadataService _metadataService; - private readonly IBackupService _backupService; - private readonly ICleanupService _cleanupService; + _cacheService = cacheService; + _logger = logger; + _scannerService = scannerService; + _unitOfWork = unitOfWork; + _metadataService = metadataService; + _backupService = backupService; + _cleanupService = cleanupService; + _statsService = statsService; + _versionUpdaterService = versionUpdaterService; + _directoryService = directoryService; + } - private readonly IStatsService _statsService; - private readonly IVersionUpdaterService _versionUpdaterService; + public void ScheduleTasks() + { + _logger.LogInformation("Scheduling reoccurring tasks"); - public static BackgroundJobServer Client => new BackgroundJobServer(); - private static readonly Random Rnd = new Random(); - - - public TaskScheduler(ICacheService cacheService, ILogger logger, IScannerService scannerService, - IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, - ICleanupService cleanupService, IStatsService statsService, IVersionUpdaterService versionUpdaterService) + var setting = Task.Run(() => 
_unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).GetAwaiter().GetResult().Value; + if (setting != null) { - _cacheService = cacheService; - _logger = logger; - _scannerService = scannerService; - _unitOfWork = unitOfWork; - _metadataService = metadataService; - _backupService = backupService; - _cleanupService = cleanupService; - _statsService = statsService; - _versionUpdaterService = versionUpdaterService; + var scanLibrarySetting = setting; + _logger.LogDebug("Scheduling Scan Library Task for {Setting}", scanLibrarySetting); + RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), + () => CronConverter.ConvertToCronNotation(scanLibrarySetting), TimeZoneInfo.Local); + } + else + { + RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), Cron.Daily, TimeZoneInfo.Local); } - public void ScheduleTasks() + setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Result.Value; + if (setting != null) { - _logger.LogInformation("Scheduling reoccurring tasks"); - - var setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).GetAwaiter().GetResult().Value; - if (setting != null) - { - var scanLibrarySetting = setting; - _logger.LogDebug("Scheduling Scan Library Task for {Setting}", scanLibrarySetting); - RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), - () => CronConverter.ConvertToCronNotation(scanLibrarySetting), TimeZoneInfo.Local); - } - else - { - RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), Cron.Daily, TimeZoneInfo.Local); - } - - setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Result.Value; - if (setting != null) - { - _logger.LogDebug("Scheduling Backup Task for {Setting}", setting); - RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting), TimeZoneInfo.Local); - } - else - { - RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), Cron.Weekly, TimeZoneInfo.Local); - } - - RecurringJob.AddOrUpdate("cleanup", () => _cleanupService.Cleanup(), Cron.Daily, TimeZoneInfo.Local); - + _logger.LogDebug("Scheduling Backup Task for {Setting}", setting); + RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting), TimeZoneInfo.Local); + } + else + { + RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), Cron.Weekly, TimeZoneInfo.Local); } - #region StatsTasks + RecurringJob.AddOrUpdate("cleanup", () => _cleanupService.Cleanup(), Cron.Daily, TimeZoneInfo.Local); + + } + + #region StatsTasks - public async Task ScheduleStatsTasks() + public async Task ScheduleStatsTasks() + { + var allowStatCollection = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).AllowStatCollection; + if (!allowStatCollection) { - var allowStatCollection = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).AllowStatCollection; - if (!allowStatCollection) - { - _logger.LogDebug("User has opted out of stat collection, not registering tasks"); - return; - } - - _logger.LogDebug("Scheduling stat collection daily"); - RecurringJob.AddOrUpdate("report-stats", () => _statsService.Send(), Cron.Daily(Rnd.Next(0, 22)), TimeZoneInfo.Local); + _logger.LogDebug("User has opted out of stat collection, not registering tasks"); + return; } - public void 
CancelStatsTasks() + _logger.LogDebug("Scheduling stat collection daily"); + RecurringJob.AddOrUpdate("report-stats", () => _statsService.Send(), Cron.Daily(Rnd.Next(0, 22)), TimeZoneInfo.Local); + } + + public void CancelStatsTasks() + { + _logger.LogDebug("Cancelling/Removing StatsTasks"); + + RecurringJob.RemoveIfExists("report-stats"); + } + + /// + /// First time run stat collection. Executes immediately on a background thread. Does not block. + /// + public async Task RunStatCollection() + { + var allowStatCollection = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).AllowStatCollection; + if (!allowStatCollection) { - _logger.LogDebug("Cancelling/Removing StatsTasks"); - - RecurringJob.RemoveIfExists("report-stats"); + _logger.LogDebug("User has opted out of stat collection, not sending stats"); + return; } + BackgroundJob.Enqueue(() => _statsService.Send()); + } - /// - /// First time run stat collection. Executes immediately on a background thread. Does not block. - /// - public async Task RunStatCollection() - { - var allowStatCollection = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).AllowStatCollection; - if (!allowStatCollection) - { - _logger.LogDebug("User has opted out of stat collection, not sending stats"); - return; - } - BackgroundJob.Enqueue(() => _statsService.Send()); - } + #endregion - #endregion + #region UpdateTasks - #region UpdateTasks + public void ScheduleUpdaterTasks() + { + _logger.LogInformation("Scheduling Auto-Update tasks"); + // Schedule update check between noon and 6pm local time + RecurringJob.AddOrUpdate("check-updates", () => CheckForUpdate(), Cron.Daily(Rnd.Next(12, 18)), TimeZoneInfo.Local); + } + #endregion - public void ScheduleUpdaterTasks() - { - _logger.LogInformation("Scheduling Auto-Update tasks"); - // Schedule update check between noon and 6pm local time - RecurringJob.AddOrUpdate("check-updates", () => CheckForUpdate(), Cron.Daily(Rnd.Next(12, 18)), TimeZoneInfo.Local); - } - #endregion + public void ScanLibrary(int libraryId, bool forceUpdate = false) + { + _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId); + BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId)); + // When we do a scan, force cache to re-unpack in case page numbers change + BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheDirectory()); + } - public void ScanLibrary(int libraryId, bool forceUpdate = false) - { - _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId); - BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId)); - // When we do a scan, force cache to re-unpack in case page numbers change - BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheDirectory()); - } + public void CleanupChapters(int[] chapterIds) + { + BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds)); + } - public void CleanupChapters(int[] chapterIds) - { - BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds)); - } + public void RefreshMetadata(int libraryId, bool forceUpdate = true) + { + _logger.LogInformation("Enqueuing library metadata refresh for: {LibraryId}", libraryId); + BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate)); + } - public void RefreshMetadata(int libraryId, bool forceUpdate = true) - { - _logger.LogInformation("Enqueuing library metadata refresh for: {LibraryId}", libraryId); - BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate)); - } + public void 
RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = true) + { + _logger.LogInformation("Enqueuing series metadata refresh for: {SeriesId}", seriesId); + BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate)); + } - public void CleanupTemp() - { - BackgroundJob.Enqueue(() => DirectoryService.ClearDirectory(DirectoryService.TempDirectory)); - } + public void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false) + { + _logger.LogInformation("Enqueuing series scan for: {SeriesId}", seriesId); + BackgroundJob.Enqueue(() => _scannerService.ScanSeries(libraryId, seriesId, CancellationToken.None)); + } - public void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = true) - { - _logger.LogInformation("Enqueuing series metadata refresh for: {SeriesId}", seriesId); - BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate)); - } + public void BackupDatabase() + { + BackgroundJob.Enqueue(() => _backupService.BackupDatabase()); + } - public void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false) - { - _logger.LogInformation("Enqueuing series scan for: {SeriesId}", seriesId); - BackgroundJob.Enqueue(() => _scannerService.ScanSeries(libraryId, seriesId, CancellationToken.None)); - } - - public void BackupDatabase() - { - BackgroundJob.Enqueue(() => _backupService.BackupDatabase()); - } - - /// - /// Not an external call. Only public so that we can call this for a Task - /// - // ReSharper disable once MemberCanBePrivate.Global - public async Task CheckForUpdate() - { - var update = await _versionUpdaterService.CheckForUpdate(); - await _versionUpdaterService.PushUpdate(update); - } + /// + /// Not an external call. 
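Most of the scheduler methods above follow one of two Hangfire shapes: fire-and-forget work via BackgroundJob.Enqueue, or a named recurring job via RecurringJob.AddOrUpdate driven by a stored cron setting with a default fallback. A condensed sketch of the recurring case, using a hypothetical IScanInvoker in place of the real scanner service and a plain cron string instead of the project's CronConverter callback:

using System.Threading.Tasks;
using Hangfire;

public interface IScanInvoker
{
    Task ScanLibraries();
}

public static class RecurringScheduleSketch
{
    // Registers (or replaces) the named recurring job. The invoker type must be resolvable
    // by Hangfire's activator (normally the DI container) when the job actually runs.
    public static void ScheduleLibraryScan(IScanInvoker scanner, string configuredCron)
    {
        var cron = string.IsNullOrEmpty(configuredCron) ? Cron.Daily() : configuredCron;
        RecurringJob.AddOrUpdate("scan-libraries", () => scanner.ScanLibraries(), cron);
    }
}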
Only public so that we can call this for a Task + /// + // ReSharper disable once MemberCanBePrivate.Global + public async Task CheckForUpdate() + { + var update = await _versionUpdaterService.CheckForUpdate(); + await _versionUpdaterService.PushUpdate(update); } } diff --git a/API/Services/Tasks/BackupService.cs b/API/Services/Tasks/BackupService.cs index 04cb279ec..eaf5d0fda 100644 --- a/API/Services/Tasks/BackupService.cs +++ b/API/Services/Tasks/BackupService.cs @@ -4,204 +4,166 @@ using System.IO; using System.IO.Compression; using System.Linq; using System.Threading.Tasks; +using API.Data; using API.Entities.Enums; using API.Extensions; -using API.Interfaces; -using API.Interfaces.Services; using API.SignalR; using Hangfire; using Microsoft.AspNetCore.SignalR; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Logging; -namespace API.Services.Tasks +namespace API.Services.Tasks; + +public interface IBackupService { - public class BackupService : IBackupService - { - private readonly IUnitOfWork _unitOfWork; - private readonly ILogger _logger; - private readonly IDirectoryService _directoryService; - private readonly IHubContext _messageHub; - - private readonly IList _backupFiles; - - public BackupService(IUnitOfWork unitOfWork, ILogger logger, - IDirectoryService directoryService, IConfiguration config, IHubContext messageHub) - { - _unitOfWork = unitOfWork; - _logger = logger; - _directoryService = directoryService; - _messageHub = messageHub; - - var maxRollingFiles = config.GetMaxRollingFiles(); - var loggingSection = config.GetLoggingFileName(); - var files = LogFiles(maxRollingFiles, loggingSection); - - - _backupFiles = new List() - { - "appsettings.json", - "Hangfire.db", // This is not used atm - "Hangfire-log.db", // This is not used atm - "kavita.db", - "kavita.db-shm", // This wont always be there - "kavita.db-wal" // This wont always be there - }; - - foreach (var file in files.Select(f => (new FileInfo(f)).Name).ToList()) - { - _backupFiles.Add(file); - } - } - - public IEnumerable LogFiles(int maxRollingFiles, string logFileName) - { - var multipleFileRegex = maxRollingFiles > 0 ? @"\d*" : string.Empty; - var fi = new FileInfo(logFileName); - - var files = maxRollingFiles > 0 - ? DirectoryService.GetFiles(DirectoryService.LogDirectory, $@"{Path.GetFileNameWithoutExtension(fi.Name)}{multipleFileRegex}\.log") - : new[] {"kavita.log"}; - return files; - } - - /// - /// Will backup anything that needs to be backed up. This includes logs, setting files, bare minimum cover images (just locked and first cover). 
- /// - [AutomaticRetry(Attempts = 3, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Fail)] - public async Task BackupDatabase() - { - _logger.LogInformation("Beginning backup of Database at {BackupTime}", DateTime.Now); - var backupDirectory = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Result.Value; - - _logger.LogDebug("Backing up to {BackupDirectory}", backupDirectory); - if (!DirectoryService.ExistOrCreate(backupDirectory)) - { - _logger.LogCritical("Could not write to {BackupDirectory}; aborting backup", backupDirectory); - return; - } - - await SendProgress(0F); - - var dateString = $"{DateTime.Now.ToShortDateString()}_{DateTime.Now.ToLongTimeString()}".Replace("/", "_").Replace(":", "_"); - var zipPath = Path.Join(backupDirectory, $"kavita_backup_{dateString}.zip"); - - if (File.Exists(zipPath)) - { - _logger.LogInformation("{ZipFile} already exists, aborting", zipPath); - return; - } - - var tempDirectory = Path.Join(DirectoryService.TempDirectory, dateString); - DirectoryService.ExistOrCreate(tempDirectory); - DirectoryService.ClearDirectory(tempDirectory); - - _directoryService.CopyFilesToDirectory( - _backupFiles.Select(file => Path.Join(DirectoryService.ConfigDirectory, file)).ToList(), tempDirectory); - - await SendProgress(0.25F); - - await CopyCoverImagesToBackupDirectory(tempDirectory); - - await SendProgress(0.75F); - - try - { - ZipFile.CreateFromDirectory(tempDirectory, zipPath); - } - catch (AggregateException ex) - { - _logger.LogError(ex, "There was an issue when archiving library backup"); - } - - DirectoryService.ClearAndDeleteDirectory(tempDirectory); - _logger.LogInformation("Database backup completed"); - await SendProgress(1F); - } - - private async Task CopyCoverImagesToBackupDirectory(string tempDirectory) - { - var outputTempDir = Path.Join(tempDirectory, "covers"); - DirectoryService.ExistOrCreate(outputTempDir); - - try - { - var seriesImages = await _unitOfWork.SeriesRepository.GetLockedCoverImagesAsync(); - _directoryService.CopyFilesToDirectory( - seriesImages.Select(s => Path.Join(DirectoryService.CoverImageDirectory, s)), outputTempDir); - - var collectionTags = await _unitOfWork.CollectionTagRepository.GetAllCoverImagesAsync(); - _directoryService.CopyFilesToDirectory( - collectionTags.Select(s => Path.Join(DirectoryService.CoverImageDirectory, s)), outputTempDir); - - var chapterImages = await _unitOfWork.ChapterRepository.GetCoverImagesForLockedChaptersAsync(); - _directoryService.CopyFilesToDirectory( - chapterImages.Select(s => Path.Join(DirectoryService.CoverImageDirectory, s)), outputTempDir); - } - catch (IOException) - { - // Swallow exception. This can be a duplicate cover being copied as chapter and volumes can share same file. - } - - if (!DirectoryService.GetFiles(outputTempDir).Any()) - { - DirectoryService.ClearAndDeleteDirectory(outputTempDir); - } - } - - private async Task SendProgress(float progress) - { - await _messageHub.Clients.All.SendAsync(SignalREvents.BackupDatabaseProgress, - MessageFactory.BackupDatabaseProgressEvent(progress)); - } - - /// - /// Removes Database backups older than 30 days. If all backups are older than 30 days, the latest is kept. 
- /// - public void CleanupBackups() - { - const int dayThreshold = 30; - _logger.LogInformation("Beginning cleanup of Database backups at {Time}", DateTime.Now); - var backupDirectory = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Result.Value; - if (!_directoryService.Exists(backupDirectory)) return; - var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(dayThreshold)); - var allBackups = DirectoryService.GetFiles(backupDirectory).ToList(); - var expiredBackups = allBackups.Select(filename => new FileInfo(filename)) - .Where(f => f.CreationTime > deltaTime) - .ToList(); - if (expiredBackups.Count == allBackups.Count) - { - _logger.LogInformation("All expired backups are older than {Threshold} days. Removing all but last backup", dayThreshold); - var toDelete = expiredBackups.OrderByDescending(f => f.CreationTime).ToList(); - for (var i = 1; i < toDelete.Count; i++) - { - try - { - toDelete[i].Delete(); - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an issue deleting {FileName}", toDelete[i].Name); - } - } - } - else - { - foreach (var file in expiredBackups) - { - try - { - file.Delete(); - } - catch (Exception ex) - { - _logger.LogError(ex, "There was an issue deleting {FileName}", file.Name); - } - } - - } - _logger.LogInformation("Finished cleanup of Database backups at {Time}", DateTime.Now); - } - - } + Task BackupDatabase(); + /// + /// Returns a list of full paths of the logs files detailed in . + /// + /// + /// + /// + IEnumerable GetLogFiles(int maxRollingFiles, string logFileName); +} +public class BackupService : IBackupService +{ + private readonly IUnitOfWork _unitOfWork; + private readonly ILogger _logger; + private readonly IDirectoryService _directoryService; + private readonly IHubContext _messageHub; + + private readonly IList _backupFiles; + + public BackupService(ILogger logger, IUnitOfWork unitOfWork, + IDirectoryService directoryService, IConfiguration config, IHubContext messageHub) + { + _unitOfWork = unitOfWork; + _logger = logger; + _directoryService = directoryService; + _messageHub = messageHub; + + var maxRollingFiles = config.GetMaxRollingFiles(); + var loggingSection = config.GetLoggingFileName(); + var files = GetLogFiles(maxRollingFiles, loggingSection); + + + _backupFiles = new List() + { + "appsettings.json", + "Hangfire.db", // This is not used atm + "Hangfire-log.db", // This is not used atm + "kavita.db", + "kavita.db-shm", // This wont always be there + "kavita.db-wal" // This wont always be there + }; + + foreach (var file in files.Select(f => (_directoryService.FileSystem.FileInfo.FromFileName(f)).Name).ToList()) + { + _backupFiles.Add(file); + } + } + + public IEnumerable GetLogFiles(int maxRollingFiles, string logFileName) + { + var multipleFileRegex = maxRollingFiles > 0 ? @"\d*" : string.Empty; + var fi = _directoryService.FileSystem.FileInfo.FromFileName(logFileName); + + var files = maxRollingFiles > 0 + ? _directoryService.GetFiles(_directoryService.LogDirectory, + $@"{_directoryService.FileSystem.Path.GetFileNameWithoutExtension(fi.Name)}{multipleFileRegex}\.log") + : new[] {"kavita.log"}; + return files; + } + + /// + /// Will backup anything that needs to be backed up. This includes logs, setting files, bare minimum cover images (just locked and first cover). 
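GetLogFiles above turns the configured log file name into a regex so that rolled files (kavita.log, kavita1.log, kavita2.log, ...) are swept into the backup when rolling is enabled. The same matching, sketched against System.IO directly rather than the injected IDirectoryService the service uses for testability:

using System.IO;
using System.Linq;
using System.Text.RegularExpressions;

public static class LogFileMatchSketch
{
    // Returns files in logDirectory matching "<stem>.log", plus "<stem>1.log", "<stem>2.log", ...
    // when rolling is enabled. (The real method simply returns the single configured
    // file name in the non-rolling case.)
    public static string[] FindLogFiles(string logDirectory, string logFileName, int maxRollingFiles)
    {
        var stem = Path.GetFileNameWithoutExtension(logFileName);
        var suffix = maxRollingFiles > 0 ? @"\d*" : string.Empty;
        var pattern = $@"^{Regex.Escape(stem)}{suffix}\.log$";

        return Directory.GetFiles(logDirectory)
            .Where(f => Regex.IsMatch(Path.GetFileName(f), pattern))
            .ToArray();
    }
}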
+ /// + [AutomaticRetry(Attempts = 3, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Fail)] + public async Task BackupDatabase() + { + _logger.LogInformation("Beginning backup of Database at {BackupTime}", DateTime.Now); + var backupDirectory = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Value; + + _logger.LogDebug("Backing up to {BackupDirectory}", backupDirectory); + if (!_directoryService.ExistOrCreate(backupDirectory)) + { + _logger.LogCritical("Could not write to {BackupDirectory}; aborting backup", backupDirectory); + return; + } + + await SendProgress(0F); + + var dateString = $"{DateTime.Now.ToShortDateString()}_{DateTime.Now.ToLongTimeString()}".Replace("/", "_").Replace(":", "_"); + var zipPath = _directoryService.FileSystem.Path.Join(backupDirectory, $"kavita_backup_{dateString}.zip"); + + if (File.Exists(zipPath)) + { + _logger.LogInformation("{ZipFile} already exists, aborting", zipPath); + return; + } + + var tempDirectory = Path.Join(_directoryService.TempDirectory, dateString); + _directoryService.ExistOrCreate(tempDirectory); + _directoryService.ClearDirectory(tempDirectory); + + _directoryService.CopyFilesToDirectory( + _backupFiles.Select(file => _directoryService.FileSystem.Path.Join(_directoryService.ConfigDirectory, file)).ToList(), tempDirectory); + + await SendProgress(0.25F); + + await CopyCoverImagesToBackupDirectory(tempDirectory); + + await SendProgress(0.75F); + + try + { + ZipFile.CreateFromDirectory(tempDirectory, zipPath); + } + catch (AggregateException ex) + { + _logger.LogError(ex, "There was an issue when archiving library backup"); + } + + _directoryService.ClearAndDeleteDirectory(tempDirectory); + _logger.LogInformation("Database backup completed"); + await SendProgress(1F); + } + + private async Task CopyCoverImagesToBackupDirectory(string tempDirectory) + { + var outputTempDir = Path.Join(tempDirectory, "covers"); + _directoryService.ExistOrCreate(outputTempDir); + + try + { + var seriesImages = await _unitOfWork.SeriesRepository.GetLockedCoverImagesAsync(); + _directoryService.CopyFilesToDirectory( + seriesImages.Select(s => _directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, s)), outputTempDir); + + var collectionTags = await _unitOfWork.CollectionTagRepository.GetAllCoverImagesAsync(); + _directoryService.CopyFilesToDirectory( + collectionTags.Select(s => _directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, s)), outputTempDir); + + var chapterImages = await _unitOfWork.ChapterRepository.GetCoverImagesForLockedChaptersAsync(); + _directoryService.CopyFilesToDirectory( + chapterImages.Select(s => _directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, s)), outputTempDir); + } + catch (IOException) + { + // Swallow exception. This can be a duplicate cover being copied as chapter and volumes can share same file. 
+ } + + if (!_directoryService.GetFiles(outputTempDir).Any()) + { + _directoryService.ClearAndDeleteDirectory(outputTempDir); + } + } + + private async Task SendProgress(float progress) + { + await _messageHub.Clients.All.SendAsync(SignalREvents.BackupDatabaseProgress, + MessageFactory.BackupDatabaseProgressEvent(progress)); + } + } diff --git a/API/Services/Tasks/CleanupService.cs b/API/Services/Tasks/CleanupService.cs index 1ecc9cec5..ba4b94c1e 100644 --- a/API/Services/Tasks/CleanupService.cs +++ b/API/Services/Tasks/CleanupService.cs @@ -1,7 +1,8 @@ -using System.IO; +using System; +using System.Linq; using System.Threading.Tasks; -using API.Interfaces; -using API.Interfaces.Services; +using API.Data; +using API.Entities.Enums; using API.SignalR; using Hangfire; using Microsoft.AspNetCore.SignalR; @@ -9,32 +10,35 @@ using Microsoft.Extensions.Logging; namespace API.Services.Tasks { + public interface ICleanupService + { + Task Cleanup(); + void CleanupCacheDirectory(); + Task DeleteSeriesCoverImages(); + Task DeleteChapterCoverImages(); + Task DeleteTagCoverImages(); + Task CleanupBackups(); + } /// /// Cleans up after operations on reoccurring basis /// public class CleanupService : ICleanupService { - private readonly ICacheService _cacheService; private readonly ILogger _logger; - private readonly IBackupService _backupService; private readonly IUnitOfWork _unitOfWork; private readonly IHubContext _messageHub; + private readonly IDirectoryService _directoryService; - public CleanupService(ICacheService cacheService, ILogger logger, - IBackupService backupService, IUnitOfWork unitOfWork, IHubContext messageHub) + public CleanupService(ILogger logger, + IUnitOfWork unitOfWork, IHubContext messageHub, + IDirectoryService directoryService) { - _cacheService = cacheService; _logger = logger; - _backupService = backupService; _unitOfWork = unitOfWork; _messageHub = messageHub; + _directoryService = directoryService; } - public void CleanupCacheDirectory() - { - _logger.LogInformation("Cleaning cache directory"); - _cacheService.Cleanup(); - } /// /// Cleans up Temp, cache, deleted cover images, and old database backups @@ -45,12 +49,12 @@ namespace API.Services.Tasks _logger.LogInformation("Starting Cleanup"); await SendProgress(0F); _logger.LogInformation("Cleaning temp directory"); - DirectoryService.ClearDirectory(DirectoryService.TempDirectory); + _directoryService.ClearDirectory(_directoryService.TempDirectory); await SendProgress(0.1F); CleanupCacheDirectory(); await SendProgress(0.25F); _logger.LogInformation("Cleaning old database backups"); - _backupService.CleanupBackups(); + await CleanupBackups(); await SendProgress(0.50F); _logger.LogInformation("Cleaning deleted cover images"); await DeleteSeriesCoverImages(); @@ -68,40 +72,84 @@ namespace API.Services.Tasks MessageFactory.CleanupProgressEvent(progress)); } - private async Task DeleteSeriesCoverImages() + /// + /// Removes all series images that are not in the database. They must follow filename pattern. 
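The Delete*CoverImages methods that follow all share one pattern: list the files matching a cover-image filename regex, keep only those whose names are not referenced in the database, and hand the rest to DeleteFiles. A condensed sketch of that pattern over System.IO (the service itself routes everything through IDirectoryService so it can run against a mock file system in tests):

using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;

public static class OrphanCoverCleanupSketch
{
    // Deletes files in coverDirectory that match filenamePattern but are not in knownImages.
    public static int DeleteOrphans(string coverDirectory, string filenamePattern, ISet<string> knownImages)
    {
        var orphans = Directory.GetFiles(coverDirectory)
            .Where(f => Regex.IsMatch(Path.GetFileName(f), filenamePattern))
            .Where(f => !knownImages.Contains(Path.GetFileName(f)))
            .ToList();

        foreach (var file in orphans) File.Delete(file);
        return orphans.Count;
    }
}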
+ /// + public async Task DeleteSeriesCoverImages() { var images = await _unitOfWork.SeriesRepository.GetAllCoverImagesAsync(); - var files = DirectoryService.GetFiles(DirectoryService.CoverImageDirectory, ImageService.SeriesCoverImageRegex); - foreach (var file in files) - { - if (images.Contains(Path.GetFileName(file))) continue; - File.Delete(file); - - } + var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.SeriesCoverImageRegex); + _directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file)))); } - private async Task DeleteChapterCoverImages() + /// + /// Removes all chapter/volume images that are not in the database. They must follow filename pattern. + /// + public async Task DeleteChapterCoverImages() { var images = await _unitOfWork.ChapterRepository.GetAllCoverImagesAsync(); - var files = DirectoryService.GetFiles(DirectoryService.CoverImageDirectory, ImageService.ChapterCoverImageRegex); - foreach (var file in files) - { - if (images.Contains(Path.GetFileName(file))) continue; - File.Delete(file); - - } + var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.ChapterCoverImageRegex); + _directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file)))); } - private async Task DeleteTagCoverImages() + /// + /// Removes all collection tag images that are not in the database. They must follow filename pattern. + /// + public async Task DeleteTagCoverImages() { var images = await _unitOfWork.CollectionTagRepository.GetAllCoverImagesAsync(); - var files = DirectoryService.GetFiles(DirectoryService.CoverImageDirectory, ImageService.CollectionTagCoverImageRegex); - foreach (var file in files) - { - if (images.Contains(Path.GetFileName(file))) continue; - File.Delete(file); + var files = _directoryService.GetFiles(_directoryService.CoverImageDirectory, ImageService.CollectionTagCoverImageRegex); + _directoryService.DeleteFiles(files.Where(file => !images.Contains(_directoryService.FileSystem.Path.GetFileName(file)))); + } + /// + /// Removes all files and directories in the cache directory + /// + public void CleanupCacheDirectory() + { + _logger.LogInformation("Performing cleanup of Cache directory"); + _directoryService.ExistOrCreate(_directoryService.CacheDirectory); + + try + { + _directoryService.ClearDirectory(_directoryService.CacheDirectory); } + catch (Exception ex) + { + _logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup"); + } + + _logger.LogInformation("Cache directory purged"); + } + + /// + /// Removes Database backups older than 30 days. If all backups are older than 30 days, the latest is kept. 
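The retention rule documented above is easy to invert by accident, so here it is in isolation: a backup is expired once its CreationTime falls before the 30-day cutoff; if every backup is expired, everything except the newest one is removed, otherwise only the expired ones are. A sketch over FileInfo, assuming the caller has already listed the backup directory:

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

public static class BackupRetentionSketch
{
    // Selects the backups to delete under a "keep 30 days, but never delete the last backup" policy.
    public static IEnumerable<FileInfo> SelectBackupsToDelete(IReadOnlyList<FileInfo> allBackups, int dayThreshold = 30)
    {
        var cutoff = DateTime.Today.Subtract(TimeSpan.FromDays(dayThreshold));
        var expired = allBackups.Where(f => f.CreationTime < cutoff).ToList();

        if (expired.Count == allBackups.Count)
        {
            // Everything is old: keep the most recent backup, delete the rest.
            return expired.OrderByDescending(f => f.CreationTime).Skip(1);
        }

        return expired;
    }
}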
+ /// + public async Task CleanupBackups() + { + const int dayThreshold = 30; + _logger.LogInformation("Beginning cleanup of Database backups at {Time}", DateTime.Now); + var backupDirectory = + (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Value; + if (!_directoryService.Exists(backupDirectory)) return; + + var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(dayThreshold)); + var allBackups = _directoryService.GetFiles(backupDirectory).ToList(); + var expiredBackups = allBackups.Select(filename => _directoryService.FileSystem.FileInfo.FromFileName(filename)) + .Where(f => f.CreationTime < deltaTime) + .ToList(); + + if (expiredBackups.Count == allBackups.Count) + { + _logger.LogInformation("All expired backups are older than {Threshold} days. Removing all but last backup", dayThreshold); + var toDelete = expiredBackups.OrderByDescending(f => f.CreationTime).ToList(); + _directoryService.DeleteFiles(toDelete.Take(toDelete.Count - 1).Select(f => f.FullName)); + } + else + { + _directoryService.DeleteFiles(expiredBackups.Select(f => f.FullName)); + } + _logger.LogInformation("Finished cleanup of Database backups at {Time}", DateTime.Now); } } } diff --git a/API/Services/Tasks/Scanner/ParseScannedFiles.cs b/API/Services/Tasks/Scanner/ParseScannedFiles.cs index 32e108da2..50f966db6 100644 --- a/API/Services/Tasks/Scanner/ParseScannedFiles.cs +++ b/API/Services/Tasks/Scanner/ParseScannedFiles.cs @@ -7,7 +7,6 @@ using System.Linq; using API.Data.Metadata; using API.Entities; using API.Entities.Enums; -using API.Interfaces.Services; using API.Parser; using Microsoft.Extensions.Logging; @@ -24,25 +23,26 @@ namespace API.Services.Tasks.Scanner public class ParseScannedFiles { private readonly ConcurrentDictionary> _scannedSeries; - private readonly IBookService _bookService; private readonly ILogger _logger; - private readonly IArchiveService _archiveService; private readonly IDirectoryService _directoryService; + private readonly IReadingItemService _readingItemService; + private readonly DefaultParser _defaultParser; /// /// An instance of a pipeline for processing files and returning a Map of Series -> ParserInfos. /// Each instance is separate from other threads, allowing for no cross over. 
/// - /// - /// - public ParseScannedFiles(IBookService bookService, ILogger logger, IArchiveService archiveService, - IDirectoryService directoryService) + /// Logger of the parent class that invokes this + /// Directory Service + /// ReadingItemService Service for extracting information on a number of formats + public ParseScannedFiles(ILogger logger, IDirectoryService directoryService, + IReadingItemService readingItemService) { - _bookService = bookService; _logger = logger; - _archiveService = archiveService; _directoryService = directoryService; + _readingItemService = readingItemService; _scannedSeries = new ConcurrentDictionary>(); + _defaultParser = new DefaultParser(_directoryService); } /// @@ -63,12 +63,12 @@ namespace API.Services.Tasks.Scanner { if (Parser.Parser.IsEpub(path)) { - return _bookService.GetComicInfo(path); + return _readingItemService.GetComicInfo(path, MangaFormat.Epub); } if (Parser.Parser.IsComicInfoExtension(path)) { - return _archiveService.GetComicInfo(path); + return _readingItemService.GetComicInfo(path, MangaFormat.Archive); } return null; } @@ -82,15 +82,15 @@ namespace API.Services.Tasks.Scanner /// Library type to determine parsing to perform private void ProcessFile(string path, string rootPath, LibraryType type) { - ParserInfo info; + ParserInfo info = null; if (Parser.Parser.IsEpub(path)) { - info = _bookService.ParseInfo(path); + info = _readingItemService.Parse(path, rootPath, type); } else { - info = Parser.Parser.Parse(path, rootPath, type); + info = _readingItemService.Parse(path, rootPath, type); } // If we couldn't match, log. But don't log if the file parses as a cover image @@ -105,8 +105,8 @@ namespace API.Services.Tasks.Scanner if (Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != Parser.Parser.DefaultVolume) { - info = Parser.Parser.Parse(path, rootPath, type); - var info2 = _bookService.ParseInfo(path); + info = _defaultParser.Parse(path, rootPath, LibraryType.Book); // TODO: Why do I reparse? + var info2 = _readingItemService.Parse(path, rootPath, type); info.Merge(info2); } diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index daebf27cc..7253b613d 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -12,8 +12,6 @@ using API.Entities; using API.Entities.Enums; using API.Extensions; using API.Helpers; -using API.Interfaces; -using API.Interfaces.Services; using API.Parser; using API.Services.Tasks.Scanner; using API.SignalR; @@ -22,33 +20,42 @@ using Microsoft.AspNetCore.SignalR; using Microsoft.Extensions.Logging; namespace API.Services.Tasks; +public interface IScannerService +{ + /// + /// Given a library id, scans folders for said library. Parses files and generates DB updates. Will overwrite + /// cover images if forceUpdate is true. 
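With the constructor trimmed to a logger, an IDirectoryService and an IReadingItemService, spinning up a scan outside the service stays a three-dependency affair. A hedged usage sketch; it assumes the dictionary-shaped return value the callers later in this diff rely on, and the dependencies are whatever concrete or substituted instances the caller already has:

using System.Collections.Generic;
using API.Entities.Enums;
using API.Services;
using API.Services.Tasks.Scanner;
using Microsoft.Extensions.Logging;

public static class ParseScannedFilesUsageSketch
{
    public static void ScanFolders(ILogger logger, IDirectoryService directoryService,
        IReadingItemService readingItemService, LibraryType type, IEnumerable<string> folders)
    {
        // Each ParseScannedFiles instance keeps its own series map, so it is safe to new one up per scan.
        var scanner = new ParseScannedFiles(logger, directoryService, readingItemService);
        var parsedSeries = scanner.ScanLibrariesForSeries(type, folders, out var totalFiles, out var scanElapsedTime);

        logger.LogInformation("Parsed {SeriesCount} series from {FileCount} files in {ElapsedMs} ms",
            parsedSeries.Count, totalFiles, scanElapsedTime);
    }
}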
+ /// + /// Library to scan against + Task ScanLibrary(int libraryId); + Task ScanLibraries(); + Task ScanSeries(int libraryId, int seriesId, CancellationToken token); +} public class ScannerService : IScannerService { private readonly IUnitOfWork _unitOfWork; private readonly ILogger _logger; - private readonly IArchiveService _archiveService; private readonly IMetadataService _metadataService; - private readonly IBookService _bookService; private readonly ICacheService _cacheService; private readonly IHubContext _messageHub; private readonly IFileService _fileService; private readonly IDirectoryService _directoryService; + private readonly IReadingItemService _readingItemService; private readonly NaturalSortComparer _naturalSort = new (); - public ScannerService(IUnitOfWork unitOfWork, ILogger logger, IArchiveService archiveService, - IMetadataService metadataService, IBookService bookService, ICacheService cacheService, IHubContext messageHub, - IFileService fileService, IDirectoryService directoryService) + public ScannerService(IUnitOfWork unitOfWork, ILogger logger, + IMetadataService metadataService, ICacheService cacheService, IHubContext messageHub, + IFileService fileService, IDirectoryService directoryService, IReadingItemService readingItemService) { _unitOfWork = unitOfWork; _logger = logger; - _archiveService = archiveService; _metadataService = metadataService; - _bookService = bookService; _cacheService = cacheService; _messageHub = messageHub; _fileService = fileService; _directoryService = directoryService; + _readingItemService = readingItemService; } [DisableConcurrentExecution(timeoutInSeconds: 360)] @@ -63,16 +70,16 @@ public class ScannerService : IScannerService var folderPaths = library.Folders.Select(f => f.Path).ToList(); // Check if any of the folder roots are not available (ie disconnected from network, etc) and fail if any of them are - if (folderPaths.Any(f => !DirectoryService.IsDriveMounted(f))) + if (folderPaths.Any(f => !_directoryService.IsDriveMounted(f))) { _logger.LogError("Some of the root folders for library are not accessible. Please check that drives are connected and rescan. Scan will be aborted"); return; } - var dirs = DirectoryService.FindHighestDirectoriesFromFiles(folderPaths, files.Select(f => f.FilePath).ToList()); + var dirs = _directoryService.FindHighestDirectoriesFromFiles(folderPaths, files.Select(f => f.FilePath).ToList()); _logger.LogInformation("Beginning file scan on {SeriesName}", series.Name); - var scanner = new ParseScannedFiles(_bookService, _logger, _archiveService, _directoryService); + var scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService); var parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles, out var scanElapsedTime); // Remove any parsedSeries keys that don't belong to our series. 
This can occur when users store 2 series in the same folder @@ -120,7 +127,7 @@ public class ScannerService : IScannerService } _logger.LogInformation("{SeriesName} has bad naming convention, forcing rescan at a higher directory", series.OriginalName); - scanner = new ParseScannedFiles(_bookService, _logger, _archiveService, _directoryService); + scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService); parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles2, out var scanElapsedTime2); totalFiles += totalFiles2; scanElapsedTime += scanElapsedTime2; @@ -208,7 +215,7 @@ public class ScannerService : IScannerService } // Check if any of the folder roots are not available (ie disconnected from network, etc) and fail if any of them are - if (library.Folders.Any(f => !DirectoryService.IsDriveMounted(f.Path))) + if (library.Folders.Any(f => !_directoryService.IsDriveMounted(f.Path))) { _logger.LogError("Some of the root folders for library are not accessible. Please check that drives are connected and rescan. Scan will be aborted"); return; @@ -218,7 +225,7 @@ public class ScannerService : IScannerService await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress, MessageFactory.ScanLibraryProgressEvent(libraryId, 0)); - var scanner = new ParseScannedFiles(_bookService, _logger, _archiveService, _directoryService); + var scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService); var series = scanner.ScanLibrariesForSeries(library.Type, library.Folders.Select(fp => fp.Path), out var totalFiles, out var scanElapsedTime); foreach (var folderPath in library.Folders) @@ -618,28 +625,7 @@ public class ScannerService : IScannerService private MangaFile CreateMangaFile(ParserInfo info) { - var pages = 0; - switch (info.Format) - { - case MangaFormat.Archive: - { - pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath); - break; - } - case MangaFormat.Pdf: - case MangaFormat.Epub: - { - pages = _bookService.GetNumberOfPages(info.FullFilePath); - break; - } - case MangaFormat.Image: - { - pages = 1; - break; - } - } - - return DbFactory.MangaFile(info.FullFilePath, info.Format, pages); + return DbFactory.MangaFile(info.FullFilePath, info.Format, _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format)); } private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info) @@ -650,23 +636,7 @@ public class ScannerService : IScannerService { existingFile.Format = info.Format; if (!_fileService.HasFileBeenModifiedSince(existingFile.FilePath, existingFile.LastModified) && existingFile.Pages != 0) return; - switch (existingFile.Format) - { - case MangaFormat.Epub: - case MangaFormat.Pdf: - existingFile.Pages = _bookService.GetNumberOfPages(info.FullFilePath); - break; - case MangaFormat.Image: - existingFile.Pages = 1; - break; - case MangaFormat.Unknown: - existingFile.Pages = 0; - break; - case MangaFormat.Archive: - existingFile.Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath); - break; - } - //existingFile.LastModified = File.GetLastWriteTime(info.FullFilePath); // This is messing up our logic on when last modified + existingFile.Pages = _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format); } else { diff --git a/API/Services/Tasks/StatsService.cs b/API/Services/Tasks/StatsService.cs index 0052e0cb4..1b9f25593 100644 --- a/API/Services/Tasks/StatsService.cs +++ b/API/Services/Tasks/StatsService.cs @@ -2,108 +2,111 @@ using System.Net.Http; 
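CreateMangaFile and AddOrUpdateFileForChapter above both collapse to a single call because the per-format page counting now lives behind IReadingItemService.GetNumberOfPages. Restated as a standalone switch expression, with hypothetical delegates standing in for the archive and book services:

using System;
using API.Entities.Enums;

public static class PageCountSketch
{
    // Mirrors the format dispatch: archives and books delegate to their services,
    // a single image counts as one page, and unknown formats count as zero.
    public static int CountPages(MangaFormat format, string filePath,
        Func<string, int> archivePages, Func<string, int> bookPages)
    {
        return format switch
        {
            MangaFormat.Archive => archivePages(filePath),
            MangaFormat.Pdf or MangaFormat.Epub => bookPages(filePath),
            MangaFormat.Image => 1,
            _ => 0 // MangaFormat.Unknown and anything else
        };
    }
}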
using System.Runtime.InteropServices; using System.Threading.Tasks; +using API.Data; using API.DTOs.Stats; using API.Entities.Enums; -using API.Interfaces; -using API.Interfaces.Services; using Flurl.Http; using Kavita.Common.EnvironmentInfo; using Microsoft.AspNetCore.Http; using Microsoft.Extensions.Logging; -namespace API.Services.Tasks +namespace API.Services.Tasks; + +public interface IStatsService { - public class StatsService : IStatsService + Task Send(); + Task GetServerInfo(); +} +public class StatsService : IStatsService +{ + private readonly ILogger _logger; + private readonly IUnitOfWork _unitOfWork; + private const string ApiUrl = "https://stats.kavitareader.com"; + + public StatsService(ILogger logger, IUnitOfWork unitOfWork) { - private readonly ILogger _logger; - private readonly IUnitOfWork _unitOfWork; - private const string ApiUrl = "https://stats.kavitareader.com"; + _logger = logger; + _unitOfWork = unitOfWork; - public StatsService(ILogger logger, IUnitOfWork unitOfWork) + FlurlHttp.ConfigureClient(ApiUrl, cli => + cli.Settings.HttpClientFactory = new UntrustedCertClientFactory()); + } + + /// + /// Due to all instances firing this at the same time, we can DDOS our server. This task when fired will schedule the task to be run + /// randomly over a 6 hour spread + /// + public async Task Send() + { + var allowStatCollection = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).AllowStatCollection; + if (!allowStatCollection) { - _logger = logger; - _unitOfWork = unitOfWork; - - FlurlHttp.ConfigureClient(ApiUrl, cli => - cli.Settings.HttpClientFactory = new UntrustedCertClientFactory()); + return; } - /// - /// Due to all instances firing this at the same time, we can DDOS our server. This task when fired will schedule the task to be run - /// randomly over a 6 hour spread - /// - public async Task Send() + await SendData(); + } + + /// + /// This must be public for Hangfire. Do not call this directly. + /// + // ReSharper disable once MemberCanBePrivate.Global + public async Task SendData() + { + var data = await GetServerInfo(); + await SendDataToStatsServer(data); + } + + + private async Task SendDataToStatsServer(ServerInfoDto data) + { + var responseContent = string.Empty; + + try { - var allowStatCollection = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).AllowStatCollection; - if (!allowStatCollection) + var response = await (ApiUrl + "/api/v2/stats") + .WithHeader("Accept", "application/json") + .WithHeader("User-Agent", "Kavita") + .WithHeader("x-api-key", "MsnvA2DfQqxSK5jh") + .WithHeader("x-kavita-version", BuildInfo.Version) + .WithHeader("Content-Type", "application/json") + .WithTimeout(TimeSpan.FromSeconds(30)) + .PostJsonAsync(data); + + if (response.StatusCode != StatusCodes.Status200OK) { - return; - } - - await SendData(); - } - - /// - /// This must be public for Hangfire. Do not call this directly. 
- /// - // ReSharper disable once MemberCanBePrivate.Global - public async Task SendData() - { - var data = await GetServerInfo(); - await SendDataToStatsServer(data); - } - - - private async Task SendDataToStatsServer(ServerInfoDto data) - { - var responseContent = string.Empty; - - try - { - var response = await (ApiUrl + "/api/v2/stats") - .WithHeader("Accept", "application/json") - .WithHeader("User-Agent", "Kavita") - .WithHeader("x-api-key", "MsnvA2DfQqxSK5jh") - .WithHeader("x-kavita-version", BuildInfo.Version) - .WithHeader("Content-Type", "application/json") - .WithTimeout(TimeSpan.FromSeconds(30)) - .PostJsonAsync(data); - - if (response.StatusCode != StatusCodes.Status200OK) - { - _logger.LogError("KavitaStats did not respond successfully. {Content}", response); - } - } - catch (HttpRequestException e) - { - var info = new - { - dataSent = data, - response = responseContent - }; - - _logger.LogError(e, "KavitaStats did not respond successfully. {Content}", info); - } - catch (Exception e) - { - _logger.LogError(e, "An error happened during the request to KavitaStats"); + _logger.LogError("KavitaStats did not respond successfully. {Content}", response); } } - - public async Task GetServerInfo() + catch (HttpRequestException e) { - var installId = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallId); - var serverInfo = new ServerInfoDto + var info = new { - InstallId = installId.Value, - Os = RuntimeInformation.OSDescription, - KavitaVersion = BuildInfo.Version.ToString(), - DotnetVersion = Environment.Version.ToString(), - IsDocker = new OsInfo(Array.Empty()).IsDocker, - NumOfCores = Math.Max(Environment.ProcessorCount, 1) + dataSent = data, + response = responseContent }; - return serverInfo; + _logger.LogError(e, "KavitaStats did not respond successfully. 
{Content}", info); + } + catch (Exception e) + { + _logger.LogError(e, "An error happened during the request to KavitaStats"); } } + + public async Task GetServerInfo() + { + var installId = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallId); + var serverInfo = new ServerInfoDto + { + InstallId = installId.Value, + Os = RuntimeInformation.OSDescription, + KavitaVersion = BuildInfo.Version.ToString(), + DotnetVersion = Environment.Version.ToString(), + IsDocker = new OsInfo(Array.Empty()).IsDocker, + NumOfCores = Math.Max(Environment.ProcessorCount, 1) + }; + + return serverInfo; + } } diff --git a/API/Services/Tasks/VersionUpdaterService.cs b/API/Services/Tasks/VersionUpdaterService.cs index ff9e43462..178111051 100644 --- a/API/Services/Tasks/VersionUpdaterService.cs +++ b/API/Services/Tasks/VersionUpdaterService.cs @@ -4,7 +4,6 @@ using System.Linq; using System.Net.Http; using System.Threading.Tasks; using API.DTOs.Update; -using API.Interfaces.Services; using API.SignalR; using API.SignalR.Presence; using Flurl.Http; @@ -15,149 +14,155 @@ using Microsoft.AspNetCore.SignalR; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; -namespace API.Services.Tasks +namespace API.Services.Tasks; + +internal class GithubReleaseMetadata { - internal class GithubReleaseMetadata - { - /// - /// Name of the Tag - /// v0.4.3 - /// - // ReSharper disable once InconsistentNaming - public string Tag_Name { get; init; } - /// - /// Name of the Release - /// - public string Name { get; init; } - /// - /// Body of the Release - /// - public string Body { get; init; } - /// - /// Url of the release on Github - /// - // ReSharper disable once InconsistentNaming - public string Html_Url { get; init; } - /// - /// Date Release was Published - /// - // ReSharper disable once InconsistentNaming - public string Published_At { get; init; } - } + /// + /// Name of the Tag + /// v0.4.3 + /// + // ReSharper disable once InconsistentNaming + public string Tag_Name { get; init; } + /// + /// Name of the Release + /// + public string Name { get; init; } + /// + /// Body of the Release + /// + public string Body { get; init; } + /// + /// Url of the release on Github + /// + // ReSharper disable once InconsistentNaming + public string Html_Url { get; init; } + /// + /// Date Release was Published + /// + // ReSharper disable once InconsistentNaming + public string Published_At { get; init; } +} - public class UntrustedCertClientFactory : DefaultHttpClientFactory - { - public override HttpMessageHandler CreateMessageHandler() { - return new HttpClientHandler { - ServerCertificateCustomValidationCallback = (_, _, _, _) => true - }; - } - } - - public class VersionUpdaterService : IVersionUpdaterService - { - private readonly ILogger _logger; - private readonly IHubContext _messageHub; - private readonly IPresenceTracker _tracker; - private readonly Markdown _markdown = new MarkdownDeep.Markdown(); -#pragma warning disable S1075 - private static readonly string GithubLatestReleasesUrl = "https://api.github.com/repos/Kareadita/Kavita/releases/latest"; - private static readonly string GithubAllReleasesUrl = "https://api.github.com/repos/Kareadita/Kavita/releases"; -#pragma warning restore S1075 - - public VersionUpdaterService(ILogger logger, IHubContext messageHub, IPresenceTracker tracker) - { - _logger = logger; - _messageHub = messageHub; - _tracker = tracker; - - FlurlHttp.ConfigureClient(GithubLatestReleasesUrl, cli => - cli.Settings.HttpClientFactory = new 
UntrustedCertClientFactory()); - FlurlHttp.ConfigureClient(GithubAllReleasesUrl, cli => - cli.Settings.HttpClientFactory = new UntrustedCertClientFactory()); - } - - /// - /// Fetches the latest release from Github - /// - public async Task CheckForUpdate() - { - var update = await GetGithubRelease(); - return CreateDto(update); - } - - public async Task> GetAllReleases() - { - var updates = await GetGithubReleases(); - return updates.Select(CreateDto); - } - - private UpdateNotificationDto CreateDto(GithubReleaseMetadata update) - { - if (update == null || string.IsNullOrEmpty(update.Tag_Name)) return null; - var updateVersion = new Version(update.Tag_Name.Replace("v", string.Empty)); - var currentVersion = BuildInfo.Version.ToString(); - - if (updateVersion.Revision == -1) - { - currentVersion = currentVersion.Substring(0, currentVersion.LastIndexOf(".", StringComparison.Ordinal)); - } - - return new UpdateNotificationDto() - { - CurrentVersion = currentVersion, - UpdateVersion = updateVersion.ToString(), - UpdateBody = _markdown.Transform(update.Body.Trim()), - UpdateTitle = update.Name, - UpdateUrl = update.Html_Url, - IsDocker = new OsInfo(Array.Empty()).IsDocker, - PublishDate = update.Published_At - }; - } - - public async Task PushUpdate(UpdateNotificationDto update) - { - if (update == null) return; - - var admins = await _tracker.GetOnlineAdmins(); - var updateVersion = new Version(update.CurrentVersion); - - if (BuildInfo.Version < updateVersion) - { - _logger.LogInformation("Server is out of date. Current: {CurrentVersion}. Available: {AvailableUpdate}", BuildInfo.Version, updateVersion); - await SendEvent(update, admins); - } - else if (Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") == Environments.Development) - { - _logger.LogInformation("Server is up to date. 
Current: {CurrentVersion}", BuildInfo.Version); - await SendEvent(update, admins); - } - } - - private async Task SendEvent(UpdateNotificationDto update, IReadOnlyList admins) - { - await _messageHub.Clients.Users(admins).SendAsync(SignalREvents.UpdateAvailable, MessageFactory.UpdateVersionEvent(update)); - } - - - private static async Task GetGithubRelease() - { - var update = await GithubLatestReleasesUrl - .WithHeader("Accept", "application/json") - .WithHeader("User-Agent", "Kavita") - .GetJsonAsync(); - - return update; - } - - private static async Task> GetGithubReleases() - { - var update = await GithubAllReleasesUrl - .WithHeader("Accept", "application/json") - .WithHeader("User-Agent", "Kavita") - .GetJsonAsync>(); - - return update; - } +public class UntrustedCertClientFactory : DefaultHttpClientFactory +{ + public override HttpMessageHandler CreateMessageHandler() { + return new HttpClientHandler { + ServerCertificateCustomValidationCallback = (_, _, _, _) => true + }; + } +} + +public interface IVersionUpdaterService +{ + Task CheckForUpdate(); + Task PushUpdate(UpdateNotificationDto update); + Task> GetAllReleases(); +} + +public class VersionUpdaterService : IVersionUpdaterService +{ + private readonly ILogger _logger; + private readonly IHubContext _messageHub; + private readonly IPresenceTracker _tracker; + private readonly Markdown _markdown = new MarkdownDeep.Markdown(); +#pragma warning disable S1075 + private static readonly string GithubLatestReleasesUrl = "https://api.github.com/repos/Kareadita/Kavita/releases/latest"; + private static readonly string GithubAllReleasesUrl = "https://api.github.com/repos/Kareadita/Kavita/releases"; +#pragma warning restore S1075 + + public VersionUpdaterService(ILogger logger, IHubContext messageHub, IPresenceTracker tracker) + { + _logger = logger; + _messageHub = messageHub; + _tracker = tracker; + + FlurlHttp.ConfigureClient(GithubLatestReleasesUrl, cli => + cli.Settings.HttpClientFactory = new UntrustedCertClientFactory()); + FlurlHttp.ConfigureClient(GithubAllReleasesUrl, cli => + cli.Settings.HttpClientFactory = new UntrustedCertClientFactory()); + } + + /// + /// Fetches the latest release from Github + /// + public async Task CheckForUpdate() + { + var update = await GetGithubRelease(); + return CreateDto(update); + } + + public async Task> GetAllReleases() + { + var updates = await GetGithubReleases(); + return updates.Select(CreateDto); + } + + private UpdateNotificationDto CreateDto(GithubReleaseMetadata update) + { + if (update == null || string.IsNullOrEmpty(update.Tag_Name)) return null; + var updateVersion = new Version(update.Tag_Name.Replace("v", string.Empty)); + var currentVersion = BuildInfo.Version.ToString(); + + if (updateVersion.Revision == -1) + { + currentVersion = currentVersion.Substring(0, currentVersion.LastIndexOf(".", StringComparison.Ordinal)); + } + + return new UpdateNotificationDto() + { + CurrentVersion = currentVersion, + UpdateVersion = updateVersion.ToString(), + UpdateBody = _markdown.Transform(update.Body.Trim()), + UpdateTitle = update.Name, + UpdateUrl = update.Html_Url, + IsDocker = new OsInfo(Array.Empty()).IsDocker, + PublishDate = update.Published_At + }; + } + + public async Task PushUpdate(UpdateNotificationDto update) + { + if (update == null) return; + + var admins = await _tracker.GetOnlineAdmins(); + var updateVersion = new Version(update.CurrentVersion); + + if (BuildInfo.Version < updateVersion) + { + _logger.LogInformation("Server is out of date. Current: {CurrentVersion}. 
Available: {AvailableUpdate}", BuildInfo.Version, updateVersion); + await SendEvent(update, admins); + } + else if (Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") == Environments.Development) + { + _logger.LogInformation("Server is up to date. Current: {CurrentVersion}", BuildInfo.Version); + await SendEvent(update, admins); + } + } + + private async Task SendEvent(UpdateNotificationDto update, IReadOnlyList admins) + { + await _messageHub.Clients.Users(admins).SendAsync(SignalREvents.UpdateAvailable, MessageFactory.UpdateVersionEvent(update)); + } + + + private static async Task GetGithubRelease() + { + var update = await GithubLatestReleasesUrl + .WithHeader("Accept", "application/json") + .WithHeader("User-Agent", "Kavita") + .GetJsonAsync(); + + return update; + } + + private static async Task> GetGithubReleases() + { + var update = await GithubAllReleasesUrl + .WithHeader("Accept", "application/json") + .WithHeader("User-Agent", "Kavita") + .GetJsonAsync>(); + + return update; } } diff --git a/API/Services/TokenService.cs b/API/Services/TokenService.cs index 3b292cb8c..8145b330e 100644 --- a/API/Services/TokenService.cs +++ b/API/Services/TokenService.cs @@ -6,51 +6,54 @@ using System.Security.Claims; using System.Text; using System.Threading.Tasks; using API.Entities; -using API.Interfaces.Services; using Microsoft.AspNetCore.Identity; using Microsoft.Extensions.Configuration; using Microsoft.IdentityModel.Tokens; using JwtRegisteredClaimNames = Microsoft.IdentityModel.JsonWebTokens.JwtRegisteredClaimNames; -namespace API.Services +namespace API.Services; + +public interface ITokenService { - public class TokenService : ITokenService + Task CreateToken(AppUser user); +} + +public class TokenService : ITokenService +{ + private readonly UserManager _userManager; + private readonly SymmetricSecurityKey _key; + + public TokenService(IConfiguration config, UserManager userManager) { - private readonly UserManager _userManager; - private readonly SymmetricSecurityKey _key; - public TokenService(IConfiguration config, UserManager userManager) - { - - _userManager = userManager; - _key = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(config["TokenKey"])); - } - - public async Task CreateToken(AppUser user) - { - var claims = new List - { - new Claim(JwtRegisteredClaimNames.NameId, user.UserName) - }; - - var roles = await _userManager.GetRolesAsync(user); - - claims.AddRange(roles.Select(role => new Claim(ClaimTypes.Role, role))); - - var creds = new SigningCredentials(_key, SecurityAlgorithms.HmacSha512Signature); - - var tokenDescriptor = new SecurityTokenDescriptor() - { - Subject = new ClaimsIdentity(claims), - Expires = DateTime.Now.AddDays(7), - SigningCredentials = creds - }; - - var tokenHandler = new JwtSecurityTokenHandler(); - var token = tokenHandler.CreateToken(tokenDescriptor); - - return tokenHandler.WriteToken(token); - } + _userManager = userManager; + _key = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(config["TokenKey"])); } -} \ No newline at end of file + + public async Task CreateToken(AppUser user) + { + var claims = new List + { + new Claim(JwtRegisteredClaimNames.NameId, user.UserName) + }; + + var roles = await _userManager.GetRolesAsync(user); + + claims.AddRange(roles.Select(role => new Claim(ClaimTypes.Role, role))); + + var creds = new SigningCredentials(_key, SecurityAlgorithms.HmacSha512Signature); + + var tokenDescriptor = new SecurityTokenDescriptor() + { + Subject = new ClaimsIdentity(claims), + Expires = DateTime.Now.AddDays(7), + 
+        };
+
+        var tokenHandler = new JwtSecurityTokenHandler();
+        var token = tokenHandler.CreateToken(tokenDescriptor);
+
+        return tokenHandler.WriteToken(token);
+    }
+}
diff --git a/API/Services/WarmupServiceStartupTask.cs b/API/Services/WarmupServiceStartupTask.cs
deleted file mode 100644
index 36463451a..000000000
--- a/API/Services/WarmupServiceStartupTask.cs
+++ /dev/null
@@ -1,43 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Threading;
-using System.Threading.Tasks;
-using API.Interfaces.Services;
-using Microsoft.Extensions.DependencyInjection;
-
-namespace API.Services
-{
-    public class WarmupServicesStartupTask : IStartupTask
-    {
-        private readonly IServiceCollection _services;
-        private readonly IServiceProvider _provider;
-        public WarmupServicesStartupTask(IServiceCollection services, IServiceProvider provider)
-        {
-            _services = services;
-            _provider = provider;
-        }
-
-        public Task ExecuteAsync(CancellationToken cancellationToken = default)
-        {
-            using var scope = _provider.CreateScope();
-            foreach (var singleton in GetServices(_services))
-            {
-                Console.WriteLine("DI preloading of " + singleton.FullName);
-                scope.ServiceProvider.GetServices(singleton);
-            }
-
-            return Task.CompletedTask;
-        }
-
-        static IEnumerable<Type> GetServices(IServiceCollection services)
-        {
-            return services
-                .Where(descriptor => descriptor.ImplementationType != typeof(WarmupServicesStartupTask))
-                .Where(descriptor => !descriptor.ServiceType.ContainsGenericParameters)
-                .Select(descriptor => descriptor.ServiceType)
-                .Distinct();
-        }
-    }
-
-}
\ No newline at end of file
diff --git a/API/SignalR/Presence/PresenceTracker.cs b/API/SignalR/Presence/PresenceTracker.cs
index 1453bd0f7..73d6479ff 100644
--- a/API/SignalR/Presence/PresenceTracker.cs
+++ b/API/SignalR/Presence/PresenceTracker.cs
@@ -2,7 +2,7 @@
 using System.Collections.Generic;
 using System.Linq;
 using System.Threading.Tasks;
-using API.Interfaces;
+using API.Data;

 namespace API.SignalR.Presence
 {
diff --git a/UI/Web/src/app/cards/card-item/card-item.component.ts b/UI/Web/src/app/cards/card-item/card-item.component.ts
index 0511192ae..f65efaf85 100644
--- a/UI/Web/src/app/cards/card-item/card-item.component.ts
+++ b/UI/Web/src/app/cards/card-item/card-item.component.ts
@@ -88,7 +88,7 @@ export class CardItemComponent implements OnInit, OnDestroy {
   isShiftDown: boolean = false;

   get tooltipTitle() {
-    if (this.chapterTitle === '') return this.title;
+    if (this.chapterTitle === '' || this.chapterTitle === null) return this.title;
     return this.chapterTitle;
   }

diff --git a/UI/Web/src/app/manga-reader/manga-reader.component.ts b/UI/Web/src/app/manga-reader/manga-reader.component.ts
index 3ab56c1b7..5697e6e19 100644
--- a/UI/Web/src/app/manga-reader/manga-reader.component.ts
+++ b/UI/Web/src/app/manga-reader/manga-reader.component.ts
@@ -504,7 +504,7 @@ export class MangaReaderComponent implements OnInit, AfterViewInit, OnDestroy {

   updateTitle(chapterInfo: ChapterInfo, type: LibraryType) {
     this.title = chapterInfo.seriesName;
-    if (chapterInfo.chapterTitle.length > 0) {
+    if (chapterInfo.chapterTitle != null && chapterInfo.chapterTitle.length > 0) {
       this.title += ' - ' + chapterInfo.chapterTitle;
     }