diff --git a/API.Tests/API.Tests.csproj b/API.Tests/API.Tests.csproj index 11597cb99..d486d9877 100644 --- a/API.Tests/API.Tests.csproj +++ b/API.Tests/API.Tests.csproj @@ -7,14 +7,15 @@ - + + runtime; build; native; contentfiles; analyzers; buildtransitive all - + runtime; build; native; contentfiles; analyzers; buildtransitive all @@ -26,6 +27,7 @@ + diff --git a/API.Tests/ChapterSortComparerTest.cs b/API.Tests/Comparers/ChapterSortComparerTest.cs similarity index 94% rename from API.Tests/ChapterSortComparerTest.cs rename to API.Tests/Comparers/ChapterSortComparerTest.cs index 7ab909ec5..11fecf2c2 100644 --- a/API.Tests/ChapterSortComparerTest.cs +++ b/API.Tests/Comparers/ChapterSortComparerTest.cs @@ -2,7 +2,7 @@ using API.Comparators; using Xunit; -namespace API.Tests +namespace API.Tests.Comparers { public class ChapterSortComparerTest { diff --git a/API.Tests/Comparers/NaturalSortComparerTest.cs b/API.Tests/Comparers/NaturalSortComparerTest.cs new file mode 100644 index 000000000..099da0546 --- /dev/null +++ b/API.Tests/Comparers/NaturalSortComparerTest.cs @@ -0,0 +1,99 @@ +using System; +using System.Linq; +using API.Comparators; +using Xunit; + +namespace API.Tests.Comparers +{ + public class NaturalSortComparerTest + { + private readonly NaturalSortComparer _nc = new NaturalSortComparer(); + + [Theory] + [InlineData( + new[] {"x1.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg"}, + new[] {"x1.jpg", "x3.jpg", "x4.jpg", "x10.jpg", "x11.jpg"} + )] + [InlineData( + new[] {"Beelzebub_153b_RHS.zip", "Beelzebub_01_[Noodles].zip",}, + new[] {"Beelzebub_01_[Noodles].zip", "Beelzebub_153b_RHS.zip"} + )] + [InlineData( + new[] {"[SCX-Scans]_Vandread_v02_Act02.zip", "[SCX-Scans]_Vandread_v02_Act01.zip",}, + new[] {"[SCX-Scans]_Vandread_v02_Act01.zip", "[SCX-Scans]_Vandread_v02_Act02.zip",} + )] + [InlineData( + new[] {"Frogman v01 001.jpg", "Frogman v01 ch01 p00 Credits.jpg",}, + new[] {"Frogman v01 001.jpg", "Frogman v01 ch01 p00 Credits.jpg",} + )] + [InlineData( + 
new[] {"001.jpg", "10.jpg",}, + new[] {"001.jpg", "10.jpg",} + )] + [InlineData( + new[] {"10/001.jpg", "10.jpg",}, + new[] {"10.jpg", "10/001.jpg",} + )] + [InlineData( + new[] {"Batman - Black white vol 1 #04.cbr", "Batman - Black white vol 1 #03.cbr", "Batman - Black white vol 1 #01.cbr", "Batman - Black white vol 1 #02.cbr"}, + new[] {"Batman - Black white vol 1 #01.cbr", "Batman - Black white vol 1 #02.cbr", "Batman - Black white vol 1 #03.cbr", "Batman - Black white vol 1 #04.cbr"} + )] + public void TestNaturalSortComparer(string[] input, string[] expected) + { + Array.Sort(input, _nc); + + var i = 0; + foreach (var s in input) + { + Assert.Equal(s, expected[i]); + i++; + } + } + + + [Theory] + [InlineData( + new[] {"x1.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg"}, + new[] {"x1.jpg", "x3.jpg", "x4.jpg", "x10.jpg", "x11.jpg"} + )] + [InlineData( + new[] {"x2.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg"}, + new[] {"x2.jpg", "x3.jpg", "x4.jpg", "x10.jpg", "x11.jpg"} + )] + [InlineData( + new[] {"Beelzebub_153b_RHS.zip", "Beelzebub_01_[Noodles].zip",}, + new[] {"Beelzebub_01_[Noodles].zip", "Beelzebub_153b_RHS.zip"} + )] + [InlineData( + new[] {"[SCX-Scans]_Vandread_v02_Act02.zip", "[SCX-Scans]_Vandread_v02_Act01.zip","[SCX-Scans]_Vandread_v02_Act07.zip",}, + new[] {"[SCX-Scans]_Vandread_v02_Act01.zip", "[SCX-Scans]_Vandread_v02_Act02.zip","[SCX-Scans]_Vandread_v02_Act07.zip",} + )] + [InlineData( + new[] {"Frogman v01 001.jpg", "Frogman v01 ch01 p00 Credits.jpg",}, + new[] {"Frogman v01 001.jpg", "Frogman v01 ch01 p00 Credits.jpg",} + )] + [InlineData( + new[] {"001.jpg", "10.jpg",}, + new[] {"001.jpg", "10.jpg",} + )] + [InlineData( + new[] {"10/001.jpg", "10.jpg",}, + new[] {"10.jpg", "10/001.jpg",} + )] + [InlineData( + new[] {"Batman - Black white vol 1 #04.cbr", "Batman - Black white vol 1 #03.cbr", "Batman - Black white vol 1 #01.cbr", "Batman - Black white vol 1 #02.cbr"}, + new[] {"Batman - Black white vol 1 #01.cbr", "Batman - Black white vol 1 
#02.cbr", "Batman - Black white vol 1 #03.cbr", "Batman - Black white vol 1 #04.cbr"} + )] + public void TestNaturalSortComparerLinq(string[] input, string[] expected) + { + var output = input.OrderBy(c => c, _nc); + + var i = 0; + foreach (var s in output) + { + Assert.Equal(s, expected[i]); + i++; + } + } + } +} \ No newline at end of file diff --git a/API.Tests/Services/StringLogicalComparerTest.cs b/API.Tests/Comparers/StringLogicalComparerTest.cs similarity index 95% rename from API.Tests/Services/StringLogicalComparerTest.cs rename to API.Tests/Comparers/StringLogicalComparerTest.cs index 3ffa0f8a6..ae93b3b46 100644 --- a/API.Tests/Services/StringLogicalComparerTest.cs +++ b/API.Tests/Comparers/StringLogicalComparerTest.cs @@ -2,7 +2,7 @@ using API.Comparators; using Xunit; -namespace API.Tests.Services +namespace API.Tests.Comparers { public class StringLogicalComparerTest { diff --git a/API.Tests/Entities/SeriesTest.cs b/API.Tests/Entities/SeriesTest.cs new file mode 100644 index 000000000..f0cab0239 --- /dev/null +++ b/API.Tests/Entities/SeriesTest.cs @@ -0,0 +1,27 @@ +using API.Data; +using Xunit; + +namespace API.Tests.Entities +{ + /// + /// Tests for + /// + public class SeriesTest + { + [Theory] + [InlineData("Darker than Black")] + public void CreateSeries(string name) + { + var key = API.Parser.Parser.Normalize(name); + var series = DbFactory.Series(name); + Assert.Equal(0, series.Id); + Assert.Equal(0, series.Pages); + Assert.Equal(name, series.Name); + Assert.Null(series.CoverImage); + Assert.Equal(name, series.LocalizedName); + Assert.Equal(name, series.SortName); + Assert.Equal(name, series.OriginalName); + Assert.Equal(key, series.NormalizedName); + } + } +} \ No newline at end of file diff --git a/API.Tests/Extensions/ChapterListExtensionsTests.cs b/API.Tests/Extensions/ChapterListExtensionsTests.cs new file mode 100644 index 000000000..2251c660b --- /dev/null +++ b/API.Tests/Extensions/ChapterListExtensionsTests.cs @@ -0,0 +1,86 @@ +using 
System.Collections.Generic; +using API.Entities; +using API.Entities.Enums; +using API.Extensions; +using API.Parser; +using Xunit; + +namespace API.Tests.Extensions +{ + public class ChapterListExtensionsTests + { + private Chapter CreateChapter(string range, string number, MangaFile file, bool isSpecial) + { + return new Chapter() + { + Range = range, + Number = number, + Files = new List() {file}, + IsSpecial = isSpecial + }; + } + + private MangaFile CreateFile(string file, MangaFormat format) + { + return new MangaFile() + { + FilePath = file, + Format = format + }; + } + + [Fact] + public void GetAnyChapterByRange_Test_ShouldBeNull() + { + var info = new ParserInfo() + { + Chapters = "0", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + Filename = "darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "darker than black", + Volumes = "0" + }; + + var chapterList = new List() + { + CreateChapter("darker than black - Some special", "0", CreateFile("/manga/darker than black - special.cbz", MangaFormat.Archive), true) + }; + + var actualChapter = chapterList.GetChapterByRange(info); + + Assert.NotEqual(chapterList[0], actualChapter); + + } + + [Fact] + public void GetAnyChapterByRange_Test_ShouldBeNotNull() + { + var info = new ParserInfo() + { + Chapters = "0", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + Filename = "darker than black.cbz", + IsSpecial = true, + Series = "darker than black", + Title = "darker than black", + Volumes = "0" + }; + + var chapterList = new List() + { + CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true) + }; + + var actualChapter = chapterList.GetChapterByRange(info); + + Assert.Equal(chapterList[0], actualChapter); + + } + } +} \ No newline at end of file diff --git a/API.Tests/Extensions/FileInfoExtensionsTests.cs 
b/API.Tests/Extensions/FileInfoExtensionsTests.cs new file mode 100644 index 000000000..2f385b63f --- /dev/null +++ b/API.Tests/Extensions/FileInfoExtensionsTests.cs @@ -0,0 +1,21 @@ +namespace API.Tests.Extensions +{ + public class FileInfoExtensionsTests + { + // [Fact] + // public void DoesLastWriteMatchTest() + // { + // var fi = Substitute.For(); + // fi.LastWriteTime = DateTime.Now; + // + // var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(1)); + // Assert.False(fi.DoesLastWriteMatch(deltaTime)); + // } + // + // [Fact] + // public void IsLastWriteLessThanTest() + // { + // + // } + } +} \ No newline at end of file diff --git a/API.Tests/Extensions/ParserInfoListExtensionsTests.cs b/API.Tests/Extensions/ParserInfoListExtensionsTests.cs new file mode 100644 index 000000000..f6119cc69 --- /dev/null +++ b/API.Tests/Extensions/ParserInfoListExtensionsTests.cs @@ -0,0 +1,41 @@ +using System.Collections.Generic; +using System.Linq; +using API.Entities.Enums; +using API.Extensions; +using API.Parser; +using API.Tests.Helpers; +using Xunit; + +namespace API.Tests.Extensions +{ + public class ParserInfoListExtensions + { + [Theory] + [InlineData(new[] {"1", "1", "3-5", "5", "8", "0", "0"}, new[] {"1", "3-5", "5", "8", "0"})] + public void DistinctVolumesTest(string[] volumeNumbers, string[] expectedNumbers) + { + var infos = volumeNumbers.Select(n => new ParserInfo() {Volumes = n}).ToList(); + Assert.Equal(expectedNumbers, infos.DistinctVolumes()); + } + + [Theory] + [InlineData(new[] {@"Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)] + [InlineData(new[] {@"Cynthia The Mission - c000-006 (v06-07) [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)] + [InlineData(new[] {@"Cynthia The Mission v20 c12-20 [Desudesu&Brolen].zip"}, new[] {@"E:\Manga\Cynthia the 
Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, false)] + public void HasInfoTest(string[] inputInfos, string[] inputChapters, bool expectedHasInfo) + { + var infos = new List(); + foreach (var filename in inputInfos) + { + infos.Add(API.Parser.Parser.Parse( + filename, + string.Empty)); + } + + var files = inputChapters.Select(s => EntityFactory.CreateMangaFile(s, MangaFormat.Archive, 199)).ToList(); + var chapter = EntityFactory.CreateChapter("0-6", false, files); + + Assert.Equal(expectedHasInfo, infos.HasInfo(chapter)); + } + } +} \ No newline at end of file diff --git a/API.Tests/Extensions/SeriesExtensionsTests.cs b/API.Tests/Extensions/SeriesExtensionsTests.cs new file mode 100644 index 000000000..86d788036 --- /dev/null +++ b/API.Tests/Extensions/SeriesExtensionsTests.cs @@ -0,0 +1,31 @@ +using API.Entities; +using API.Extensions; +using Xunit; + +namespace API.Tests.Extensions +{ + public class SeriesExtensionsTests + { + [Theory] + [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker than Black"}, true)] + [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker_than_Black"}, true)] + [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker then Black!"}, false)] + [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"Salem's Lot"}, true)] + [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salems lot"}, true)] + [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salem's lot"}, true)] + // Different normalizations pass as we check normalization against an on-the-fly calculation so we don't delete series just because we change how normalization works + [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot", "salems lot"}, new [] {"salem's lot"}, true)] + public void NameInListTest(string[] seriesInput, string[] list, bool 
expected) + { + var series = new Series() + { + Name = seriesInput[0], + LocalizedName = seriesInput[1], + OriginalName = seriesInput[2], + NormalizedName = seriesInput.Length == 4 ? seriesInput[3] : API.Parser.Parser.Normalize(seriesInput[0]) + }; + + Assert.Equal(expected, series.NameInList(list)); + } + } +} \ No newline at end of file diff --git a/API.Tests/Helpers/EntityFactory.cs b/API.Tests/Helpers/EntityFactory.cs new file mode 100644 index 000000000..b3b09d486 --- /dev/null +++ b/API.Tests/Helpers/EntityFactory.cs @@ -0,0 +1,57 @@ +using System.Collections.Generic; +using API.Entities; +using API.Entities.Enums; + +namespace API.Tests.Helpers +{ + /// + /// Used to help quickly create DB entities for Unit Testing + /// + public static class EntityFactory + { + public static Series CreateSeries(string name) + { + return new Series() + { + Name = name, + SortName = name, + LocalizedName = name, + NormalizedName = API.Parser.Parser.Normalize(name), + Volumes = new List() + }; + } + + public static Volume CreateVolume(string volumeNumber, List chapters = null) + { + return new Volume() + { + Name = volumeNumber, + Pages = 0, + Chapters = chapters ?? new List() + }; + } + + public static Chapter CreateChapter(string range, bool isSpecial, List files = null) + { + return new Chapter() + { + IsSpecial = isSpecial, + Range = range, + Number = API.Parser.Parser.MinimumNumberFromRange(range) + string.Empty, + Files = files ?? 
new List(), + Pages = 0, + + }; + } + + public static MangaFile CreateMangaFile(string filename, MangaFormat format, int pages) + { + return new MangaFile() + { + FilePath = filename, + Format = format, + Pages = pages + }; + } + } +} \ No newline at end of file diff --git a/API.Tests/Helpers/ParserInfoFactory.cs b/API.Tests/Helpers/ParserInfoFactory.cs new file mode 100644 index 000000000..7dcf564e1 --- /dev/null +++ b/API.Tests/Helpers/ParserInfoFactory.cs @@ -0,0 +1,25 @@ +using System.IO; +using API.Entities.Enums; +using API.Parser; + +namespace API.Tests.Helpers +{ + public static class ParserInfoFactory + { + public static ParserInfo CreateParsedInfo(string series, string volumes, string chapters, string filename, bool isSpecial) + { + return new ParserInfo() + { + Chapters = chapters, + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = Path.Join(@"/manga/", filename), + Filename = filename, + IsSpecial = isSpecial, + Title = Path.GetFileNameWithoutExtension(filename), + Series = series, + Volumes = volumes + }; + } + } +} \ No newline at end of file diff --git a/API.Tests/Helpers/PrivateObjectPrivateType.cs b/API.Tests/Helpers/PrivateObjectPrivateType.cs deleted file mode 100644 index e99016828..000000000 --- a/API.Tests/Helpers/PrivateObjectPrivateType.cs +++ /dev/null @@ -1,1864 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT license. See LICENSE file in the project root for full license information. 
- -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Globalization; -using System.Reflection; - -namespace Microsoft.VisualStudio.TestTools.UnitTesting -{ - /// - /// This class represents the live NON public INTERNAL object in the system - /// - public class PrivateObject - { - // bind everything - private const BindingFlags BindToEveryThing = BindingFlags.Default | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Public; - - private static BindingFlags constructorFlags = BindingFlags.Instance | BindingFlags.Public | BindingFlags.CreateInstance | BindingFlags.NonPublic; - - private object target; // automatically initialized to null - private Type originalType; // automatically initialized to null - - private Dictionary> methodCache; // automatically initialized to null - - /// - /// Initializes a new instance of the class that contains - /// the already existing object of the private class - /// - /// object that serves as starting point to reach the private members - /// the derefrencing string using . that points to the object to be retrived as in m_X.m_Y.m_Z - public PrivateObject(object obj, string memberToAccess) - { - ValidateAccessString(memberToAccess); - - PrivateObject temp = obj as PrivateObject; - if (temp == null) - { - temp = new PrivateObject(obj); - } - - // Split The access string - string[] arr = memberToAccess.Split(new char[] { '.' }); - - for (int i = 0; i < arr.Length; i++) - { - object next = temp.InvokeHelper(arr[i], BindToEveryThing | BindingFlags.Instance | BindingFlags.GetField | BindingFlags.GetProperty, null, CultureInfo.InvariantCulture); - temp = new PrivateObject(next); - } - - this.target = temp.target; - this.originalType = temp.originalType; - } - - /// - /// Initializes a new instance of the class that wraps the - /// specified type. 
- /// - /// Name of the assembly - /// fully qualified name - /// Argmenets to pass to the constructor - public PrivateObject(string assemblyName, string typeName, params object[] args) - : this(assemblyName, typeName, null, args) - { - } - - /// - /// Initializes a new instance of the class that wraps the - /// specified type. - /// - /// Name of the assembly - /// fully qualified name - /// An array of objects representing the number, order, and type of the parameters for the constructor to get - /// Argmenets to pass to the constructor - public PrivateObject(string assemblyName, string typeName, Type[] parameterTypes, object[] args) - : this(Type.GetType(string.Format(CultureInfo.InvariantCulture, "{0}, {1}", typeName, assemblyName), false), parameterTypes, args) - { - } - - /// - /// Initializes a new instance of the class that wraps the - /// specified type. - /// - /// type of the object to create - /// Argmenets to pass to the constructor - public PrivateObject(Type type, params object[] args) - : this(type, null, args) - { - } - - /// - /// Initializes a new instance of the class that wraps the - /// specified type. - /// - /// type of the object to create - /// An array of objects representing the number, order, and type of the parameters for the constructor to get - /// Argmenets to pass to the constructor - public PrivateObject(Type type, Type[] parameterTypes, object[] args) - { - object o; - if (parameterTypes != null) - { - ConstructorInfo ci = type.GetConstructor(BindToEveryThing, null, parameterTypes, null); - if (ci == null) - { - throw new ArgumentException("The constructor with the specified signature could not be found. You might need to regenerate your private accessor, or the member may be private and defined on a base class. 
If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor."); - } - - try - { - o = ci.Invoke(args); - } - catch (TargetInvocationException e) - { - Debug.Assert(e.InnerException != null, "Inner exception should not be null."); - if (e.InnerException != null) - { - throw e.InnerException; - } - - throw; - } - } - else - { - o = Activator.CreateInstance(type, constructorFlags, null, args, null); - } - - this.ConstructFrom(o); - } - - /// - /// Initializes a new instance of the class that wraps - /// the given object. - /// - /// object to wrap - public PrivateObject(object obj) - { - this.ConstructFrom(obj); - } - - /// - /// Initializes a new instance of the class that wraps - /// the given object. - /// - /// object to wrap - /// PrivateType object - public PrivateObject(object obj, PrivateType type) - { - this.target = obj; - this.originalType = type.ReferencedType; - } - - /// - /// Gets or sets the target - /// - public object Target - { - get - { - return this.target; - } - - set - { - this.target = value; - this.originalType = value.GetType(); - } - } - - /// - /// Gets the type of underlying object - /// - public Type RealType - { - get - { - return this.originalType; - } - } - - private Dictionary> GenericMethodCache - { - get - { - if (this.methodCache == null) - { - this.BuildGenericMethodCacheForType(this.originalType); - } - - Debug.Assert(this.methodCache != null, "Invalid method cache for type."); - - return this.methodCache; - } - } - - /// - /// returns the hash code of the target object - /// - /// int representing hashcode of the target object - public override int GetHashCode() - { - Debug.Assert(this.target != null, "target should not be null."); - return this.target.GetHashCode(); - } - - /// - /// Equals - /// - /// Object with whom to compare - /// returns true if the objects are equal. 
- public override bool Equals(object obj) - { - if (this != obj) - { - Debug.Assert(this.target != null, "target should not be null."); - if (typeof(PrivateObject) == obj?.GetType()) - { - return this.target.Equals(((PrivateObject)obj).target); - } - else - { - return false; - } - } - - return true; - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// Arguments to pass to the member to invoke. - /// Result of method call - public object Invoke(string name, params object[] args) - { - return this.Invoke(name, null, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// An array of objects representing the number, order, and type of the parameters for the method to get. - /// Arguments to pass to the member to invoke. - /// Result of method call - public object Invoke(string name, Type[] parameterTypes, object[] args) - { - return this.Invoke(name, parameterTypes, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// An array of objects representing the number, order, and type of the parameters for the method to get. - /// Arguments to pass to the member to invoke. - /// An array of types corresponding to the types of the generic arguments. - /// Result of method call - public object Invoke(string name, Type[] parameterTypes, object[] args, Type[] typeArguments) - { - return this.Invoke(name, BindToEveryThing, parameterTypes, args, CultureInfo.InvariantCulture, typeArguments); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// Arguments to pass to the member to invoke. 
- /// Culture info - /// Result of method call - public object Invoke(string name, object[] args, CultureInfo culture) - { - return this.Invoke(name, null, args, culture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// An array of objects representing the number, order, and type of the parameters for the method to get. - /// Arguments to pass to the member to invoke. - /// Culture info - /// Result of method call - public object Invoke(string name, Type[] parameterTypes, object[] args, CultureInfo culture) - { - return this.Invoke(name, BindToEveryThing, parameterTypes, args, culture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// A bitmask comprised of one or more that specify how the search is conducted. - /// Arguments to pass to the member to invoke. - /// Result of method call - public object Invoke(string name, BindingFlags bindingFlags, params object[] args) - { - return this.Invoke(name, bindingFlags, null, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// A bitmask comprised of one or more that specify how the search is conducted. - /// An array of objects representing the number, order, and type of the parameters for the method to get. - /// Arguments to pass to the member to invoke. - /// Result of method call - public object Invoke(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args) - { - return this.Invoke(name, bindingFlags, parameterTypes, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// A bitmask comprised of one or more that specify how the search is conducted. - /// Arguments to pass to the member to invoke. 
- /// Culture info - /// Result of method call - public object Invoke(string name, BindingFlags bindingFlags, object[] args, CultureInfo culture) - { - return this.Invoke(name, bindingFlags, null, args, culture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// A bitmask comprised of one or more that specify how the search is conducted. - /// An array of objects representing the number, order, and type of the parameters for the method to get. - /// Arguments to pass to the member to invoke. - /// Culture info - /// Result of method call - public object Invoke(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args, CultureInfo culture) - { - return this.Invoke(name, bindingFlags, parameterTypes, args, culture, null); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// A bitmask comprised of one or more that specify how the search is conducted. - /// An array of objects representing the number, order, and type of the parameters for the method to get. - /// Arguments to pass to the member to invoke. - /// Culture info - /// An array of types corresponding to the types of the generic arguments. - /// Result of method call - public object Invoke(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args, CultureInfo culture, Type[] typeArguments) - { - if (parameterTypes != null) - { - bindingFlags |= BindToEveryThing | BindingFlags.Instance; - - // Fix up the parameter types - MethodInfo member = this.originalType.GetMethod(name, bindingFlags, null, parameterTypes, null); - - // If the method was not found and type arguments were provided for generic paramaters, - // attempt to look up a generic method. - if ((member == null) && (typeArguments != null)) - { - // This method may contain generic parameters...if so, the previous call to - // GetMethod() will fail because it doesn't fully support generic parameters. 
- - // Look in the method cache to see if there is a generic method - // on the incoming type that contains the correct signature. - member = this.GetGenericMethodFromCache(name, parameterTypes, typeArguments, bindingFlags, null); - } - - if (member == null) - { - throw new ArgumentException( - string.Format(CultureInfo.CurrentCulture, "The member specified ({0}) could not be found. You might need to regenerate your private accessor, or the member may be private and defined on a base class. If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor.", name)); - } - - try - { - if (member.IsGenericMethodDefinition) - { - MethodInfo constructed = member.MakeGenericMethod(typeArguments); - return constructed.Invoke(this.target, bindingFlags, null, args, culture); - } - else - { - return member.Invoke(this.target, bindingFlags, null, args, culture); - } - } - catch (TargetInvocationException e) - { - Debug.Assert(e.InnerException != null, "Inner exception should not be null."); - if (e.InnerException != null) - { - throw e.InnerException; - } - - throw; - } - } - else - { - return this.InvokeHelper(name, bindingFlags | BindingFlags.InvokeMethod, args, culture); - } - } - - /// - /// Gets the array element using array of subsrcipts for each dimension - /// - /// Name of the member - /// the indices of array - /// An arrya of elements. 
- public object GetArrayElement(string name, params int[] indices) - { - return this.GetArrayElement(name, BindToEveryThing, indices); - } - - /// - /// Sets the array element using array of subsrcipts for each dimension - /// - /// Name of the member - /// Value to set - /// the indices of array - public void SetArrayElement(string name, object value, params int[] indices) - { - this.SetArrayElement(name, BindToEveryThing, value, indices); - } - - /// - /// Gets the array element using array of subsrcipts for each dimension - /// - /// Name of the member - /// A bitmask comprised of one or more that specify how the search is conducted. - /// the indices of array - /// An arrya of elements. - public object GetArrayElement(string name, BindingFlags bindingFlags, params int[] indices) - { - Array arr = (Array)this.InvokeHelper(name, BindingFlags.GetField | bindingFlags, null, CultureInfo.InvariantCulture); - return arr.GetValue(indices); - } - - /// - /// Sets the array element using array of subsrcipts for each dimension - /// - /// Name of the member - /// A bitmask comprised of one or more that specify how the search is conducted. - /// Value to set - /// the indices of array - public void SetArrayElement(string name, BindingFlags bindingFlags, object value, params int[] indices) - { - Array arr = (Array)this.InvokeHelper(name, BindingFlags.GetField | bindingFlags, null, CultureInfo.InvariantCulture); - arr.SetValue(value, indices); - } - - /// - /// Get the field - /// - /// Name of the field - /// The field. - public object GetField(string name) - { - return this.GetField(name, BindToEveryThing); - } - - /// - /// Sets the field - /// - /// Name of the field - /// value to set - public void SetField(string name, object value) - { - this.SetField(name, BindToEveryThing, value); - } - - /// - /// Gets the field - /// - /// Name of the field - /// A bitmask comprised of one or more that specify how the search is conducted. - /// The field. 
- public object GetField(string name, BindingFlags bindingFlags) - { - return this.InvokeHelper(name, BindingFlags.GetField | bindingFlags, null, CultureInfo.InvariantCulture); - } - - /// - /// Sets the field - /// - /// Name of the field - /// A bitmask comprised of one or more that specify how the search is conducted. - /// value to set - public void SetField(string name, BindingFlags bindingFlags, object value) - { - this.InvokeHelper(name, BindingFlags.SetField | bindingFlags, new object[] { value }, CultureInfo.InvariantCulture); - } - - /// - /// Get the field or property - /// - /// Name of the field or property - /// The field or property. - public object GetFieldOrProperty(string name) - { - return this.GetFieldOrProperty(name, BindToEveryThing); - } - - /// - /// Sets the field or property - /// - /// Name of the field or property - /// value to set - public void SetFieldOrProperty(string name, object value) - { - this.SetFieldOrProperty(name, BindToEveryThing, value); - } - - /// - /// Gets the field or property - /// - /// Name of the field or property - /// A bitmask comprised of one or more that specify how the search is conducted. - /// The field or property. - public object GetFieldOrProperty(string name, BindingFlags bindingFlags) - { - return this.InvokeHelper(name, BindingFlags.GetField | BindingFlags.GetProperty | bindingFlags, null, CultureInfo.InvariantCulture); - } - - /// - /// Sets the field or property - /// - /// Name of the field or property - /// A bitmask comprised of one or more that specify how the search is conducted. - /// value to set - public void SetFieldOrProperty(string name, BindingFlags bindingFlags, object value) - { - this.InvokeHelper(name, BindingFlags.SetField | BindingFlags.SetProperty | bindingFlags, new object[] { value }, CultureInfo.InvariantCulture); - } - - /// - /// Gets the property - /// - /// Name of the property - /// Arguments to pass to the member to invoke. - /// The property. 
- public object GetProperty(string name, params object[] args) - { - return this.GetProperty(name, null, args); - } - - /// - /// Gets the property - /// - /// Name of the property - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// Arguments to pass to the member to invoke. - /// The property. - public object GetProperty(string name, Type[] parameterTypes, object[] args) - { - return this.GetProperty(name, BindToEveryThing, parameterTypes, args); - } - - /// - /// Set the property - /// - /// Name of the property - /// value to set - /// Arguments to pass to the member to invoke. - public void SetProperty(string name, object value, params object[] args) - { - this.SetProperty(name, null, value, args); - } - - /// - /// Set the property - /// - /// Name of the property - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// value to set - /// Arguments to pass to the member to invoke. - public void SetProperty(string name, Type[] parameterTypes, object value, object[] args) - { - this.SetProperty(name, BindToEveryThing, value, parameterTypes, args); - } - - /// - /// Gets the property - /// - /// Name of the property - /// A bitmask comprised of one or more that specify how the search is conducted. - /// Arguments to pass to the member to invoke. - /// The property. - public object GetProperty(string name, BindingFlags bindingFlags, params object[] args) - { - return this.GetProperty(name, bindingFlags, null, args); - } - - /// - /// Gets the property - /// - /// Name of the property - /// A bitmask comprised of one or more that specify how the search is conducted. - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// Arguments to pass to the member to invoke. - /// The property. 
- public object GetProperty(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args) - { - if (parameterTypes != null) - { - PropertyInfo pi = this.originalType.GetProperty(name, bindingFlags, null, null, parameterTypes, null); - if (pi == null) - { - throw new ArgumentException( - string.Format(CultureInfo.CurrentCulture, "The member specified ({0}) could not be found. You might need to regenerate your private accessor, or the member may be private and defined on a base class. If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor.", name)); - } - - return pi.GetValue(this.target, args); - } - else - { - return this.InvokeHelper(name, bindingFlags | BindingFlags.GetProperty, args, null); - } - } - - /// - /// Sets the property - /// - /// Name of the property - /// A bitmask comprised of one or more that specify how the search is conducted. - /// value to set - /// Arguments to pass to the member to invoke. - public void SetProperty(string name, BindingFlags bindingFlags, object value, params object[] args) - { - this.SetProperty(name, bindingFlags, value, null, args); - } - - /// - /// Sets the property - /// - /// Name of the property - /// A bitmask comprised of one or more that specify how the search is conducted. - /// value to set - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// Arguments to pass to the member to invoke. - public void SetProperty(string name, BindingFlags bindingFlags, object value, Type[] parameterTypes, object[] args) - { - if (parameterTypes != null) - { - PropertyInfo pi = this.originalType.GetProperty(name, bindingFlags, null, null, parameterTypes, null); - if (pi == null) - { - throw new ArgumentException( - string.Format(CultureInfo.CurrentCulture, "The member specified ({0}) could not be found. 
You might need to regenerate your private accessor, or the member may be private and defined on a base class. If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor.", name)); - } - - pi.SetValue(this.target, value, args); - } - else - { - object[] pass = new object[(args?.Length ?? 0) + 1]; - pass[0] = value; - args?.CopyTo(pass, 1); - this.InvokeHelper(name, bindingFlags | BindingFlags.SetProperty, pass, null); - } - } - - /// - /// Validate access string - /// - /// access string - private static void ValidateAccessString(string access) - { - if (access.Length == 0) - { - throw new ArgumentException("Access string has invalid syntax."); - } - - string[] arr = access.Split('.'); - foreach (string str in arr) - { - if ((str.Length == 0) || (str.IndexOfAny(new char[] { ' ', '\t', '\n' }) != -1)) - { - throw new ArgumentException("Access string has invalid syntax."); - } - } - } - - /// - /// Invokes the memeber - /// - /// Name of the member - /// Additional attributes - /// Arguments for the invocation - /// Culture - /// Result of the invocation - private object InvokeHelper(string name, BindingFlags bindingFlags, object[] args, CultureInfo culture) - { - Debug.Assert(this.target != null, "Internal Error: Null reference is returned for internal object"); - - // Invoke the actual Method - try - { - return this.originalType.InvokeMember(name, bindingFlags, null, this.target, args, culture); - } - catch (TargetInvocationException e) - { - Debug.Assert(e.InnerException != null, "Inner exception should not be null."); - if (e.InnerException != null) - { - throw e.InnerException; - } - - throw; - } - } - - private void ConstructFrom(object obj) - { - this.target = obj; - this.originalType = obj.GetType(); - } - - private void BuildGenericMethodCacheForType(Type t) - { - Debug.Assert(t != null, "type should not be null."); - this.methodCache = new Dictionary>(); - - MethodInfo[] members = t.GetMethods(BindToEveryThing); 
- LinkedList listByName; // automatically initialized to null - - foreach (MethodInfo member in members) - { - if (member.IsGenericMethod || member.IsGenericMethodDefinition) - { - if (!this.GenericMethodCache.TryGetValue(member.Name, out listByName)) - { - listByName = new LinkedList(); - this.GenericMethodCache.Add(member.Name, listByName); - } - - Debug.Assert(listByName != null, "list should not be null."); - listByName.AddLast(member); - } - } - } - - /// - /// Extracts the most appropriate generic method signature from the current private type. - /// - /// The name of the method in which to search the signature cache. - /// An array of types corresponding to the types of the parameters in which to search. - /// An array of types corresponding to the types of the generic arguments. - /// to further filter the method signatures. - /// Modifiers for parameters. - /// A methodinfo instance. - private MethodInfo GetGenericMethodFromCache(string methodName, Type[] parameterTypes, Type[] typeArguments, BindingFlags bindingFlags, ParameterModifier[] modifiers) - { - Debug.Assert(!string.IsNullOrEmpty(methodName), "Invalid method name."); - Debug.Assert(parameterTypes != null, "Invalid parameter type array."); - Debug.Assert(typeArguments != null, "Invalid type arguments array."); - - // Build a preliminary list of method candidates that contain roughly the same signature. - var methodCandidates = this.GetMethodCandidates(methodName, parameterTypes, typeArguments, bindingFlags, modifiers); - - // Search of ambiguous methods (methods with the same signature). 
- MethodInfo[] finalCandidates = new MethodInfo[methodCandidates.Count]; - methodCandidates.CopyTo(finalCandidates, 0); - - if ((parameterTypes != null) && (parameterTypes.Length == 0)) - { - for (int i = 0; i < finalCandidates.Length; i++) - { - MethodInfo methodInfo = finalCandidates[i]; - - if (!RuntimeTypeHelper.CompareMethodSigAndName(methodInfo, finalCandidates[0])) - { - throw new AmbiguousMatchException(); - } - } - - // All the methods have the exact same name and sig so return the most derived one. - return RuntimeTypeHelper.FindMostDerivedNewSlotMeth(finalCandidates, finalCandidates.Length) as MethodInfo; - } - - // Now that we have a preliminary list of candidates, select the most appropriate one. - return RuntimeTypeHelper.SelectMethod(bindingFlags, finalCandidates, parameterTypes, modifiers) as MethodInfo; - } - - private LinkedList GetMethodCandidates(string methodName, Type[] parameterTypes, Type[] typeArguments, BindingFlags bindingFlags, ParameterModifier[] modifiers) - { - Debug.Assert(!string.IsNullOrEmpty(methodName), "methodName should not be null."); - Debug.Assert(parameterTypes != null, "parameterTypes should not be null."); - Debug.Assert(typeArguments != null, "typeArguments should not be null."); - - LinkedList methodCandidates = new LinkedList(); - LinkedList methods = null; - - if (!this.GenericMethodCache.TryGetValue(methodName, out methods)) - { - return methodCandidates; - } - - Debug.Assert(methods != null, "methods should not be null."); - - foreach (MethodInfo candidate in methods) - { - bool paramMatch = true; - ParameterInfo[] candidateParams = null; - Type[] genericArgs = candidate.GetGenericArguments(); - Type sourceParameterType = null; - - if (genericArgs.Length != typeArguments.Length) - { - continue; - } - - // Since we can't just get the correct MethodInfo from Reflection, - // we will just match the number of parameters, their order, and their type - var methodCandidate = candidate; - candidateParams = 
methodCandidate.GetParameters(); - - if (candidateParams.Length != parameterTypes.Length) - { - continue; - } - - // Exact binding - if ((bindingFlags & BindingFlags.ExactBinding) != 0) - { - int i = 0; - - foreach (ParameterInfo candidateParam in candidateParams) - { - sourceParameterType = parameterTypes[i++]; - - if (candidateParam.ParameterType.ContainsGenericParameters) - { - // Since we have a generic parameter here, just make sure the IsArray matches. - if (candidateParam.ParameterType.IsArray != sourceParameterType.IsArray) - { - paramMatch = false; - break; - } - } - else - { - if (candidateParam.ParameterType != sourceParameterType) - { - paramMatch = false; - break; - } - } - } - - if (paramMatch) - { - methodCandidates.AddLast(methodCandidate); - continue; - } - } - else - { - methodCandidates.AddLast(methodCandidate); - } - } - - return methodCandidates; - } - } - - /// - /// This class represents a private class for the Private Accessor functionality. - /// - public class PrivateType - { - /// - /// Binds to everything - /// - private const BindingFlags BindToEveryThing = BindingFlags.Default - | BindingFlags.NonPublic | BindingFlags.Instance - | BindingFlags.Public | BindingFlags.Static | BindingFlags.FlattenHierarchy; - - /// - /// The wrapped type. - /// - private Type type; - - /// - /// Initializes a new instance of the class that contains the private type. - /// - /// Assembly name - /// fully qualified name of the - public PrivateType(string assemblyName, string typeName) - { - Assembly asm = Assembly.Load(assemblyName); - - this.type = asm.GetType(typeName, true); - } - - /// - /// Initializes a new instance of the class that contains - /// the private type from the type object - /// - /// The wrapped Type to create. 
- public PrivateType(Type type) - { - if (type == null) - { - throw new ArgumentNullException("type"); - } - - this.type = type; - } - - /// - /// Gets the referenced type - /// - public Type ReferencedType => this.type; - - /// - /// Invokes static member - /// - /// Name of the member to InvokeHelper - /// Arguements to the invoction - /// Result of invocation - public object InvokeStatic(string name, params object[] args) - { - return this.InvokeStatic(name, null, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes static member - /// - /// Name of the member to InvokeHelper - /// An array of objects representing the number, order, and type of the parameters for the method to invoke - /// Arguements to the invoction - /// Result of invocation - public object InvokeStatic(string name, Type[] parameterTypes, object[] args) - { - return this.InvokeStatic(name, parameterTypes, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes static member - /// - /// Name of the member to InvokeHelper - /// An array of objects representing the number, order, and type of the parameters for the method to invoke - /// Arguements to the invoction - /// An array of types corresponding to the types of the generic arguments. 
- /// Result of invocation - public object InvokeStatic(string name, Type[] parameterTypes, object[] args, Type[] typeArguments) - { - return this.InvokeStatic(name, BindToEveryThing, parameterTypes, args, CultureInfo.InvariantCulture, typeArguments); - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// Arguements to the invocation - /// Culture - /// Result of invocation - public object InvokeStatic(string name, object[] args, CultureInfo culture) - { - return this.InvokeStatic(name, null, args, culture); - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// An array of objects representing the number, order, and type of the parameters for the method to invoke - /// Arguements to the invocation - /// Culture info - /// Result of invocation - public object InvokeStatic(string name, Type[] parameterTypes, object[] args, CultureInfo culture) - { - return this.InvokeStatic(name, BindingFlags.InvokeMethod, parameterTypes, args, culture); - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// Additional invocation attributes - /// Arguements to the invocation - /// Result of invocation - public object InvokeStatic(string name, BindingFlags bindingFlags, params object[] args) - { - return this.InvokeStatic(name, bindingFlags, null, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// Additional invocation attributes - /// An array of objects representing the number, order, and type of the parameters for the method to invoke - /// Arguements to the invocation - /// Result of invocation - public object InvokeStatic(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args) - { - return this.InvokeStatic(name, bindingFlags, parameterTypes, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// Additional invocation attributes - /// Arguements to the 
invocation - /// Culture - /// Result of invocation - public object InvokeStatic(string name, BindingFlags bindingFlags, object[] args, CultureInfo culture) - { - return this.InvokeStatic(name, bindingFlags, null, args, culture); - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// Additional invocation attributes - /// /// An array of objects representing the number, order, and type of the parameters for the method to invoke - /// Arguements to the invocation - /// Culture - /// Result of invocation - public object InvokeStatic(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args, CultureInfo culture) - { - return this.InvokeStatic(name, bindingFlags, parameterTypes, args, culture, null); - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// Additional invocation attributes - /// /// An array of objects representing the number, order, and type of the parameters for the method to invoke - /// Arguements to the invocation - /// Culture - /// An array of types corresponding to the types of the generic arguments. - /// Result of invocation - public object InvokeStatic(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args, CultureInfo culture, Type[] typeArguments) - { - if (parameterTypes != null) - { - MethodInfo member = this.type.GetMethod(name, bindingFlags | BindToEveryThing | BindingFlags.Static, null, parameterTypes, null); - if (member == null) - { - throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "The member specified ({0}) could not be found. You might need to regenerate your private accessor, or the member may be private and defined on a base class. 
If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor.", name)); - } - - try - { - if (member.IsGenericMethodDefinition) - { - MethodInfo constructed = member.MakeGenericMethod(typeArguments); - return constructed.Invoke(null, bindingFlags, null, args, culture); - } - else - { - return member.Invoke(null, bindingFlags, null, args, culture); - } - } - catch (TargetInvocationException e) - { - Debug.Assert(e.InnerException != null, "Inner Exception should not be null."); - if (e.InnerException != null) - { - throw e.InnerException; - } - - throw; - } - } - else - { - return this.InvokeHelperStatic(name, bindingFlags | BindingFlags.InvokeMethod, args, culture); - } - } - - /// - /// Gets the element in static array - /// - /// Name of the array - /// - /// A one-dimensional array of 32-bit integers that represent the indexes specifying - /// the position of the element to get. For instance, to access a[10][11] the indices would be {10,11} - /// - /// element at the specified location - public object GetStaticArrayElement(string name, params int[] indices) - { - return this.GetStaticArrayElement(name, BindToEveryThing, indices); - } - - /// - /// Sets the memeber of the static array - /// - /// Name of the array - /// value to set - /// - /// A one-dimensional array of 32-bit integers that represent the indexes specifying - /// the position of the element to set. For instance, to access a[10][11] the array would be {10,11} - /// - public void SetStaticArrayElement(string name, object value, params int[] indices) - { - this.SetStaticArrayElement(name, BindToEveryThing, value, indices); - } - - /// - /// Gets the element in satatic array - /// - /// Name of the array - /// Additional InvokeHelper attributes - /// - /// A one-dimensional array of 32-bit integers that represent the indexes specifying - /// the position of the element to get. 
For instance, to access a[10][11] the array would be {10,11} - /// - /// element at the spcified location - public object GetStaticArrayElement(string name, BindingFlags bindingFlags, params int[] indices) - { - Array arr = (Array)this.InvokeHelperStatic(name, BindingFlags.GetField | BindingFlags.GetProperty | bindingFlags, null, CultureInfo.InvariantCulture); - return arr.GetValue(indices); - } - - /// - /// Sets the memeber of the static array - /// - /// Name of the array - /// Additional InvokeHelper attributes - /// value to set - /// - /// A one-dimensional array of 32-bit integers that represent the indexes specifying - /// the position of the element to set. For instance, to access a[10][11] the array would be {10,11} - /// - public void SetStaticArrayElement(string name, BindingFlags bindingFlags, object value, params int[] indices) - { - Array arr = (Array)this.InvokeHelperStatic(name, BindingFlags.GetField | BindingFlags.GetProperty | BindingFlags.Static | bindingFlags, null, CultureInfo.InvariantCulture); - arr.SetValue(value, indices); - } - - /// - /// Gets the static field - /// - /// Name of the field - /// The static field. - public object GetStaticField(string name) - { - return this.GetStaticField(name, BindToEveryThing); - } - - /// - /// Sets the static field - /// - /// Name of the field - /// Arguement to the invocation - public void SetStaticField(string name, object value) - { - this.SetStaticField(name, BindToEveryThing, value); - } - - /// - /// Gets the static field using specified InvokeHelper attributes - /// - /// Name of the field - /// Additional invocation attributes - /// The static field. 
- public object GetStaticField(string name, BindingFlags bindingFlags) - { - return this.InvokeHelperStatic(name, BindingFlags.GetField | BindingFlags.Static | bindingFlags, null, CultureInfo.InvariantCulture); - } - - /// - /// Sets the static field using binding attributes - /// - /// Name of the field - /// Additional InvokeHelper attributes - /// Arguement to the invocation - public void SetStaticField(string name, BindingFlags bindingFlags, object value) - { - this.InvokeHelperStatic(name, BindingFlags.SetField | bindingFlags | BindingFlags.Static, new[] { value }, CultureInfo.InvariantCulture); - } - - /// - /// Gets the static field or property - /// - /// Name of the field or property - /// The static field or property. - public object GetStaticFieldOrProperty(string name) - { - return this.GetStaticFieldOrProperty(name, BindToEveryThing); - } - - /// - /// Sets the static field or property - /// - /// Name of the field or property - /// Value to be set to field or property - public void SetStaticFieldOrProperty(string name, object value) - { - this.SetStaticFieldOrProperty(name, BindToEveryThing, value); - } - - /// - /// Gets the static field or property using specified InvokeHelper attributes - /// - /// Name of the field or property - /// Additional invocation attributes - /// The static field or property. 
- public object GetStaticFieldOrProperty(string name, BindingFlags bindingFlags) - { - return this.InvokeHelperStatic(name, BindingFlags.GetField | BindingFlags.GetProperty | BindingFlags.Static | bindingFlags, null, CultureInfo.InvariantCulture); - } - - /// - /// Sets the static field or property using binding attributes - /// - /// Name of the field or property - /// Additional invocation attributes - /// Value to be set to field or property - public void SetStaticFieldOrProperty(string name, BindingFlags bindingFlags, object value) - { - this.InvokeHelperStatic(name, BindingFlags.SetField | BindingFlags.SetProperty | bindingFlags | BindingFlags.Static, new[] { value }, CultureInfo.InvariantCulture); - } - - /// - /// Gets the static property - /// - /// Name of the field or property - /// Arguements to the invocation - /// The static property. - public object GetStaticProperty(string name, params object[] args) - { - return this.GetStaticProperty(name, BindToEveryThing, args); - } - - /// - /// Sets the static property - /// - /// Name of the property - /// Value to be set to field or property - /// Arguments to pass to the member to invoke. - public void SetStaticProperty(string name, object value, params object[] args) - { - this.SetStaticProperty(name, BindToEveryThing, value, null, args); - } - - /// - /// Sets the static property - /// - /// Name of the property - /// Value to be set to field or property - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// Arguments to pass to the member to invoke. - public void SetStaticProperty(string name, object value, Type[] parameterTypes, object[] args) - { - this.SetStaticProperty(name, BindingFlags.SetProperty, value, parameterTypes, args); - } - - /// - /// Gets the static property - /// - /// Name of the property - /// Additional invocation attributes. - /// Arguments to pass to the member to invoke. - /// The static property. 
- public object GetStaticProperty(string name, BindingFlags bindingFlags, params object[] args) - { - return this.GetStaticProperty(name, BindingFlags.GetProperty | BindingFlags.Static | bindingFlags, null, args); - } - - /// - /// Gets the static property - /// - /// Name of the property - /// Additional invocation attributes. - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// Arguments to pass to the member to invoke. - /// The static property. - public object GetStaticProperty(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args) - { - if (parameterTypes != null) - { - PropertyInfo pi = this.type.GetProperty(name, bindingFlags | BindingFlags.Static, null, null, parameterTypes, null); - if (pi == null) - { - throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "The member specified ({0}) could not be found. You might need to regenerate your private accessor, or the member may be private and defined on a base class. If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor.", name)); - } - - return pi.GetValue(null, args); - } - else - { - return this.InvokeHelperStatic(name, bindingFlags | BindingFlags.GetProperty, args, null); - } - } - - /// - /// Sets the static property - /// - /// Name of the property - /// Additional invocation attributes. - /// Value to be set to field or property - /// Optional index values for indexed properties. The indexes of indexed properties are zero-based. This value should be null for non-indexed properties. - public void SetStaticProperty(string name, BindingFlags bindingFlags, object value, params object[] args) - { - this.SetStaticProperty(name, bindingFlags, value, null, args); - } - - /// - /// Sets the static property - /// - /// Name of the property - /// Additional invocation attributes. 
- /// Value to be set to field or property - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// Arguments to pass to the member to invoke. - public void SetStaticProperty(string name, BindingFlags bindingFlags, object value, Type[] parameterTypes, object[] args) - { - if (parameterTypes != null) - { - PropertyInfo pi = this.type.GetProperty(name, bindingFlags | BindingFlags.Static, null, null, parameterTypes, null); - if (pi == null) - { - throw new ArgumentException( - string.Format(CultureInfo.CurrentCulture, "The member specified ({0}) could not be found. You might need to regenerate your private accessor, or the member may be private and defined on a base class. If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor.", name)); - } - - pi.SetValue(null, value, args); - } - else - { - object[] pass = new object[(args?.Length ?? 0) + 1]; - pass[0] = value; - args?.CopyTo(pass, 1); - this.InvokeHelperStatic(name, bindingFlags | BindingFlags.SetProperty, pass, null); - } - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// Additional invocation attributes - /// Arguements to the invocation - /// Culture - /// Result of invocation - private object InvokeHelperStatic(string name, BindingFlags bindingFlags, object[] args, CultureInfo culture) - { - try - { - return this.type.InvokeMember(name, bindingFlags | BindToEveryThing | BindingFlags.Static, null, null, args, culture); - } - catch (TargetInvocationException e) - { - Debug.Assert(e.InnerException != null, "Inner Exception should not be null."); - if (e.InnerException != null) - { - throw e.InnerException; - } - - throw; - } - } - } - - /// - /// Provides method signature discovery for generic methods. - /// - internal class RuntimeTypeHelper - { - /// - /// Compares the method signatures of these two methods. 
- /// - /// Method1 - /// Method2 - /// True if they are similiar. - internal static bool CompareMethodSigAndName(MethodBase m1, MethodBase m2) - { - ParameterInfo[] params1 = m1.GetParameters(); - ParameterInfo[] params2 = m2.GetParameters(); - - if (params1.Length != params2.Length) - { - return false; - } - - int numParams = params1.Length; - for (int i = 0; i < numParams; i++) - { - if (params1[i].ParameterType != params2[i].ParameterType) - { - return false; - } - } - - return true; - } - - /// - /// Gets the hierarchy depth from the base type of the provided type. - /// - /// The type. - /// The depth. - internal static int GetHierarchyDepth(Type t) - { - int depth = 0; - - Type currentType = t; - do - { - depth++; - currentType = currentType.BaseType; - } - while (currentType != null); - - return depth; - } - - /// - /// Finds most dervied type with the provided information. - /// - /// Candidate matches. - /// Number of matches. - /// The most derived method. - internal static MethodBase FindMostDerivedNewSlotMeth(MethodBase[] match, int cMatches) - { - int deepestHierarchy = 0; - MethodBase methWithDeepestHierarchy = null; - - for (int i = 0; i < cMatches; i++) - { - // Calculate the depth of the hierarchy of the declaring type of the - // current method. - int currentHierarchyDepth = GetHierarchyDepth(match[i].DeclaringType); - - // Two methods with the same hierarchy depth are not allowed. This would - // mean that there are 2 methods with the same name and sig on a given type - // which is not allowed, unless one of them is vararg... 
- if (currentHierarchyDepth == deepestHierarchy) - { - if (methWithDeepestHierarchy != null) - { - Debug.Assert( - methWithDeepestHierarchy != null && ((match[i].CallingConvention & CallingConventions.VarArgs) - | (methWithDeepestHierarchy.CallingConvention & CallingConventions.VarArgs)) != 0, - "Calling conventions: " + match[i].CallingConvention + " - " + methWithDeepestHierarchy.CallingConvention); - } - - throw new AmbiguousMatchException(); - } - - // Check to see if this method is on the most derived class. - if (currentHierarchyDepth > deepestHierarchy) - { - deepestHierarchy = currentHierarchyDepth; - methWithDeepestHierarchy = match[i]; - } - } - - return methWithDeepestHierarchy; - } - - /// - /// Given a set of methods that match the base criteria, select a method based - /// upon an array of types. This method should return null if no method matches - /// the criteria. - /// - /// Binding specification. - /// Candidate matches - /// Types - /// Parameter modifiers. - /// Matching method. Null if none matches. - internal static MethodBase SelectMethod(BindingFlags bindingAttr, MethodBase[] match, Type[] types, ParameterModifier[] modifiers) - { - if (match == null) - { - throw new ArgumentNullException("match"); - } - - int i; - int j; - - Type[] realTypes = new Type[types.Length]; - for (i = 0; i < types.Length; i++) - { - realTypes[i] = types[i].UnderlyingSystemType; - } - - types = realTypes; - - // If there are no methods to match to, then return null, indicating that no method - // matches the criteria - if (match.Length == 0) - { - return null; - } - - // Find all the methods that can be described by the types parameter. - // Remove all of them that cannot. 
- int curIdx = 0; - for (i = 0; i < match.Length; i++) - { - ParameterInfo[] par = match[i].GetParameters(); - if (par.Length != types.Length) - { - continue; - } - - for (j = 0; j < types.Length; j++) - { - Type pCls = par[j].ParameterType; - - if (pCls.ContainsGenericParameters) - { - if (pCls.IsArray != types[j].IsArray) - { - break; - } - } - else - { - if (pCls == types[j]) - { - continue; - } - - if (pCls == typeof(object)) - { - continue; - } - else - { - if (!pCls.IsAssignableFrom(types[j])) - { - break; - } - } - } - } - - if (j == types.Length) - { - match[curIdx++] = match[i]; - } - } - - if (curIdx == 0) - { - return null; - } - - if (curIdx == 1) - { - return match[0]; - } - - // Walk all of the methods looking the most specific method to invoke - int currentMin = 0; - bool ambig = false; - int[] paramOrder = new int[types.Length]; - for (i = 0; i < types.Length; i++) - { - paramOrder[i] = i; - } - - for (i = 1; i < curIdx; i++) - { - int newMin = FindMostSpecificMethod(match[currentMin], paramOrder, null, match[i], paramOrder, null, types, null); - if (newMin == 0) - { - ambig = true; - } - else - { - if (newMin == 2) - { - currentMin = i; - ambig = false; - currentMin = i; - } - } - } - - if (ambig) - { - throw new AmbiguousMatchException(); - } - - return match[currentMin]; - } - - /// - /// Finds the most specific method in the two methods provided. - /// - /// Method 1 - /// Parameter order for Method 1 - /// Paramter array type. - /// Method 2 - /// Parameter order for Method 2 - /// >Paramter array type. - /// Types to search in. - /// Args. - /// An int representing the match. - internal static int FindMostSpecificMethod( - MethodBase m1, - int[] paramOrder1, - Type paramArrayType1, - MethodBase m2, - int[] paramOrder2, - Type paramArrayType2, - Type[] types, - object[] args) - { - // Find the most specific method based on the parameters. 
- int res = FindMostSpecific( - m1.GetParameters(), - paramOrder1, - paramArrayType1, - m2.GetParameters(), - paramOrder2, - paramArrayType2, - types, - args); - - // If the match was not ambiguous then return the result. - if (res != 0) - { - return res; - } - - // Check to see if the methods have the exact same name and signature. - if (CompareMethodSigAndName(m1, m2)) - { - // Determine the depth of the declaring types for both methods. - int hierarchyDepth1 = GetHierarchyDepth(m1.DeclaringType); - int hierarchyDepth2 = GetHierarchyDepth(m2.DeclaringType); - - // The most derived method is the most specific one. - if (hierarchyDepth1 == hierarchyDepth2) - { - return 0; - } - else if (hierarchyDepth1 < hierarchyDepth2) - { - return 2; - } - else - { - return 1; - } - } - - // The match is ambiguous. - return 0; - } - - /// - /// Finds the most specific method in the two methods provided. - /// - /// Method 1 - /// Parameter order for Method 1 - /// Paramter array type. - /// Method 2 - /// Parameter order for Method 2 - /// >Paramter array type. - /// Types to search in. - /// Args. - /// An int representing the match. 
- internal static int FindMostSpecific( - ParameterInfo[] p1, - int[] paramOrder1, - Type paramArrayType1, - ParameterInfo[] p2, - int[] paramOrder2, - Type paramArrayType2, - Type[] types, - object[] args) - { - // A method using params is always less specific than one not using params - if (paramArrayType1 != null && paramArrayType2 == null) - { - return 2; - } - - if (paramArrayType2 != null && paramArrayType1 == null) - { - return 1; - } - - bool p1Less = false; - bool p2Less = false; - - for (int i = 0; i < types.Length; i++) - { - if (args != null && args[i] == Type.Missing) - { - continue; - } - - Type c1, c2; - - // If a param array is present, then either - // the user re-ordered the parameters in which case - // the argument to the param array is either an array - // in which case the params is conceptually ignored and so paramArrayType1 == null - // or the argument to the param array is a single element - // in which case paramOrder[i] == p1.Length - 1 for that element - // or the user did not re-order the parameters in which case - // the paramOrder array could contain indexes larger than p.Length - 1 - //// so any index >= p.Length - 1 is being put in the param array - - if (paramArrayType1 != null && paramOrder1[i] >= p1.Length - 1) - { - c1 = paramArrayType1; - } - else - { - c1 = p1[paramOrder1[i]].ParameterType; - } - - if (paramArrayType2 != null && paramOrder2[i] >= p2.Length - 1) - { - c2 = paramArrayType2; - } - else - { - c2 = p2[paramOrder2[i]].ParameterType; - } - - if (c1 == c2) - { - continue; - } - - if (c1.ContainsGenericParameters || c2.ContainsGenericParameters) - { - continue; - } - - switch (FindMostSpecificType(c1, c2, types[i])) - { - case 0: - return 0; - case 1: - p1Less = true; - break; - case 2: - p2Less = true; - break; - } - } - - // Two way p1Less and p2Less can be equal. All the arguments are the - // same they both equal false, otherwise there were things that both - // were the most specific type on.... 
- if (p1Less == p2Less) - { - // it's possible that the 2 methods have same sig and default param in which case we match the one - // with the same number of args but only if they were exactly the same (that is p1Less and p2Lees are both false) - if (!p1Less && p1.Length != p2.Length && args != null) - { - if (p1.Length == args.Length) - { - return 1; - } - else if (p2.Length == args.Length) - { - return 2; - } - } - - return 0; - } - else - { - return (p1Less == true) ? 1 : 2; - } - } - - /// - /// Finds the most specific type in the two provided. - /// - /// Type 1 - /// Type 2 - /// The defining type - /// An int representing the match. - internal static int FindMostSpecificType(Type c1, Type c2, Type t) - { - // If the two types are exact move on... - if (c1 == c2) - { - return 0; - } - - if (c1 == t) - { - return 1; - } - - if (c2 == t) - { - return 2; - } - - bool c1FromC2; - bool c2FromC1; - - if (c1.IsByRef || c2.IsByRef) - { - if (c1.IsByRef && c2.IsByRef) - { - c1 = c1.GetElementType(); - c2 = c2.GetElementType(); - } - else if (c1.IsByRef) - { - if (c1.GetElementType() == c2) - { - return 2; - } - - c1 = c1.GetElementType(); - } - else - { - if (c2.GetElementType() == c1) - { - return 1; - } - - c2 = c2.GetElementType(); - } - } - - if (c1.IsPrimitive && c2.IsPrimitive) - { - c1FromC2 = true; - c2FromC1 = true; - } - else - { - c1FromC2 = c1.IsAssignableFrom(c2); - c2FromC1 = c2.IsAssignableFrom(c1); - } - - if (c1FromC2 == c2FromC1) - { - return 0; - } - - if (c1FromC2) - { - return 2; - } - else - { - return 1; - } - } - } -} \ No newline at end of file diff --git a/API.Tests/Helpers/TestCaseGenerator.cs b/API.Tests/Helpers/TestCaseGenerator.cs new file mode 100644 index 000000000..41b99e5e4 --- /dev/null +++ b/API.Tests/Helpers/TestCaseGenerator.cs @@ -0,0 +1,53 @@ +using System.IO; + +namespace API.Tests.Helpers +{ + /// + /// Given a -testcase.txt file, will generate a folder with fake archive or book files. These files are just renamed txt files. 
+ /// This currently is broken - you cannot create files from a unit test it seems + /// + public static class TestCaseGenerator + { + public static string GenerateFiles(string directory, string fileToExpand) + { + //var files = Directory.GetFiles(directory, fileToExpand); + var file = new FileInfo(fileToExpand); + if (!file.Exists && file.Name.EndsWith("-testcase.txt")) return string.Empty; + + var baseDirectory = TestCaseGenerator.CreateTestBase(fileToExpand, directory); + var filesToCreate = File.ReadLines(file.FullName); + foreach (var fileToCreate in filesToCreate) + { + // var folders = DirectoryService.GetFoldersTillRoot(directory, fileToCreate); + // foreach (var VARIABLE in COLLECTION) + // { + // + // } + File.Create(fileToCreate); + } + + + + + return baseDirectory; + } + + /// + /// Creates and returns a new base directory for data creation for a given testcase + /// + /// + /// + /// + private static string CreateTestBase(string file, string rootDirectory) + { + var baseDir = file.Split("-testcase.txt")[0]; + var newDirectory = Path.Join(rootDirectory, baseDir); + if (!Directory.Exists(newDirectory)) + { + new DirectoryInfo(newDirectory).Create(); + } + + return newDirectory; + } + } +} \ No newline at end of file diff --git a/API.Tests/Parser/BookParserTests.cs b/API.Tests/Parser/BookParserTests.cs new file mode 100644 index 000000000..abeff081d --- /dev/null +++ b/API.Tests/Parser/BookParserTests.cs @@ -0,0 +1,14 @@ +using Xunit; + +namespace API.Tests.Parser +{ + public class BookParserTests + { + [Theory] + [InlineData("Gifting The Wonderful World With Blessings! 
- 3 Side Stories [yuNS][Unknown]", "Gifting The Wonderful World With Blessings!")] + public void ParseSeriesTest(string filename, string expected) + { + Assert.Equal(expected, API.Parser.Parser.ParseSeries(filename)); + } + } +} \ No newline at end of file diff --git a/API.Tests/Parser/ComicParserTests.cs b/API.Tests/Parser/ComicParserTests.cs new file mode 100644 index 000000000..9d91a5feb --- /dev/null +++ b/API.Tests/Parser/ComicParserTests.cs @@ -0,0 +1,69 @@ +using Xunit; + +namespace API.Tests.Parser +{ + public class ComicParserTests + { + [Theory] + [InlineData("01 Spider-Man & Wolverine 01.cbr", "Spider-Man & Wolverine")] + [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "Asterix the Gladiator")] + [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "The First Asterix Frieze")] + [InlineData("Batman & Catwoman - Trail of the Gun 01", "Batman & Catwoman - Trail of the Gun")] + [InlineData("Batman & Daredevil - King of New York", "Batman & Daredevil - King of New York")] + [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "Batman & Grendel")] + [InlineData("Batman & Robin the Teen Wonder #0", "Batman & Robin the Teen Wonder")] + [InlineData("Batman & Wildcat (1 of 3)", "Batman & Wildcat")] + [InlineData("Batman And Superman World's Finest #01", "Batman And Superman World's Finest")] + [InlineData("Babe 01", "Babe")] + [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "Scott Pilgrim")] + [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")] + [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005)", "Scott Pilgrim")] + [InlineData("Wolverine - Origins 003 (2006) (digital) (Minutemen-PhD)", "Wolverine - Origins")] + [InlineData("Invincible Vol 01 Family matters (2005) (Digital).cbr", "Invincible")] + public void ParseComicSeriesTest(string filename, string expected) + { + Assert.Equal(expected, API.Parser.Parser.ParseComicSeries(filename)); + } + + [Theory] + [InlineData("01 Spider-Man & Wolverine 01.cbr", "1")] + [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "4")] + [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")] + [InlineData("Batman & Catwoman - Trail of the Gun 01", "1")] + [InlineData("Batman & Daredevil - King of New York", "0")] + [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")] + [InlineData("Batman & Robin the Teen Wonder #0", "0")] + [InlineData("Batman & Wildcat (1 of 3)", "0")] + [InlineData("Batman And Superman World's Finest #01", "1")] + [InlineData("Babe 01", "1")] + [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "1")] + [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] + [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005)", "2")] + [InlineData("Superman v1 024 (09-10 1943)", "1")] + public void ParseComicVolumeTest(string filename, string expected) + { + Assert.Equal(expected, API.Parser.Parser.ParseComicVolume(filename)); + } + + [Theory] + [InlineData("01 Spider-Man & Wolverine 01.cbr", "0")] + [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")] + [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")] + [InlineData("Batman & Catwoman - Trail of the Gun 01", "0")] + [InlineData("Batman & Daredevil - King of New York", "0")] + [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")] + [InlineData("Batman & Robin the Teen Wonder #0", "0")] + [InlineData("Batman & Wildcat (1 of 3)", "1")] + [InlineData("Batman & Wildcat (2 of 3)", "2")] + [InlineData("Batman And Superman World's Finest #01", "0")] + [InlineData("Babe 01", "0")] + [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "1")] + [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] + [InlineData("Superman v1 024 (09-10 1943)", "24")] + [InlineData("Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr", "70.5")] + public void ParseComicChapterTest(string filename, string expected) + { + Assert.Equal(expected, API.Parser.Parser.ParseComicChapter(filename)); + } + } +} \ No newline at end of file diff --git a/API.Tests/ParserTest.cs b/API.Tests/Parser/MangaParserTests.cs similarity index 68% rename from API.Tests/ParserTest.cs rename to API.Tests/Parser/MangaParserTests.cs index 2aa0f30ff..fa932dfeb 100644 --- a/API.Tests/ParserTest.cs +++ b/API.Tests/Parser/MangaParserTests.cs @@ -1,18 +1,16 @@ -using System.Collections.Generic; +using System.Collections.Generic; using API.Entities.Enums; using API.Parser; using Xunit; using Xunit.Abstractions; -using static API.Parser.Parser; -namespace API.Tests +namespace API.Tests.Parser { - public class ParserTests + public class 
MangaParserTests { private readonly ITestOutputHelper _testOutputHelper; - - public ParserTests(ITestOutputHelper testOutputHelper) + public MangaParserTests(ITestOutputHelper testOutputHelper) { _testOutputHelper = testOutputHelper; } @@ -52,9 +50,21 @@ namespace API.Tests [InlineData("Mob Psycho 100 v02 (2019) (Digital) (Shizu).cbz", "2")] [InlineData("Kodomo no Jikan vol. 1.cbz", "1")] [InlineData("Kodomo no Jikan vol. 10.cbz", "10")] + [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 12 [Dametrans][v2]", "0")] + [InlineData("Vagabond_v03", "3")] + [InlineData("Mujaki No Rakune Volume 10.cbz", "10")] + [InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "3")] + [InlineData("Volume 12 - Janken Boy is Coming!.cbz", "12")] + [InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz", "20")] + [InlineData("Gantz.V26.cbz", "26")] + [InlineData("NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar", "4")] + [InlineData("[Hidoi]_Amaenaideyo_MS_vol01_chp02.rar", "1")] + [InlineData("NEEDLESS_Vol.4_-_Simeon_6_v2_[SugoiSugoi].rar", "4")] + [InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "1")] + public void ParseVolumeTest(string filename, string expected) { - Assert.Equal(expected, ParseVolume(filename)); + Assert.Equal(expected, API.Parser.Parser.ParseVolume(filename)); } [Theory] @@ -103,11 +113,35 @@ namespace API.Tests [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 09", "Kedouin Makoto - Corpse Party Musume")] [InlineData("Goblin Slayer Side Story - Year One 025.5", "Goblin Slayer Side Story - Year One")] [InlineData("Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire)", "Goblin Slayer - Brand New Day")] + [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 01 [Dametrans][v2]", "Kedouin Makoto - Corpse Party Musume")] + [InlineData("Vagabond_v03", "Vagabond")] + [InlineData("[AN] Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 
1", "Mahoutsukai to Deshi no Futekisetsu na Kankei")] + [InlineData("Beelzebub_Side_Story_02_RHS.zip", "Beelzebub Side Story")] + [InlineData("[BAA]_Darker_than_Black_Omake-1.zip", "Darker than Black")] + [InlineData("Baketeriya ch01-05.zip", "Baketeriya")] + [InlineData("[PROzess]Kimi_ha_midara_na_Boku_no_Joou_-_Ch01", "Kimi ha midara na Boku no Joou")] + [InlineData("[SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar", "NEEDLESS")] + [InlineData("Fullmetal Alchemist chapters 101-108.cbz", "Fullmetal Alchemist")] + [InlineData("To Love Ru v09 Uncensored (Ch.071-079).cbz", "To Love Ru")] + [InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz", "One Piece - Digital Colored Comics")] + //[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", "Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U")] + [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Chapter 01", "Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U")] + [InlineData("Vol03_ch15-22.rar", "")] + [InlineData("Love Hina - Special.cbz", "")] // This has to be a fallback case + [InlineData("Ani-Hina Art Collection.cbz", "")] // This has to be a fallback case + [InlineData("Magi - Ch.252-005.cbz", "Magi")] + [InlineData("Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1", "Umineko no Naku Koro ni")] + [InlineData("Kimetsu no Yaiba - Digital Colored Comics c162 Three Victorious Stars.cbz", "Kimetsu no Yaiba - Digital Colored Comics")] + [InlineData("[Hidoi]_Amaenaideyo_MS_vol01_chp02.rar", "Amaenaideyo MS")] + [InlineData("NEEDLESS_Vol.4_-_Simeon_6_v2_[SugoiSugoi].rar", "NEEDLESS")] + [InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "Okusama wa Shougakusei")] + [InlineData("VanDread-v01-c001[MD].zip", "VanDread")] + [InlineData("Momo The Blood Taker - Chapter 027 Violent Emotion.cbz", "Momo The 
Blood Taker")] public void ParseSeriesTest(string filename, string expected) { - Assert.Equal(expected, ParseSeries(filename)); + Assert.Equal(expected, API.Parser.Parser.ParseSeries(filename)); } - + [Theory] [InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")] [InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "9")] @@ -143,83 +177,75 @@ namespace API.Tests [InlineData("Vol 1", "0")] [InlineData("VanDread-v01-c001[MD].zip", "1")] [InlineData("Goblin Slayer Side Story - Year One 025.5", "25.5")] + [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 01", "1")] + [InlineData("To Love Ru v11 Uncensored (Ch.089-097+Omake)", "89-97")] + [InlineData("To Love Ru v18 Uncensored (Ch.153-162.5)", "153-162.5")] + [InlineData("[AN] Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1", "1")] + [InlineData("Beelzebub_Side_Story_02_RHS.zip", "2")] + [InlineData("[PROzess]Kimi_ha_midara_na_Boku_no_Joou_-_Ch01", "1")] + [InlineData("Fullmetal Alchemist chapters 101-108.cbz", "101-108")] + [InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "2")] + [InlineData("To Love Ru v09 Uncensored (Ch.071-079).cbz", "71-79")] + [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter.rar", "0")] + [InlineData("Beelzebub_153b_RHS.zip", "153.5")] + [InlineData("Beelzebub_150-153b_RHS.zip", "150-153.5")] + [InlineData("Transferred to another world magical swordsman v1.1", "1")] + [InlineData("Transferred to another world magical swordsman v1.2", "2")] + [InlineData("Kiss x Sis - Ch.15 - The Angst of a 15 Year Old Boy.cbz", "15")] + [InlineData("Kiss x Sis - Ch.12 - 1 , 2 , 3P!.cbz", "12")] + [InlineData("Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1", "1")] + [InlineData("Kiss x Sis - Ch.00 - Let's Start from 0.cbz", "0")] + [InlineData("[Hidoi]_Amaenaideyo_MS_vol01_chp02.rar", "2")] + [InlineData("Okusama wa Shougakusei c003 (v01) 
[bokuwaNEET]", "3")] public void ParseChaptersTest(string filename, string expected) { - Assert.Equal(expected, ParseChapter(filename)); - } - - - [Theory] - [InlineData("0001", "1")] - [InlineData("1", "1")] - [InlineData("0013", "13")] - public void RemoveLeadingZeroesTest(string input, string expected) - { - Assert.Equal(expected, RemoveLeadingZeroes(input)); + Assert.Equal(expected, API.Parser.Parser.ParseChapter(filename)); } - [Theory] - [InlineData("1", "001")] - [InlineData("10", "010")] - [InlineData("100", "100")] - public void PadZerosTest(string input, string expected) - { - Assert.Equal(expected, PadZeros(input)); - } - - [Theory] - [InlineData("Hello_I_am_here", "Hello I am here")] - [InlineData("Hello_I_am_here ", "Hello I am here")] - [InlineData("[ReleaseGroup] The Title", "The Title")] - [InlineData("[ReleaseGroup]_The_Title", "The Title")] - [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", "Kasumi Otoko no Ko v1.1")] - public void CleanTitleTest(string input, string expected) - { - Assert.Equal(expected, CleanTitle(input)); - } - - [Theory] - [InlineData("test.cbz", true)] - [InlineData("test.cbr", true)] - [InlineData("test.zip", true)] - [InlineData("test.rar", true)] - [InlineData("test.rar.!qb", false)] - [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.jpg", false)] - public void IsArchiveTest(string input, bool expected) - { - Assert.Equal(expected, IsArchive(input)); - } [Theory] [InlineData("Tenjou Tenge Omnibus", "Omnibus")] [InlineData("Tenjou Tenge {Full Contact Edition}", "Full Contact Edition")] [InlineData("Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", "Full Contact Edition")] + [InlineData("Wotakoi - Love is Hard for Otaku Omnibus v01 (2018) (Digital) (danke-Empire)", "Omnibus")] + [InlineData("To Love Ru v01 Uncensored (Ch.001-007)", "Uncensored")] + [InlineData("Chobits Omnibus Edition v01 [Dark Horse]", "Omnibus Edition")] + [InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 
177 - 30 Million vs 81 Million.cbz", "")] + [InlineData("AKIRA - c003 (v01) [Full Color] [Darkhorse].cbz", "Full Color")] public void ParseEditionTest(string input, string expected) { - Assert.Equal(expected, ParseEdition(input)); + Assert.Equal(expected, API.Parser.Parser.ParseEdition(input)); + } + [Theory] + [InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)] + [InlineData("Beelzebub_Omake_June_2012_RHS", true)] + [InlineData("Beelzebub_Side_Story_02_RHS.zip", false)] + [InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", true)] + [InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", true)] + [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", true)] + [InlineData("Ani-Hina Art Collection.cbz", true)] + [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", true)] + public void ParseMangaSpecialTest(string input, bool expected) + { + Assert.Equal(expected, !string.IsNullOrEmpty(API.Parser.Parser.ParseMangaSpecial(input))); } [Theory] - [InlineData("12-14", 12)] - [InlineData("24", 24)] - [InlineData("18-04", 4)] - [InlineData("18-04.5", 4.5)] - [InlineData("40", 40)] - public void MinimumNumberFromRangeTest(string input, float expected) + [InlineData("image.png", MangaFormat.Image)] + [InlineData("image.cbz", MangaFormat.Archive)] + [InlineData("image.txt", MangaFormat.Unknown)] + public void ParseFormatTest(string inputFile, MangaFormat expected) { - Assert.Equal(expected, MinimumNumberFromRange(input)); + Assert.Equal(expected, API.Parser.Parser.ParseFormat(inputFile)); } [Theory] - [InlineData("Darker Than Black", "darkerthanblack")] - [InlineData("Darker Than Black - Something", "darkerthanblacksomething")] - [InlineData("", "")] - public void NormalizeTest(string input, string expected) + [InlineData("Gifting The Wonderful World With Blessings! 
- 3 Side Stories [yuNS][Unknown].epub", "Side Stories")] + public void ParseSpecialTest(string inputFile, string expected) { - Assert.Equal(expected, Normalize(input)); + Assert.Equal(expected, API.Parser.Parser.ParseMangaSpecial(inputFile)); } - [Fact] public void ParseInfoTest() { @@ -309,7 +335,7 @@ namespace API.Tests foreach (var file in expected.Keys) { var expectedInfo = expected[file]; - var actual = Parse(file, rootPath); + var actual = API.Parser.Parser.Parse(file, rootPath); if (expectedInfo == null) { Assert.Null(actual); diff --git a/API.Tests/Parser/ParserInfoTests.cs b/API.Tests/Parser/ParserInfoTests.cs new file mode 100644 index 000000000..78b879de7 --- /dev/null +++ b/API.Tests/Parser/ParserInfoTests.cs @@ -0,0 +1,110 @@ +using API.Entities.Enums; +using API.Parser; +using Xunit; + +namespace API.Tests.Parser +{ + public class ParserInfoTests + { + [Fact] + public void MergeFromTest() + { + var p1 = new ParserInfo() + { + Chapters = "0", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "darker than black", + Volumes = "0" + }; + + var p2 = new ParserInfo() + { + Chapters = "1", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "Darker Than Black", + Volumes = "0" + }; + + var expected = new ParserInfo() + { + Chapters = "1", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "darker than black", + Volumes = "0" + }; + p1.Merge(p2); + + AssertSame(expected, p1); + + } + + [Fact] + public void MergeFromTest2() + { + var p1 = new ParserInfo() + { + Chapters = "1", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = true, + Series = "darker than black", + Title = 
"darker than black", + Volumes = "0" + }; + + var p2 = new ParserInfo() + { + Chapters = "0", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "Darker Than Black", + Volumes = "1" + }; + + var expected = new ParserInfo() + { + Chapters = "1", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = true, + Series = "darker than black", + Title = "darker than black", + Volumes = "1" + }; + p1.Merge(p2); + + AssertSame(expected, p1); + + } + + + private void AssertSame(ParserInfo expected, ParserInfo actual) + { + Assert.Equal(expected.Chapters, actual.Chapters); + Assert.Equal(expected.Volumes, actual.Volumes); + Assert.Equal(expected.Edition, actual.Edition); + Assert.Equal(expected.Filename, actual.Filename); + Assert.Equal(expected.Format, actual.Format); + Assert.Equal(expected.Series, actual.Series); + Assert.Equal(expected.IsSpecial, actual.IsSpecial); + Assert.Equal(expected.FullFilePath, actual.FullFilePath); + } + } +} \ No newline at end of file diff --git a/API.Tests/Parser/ParserTest.cs b/API.Tests/Parser/ParserTest.cs new file mode 100644 index 000000000..2f46c6bb2 --- /dev/null +++ b/API.Tests/Parser/ParserTest.cs @@ -0,0 +1,192 @@ +using Xunit; +using static API.Parser.Parser; + +namespace API.Tests.Parser +{ + public class ParserTests + { + + [Theory] + [InlineData("0001", "1")] + [InlineData("1", "1")] + [InlineData("0013", "13")] + public void RemoveLeadingZeroesTest(string input, string expected) + { + Assert.Equal(expected, RemoveLeadingZeroes(input)); + } + + [Theory] + [InlineData("1", "001")] + [InlineData("10", "010")] + [InlineData("100", "100")] + public void PadZerosTest(string input, string expected) + { + Assert.Equal(expected, PadZeros(input)); + } + + [Theory] + [InlineData("Hello_I_am_here", "Hello I am here")] + [InlineData("Hello_I_am_here ", "Hello I am here")] + 
[InlineData("[ReleaseGroup] The Title", "The Title")] + [InlineData("[ReleaseGroup]_The_Title", "The Title")] + [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", "Kasumi Otoko no Ko v1.1")] + public void CleanTitleTest(string input, string expected) + { + Assert.Equal(expected, CleanTitle(input)); + } + + + // [Theory] + // //[InlineData("@font-face{font-family:\"PaytoneOne\";src:url(\"..\\/Fonts\\/PaytoneOne.ttf\")}", "@font-face{font-family:\"PaytoneOne\";src:url(\"PaytoneOne.ttf\")}")] + // [InlineData("@font-face{font-family:\"PaytoneOne\";src:url(\"..\\/Fonts\\/PaytoneOne.ttf\")}", "..\\/Fonts\\/PaytoneOne.ttf")] + // //[InlineData("@font-face{font-family:'PaytoneOne';src:url('..\\/Fonts\\/PaytoneOne.ttf')}", "@font-face{font-family:'PaytoneOne';src:url('PaytoneOne.ttf')}")] + // //[InlineData("@font-face{\r\nfont-family:'PaytoneOne';\r\nsrc:url('..\\/Fonts\\/PaytoneOne.ttf')\r\n}", "@font-face{font-family:'PaytoneOne';src:url('PaytoneOne.ttf')}")] + // public void ReplaceStyleUrlTest(string input, string expected) + // { + // var replacementStr = "PaytoneOne.ttf"; + // // TODO: Use Match to validate since replace is weird + // //Assert.Equal(expected, FontSrcUrlRegex.Replace(input, "$1" + replacementStr + "$2" + "$3")); + // var match = FontSrcUrlRegex.Match(input); + // Assert.Equal(!string.IsNullOrEmpty(expected), FontSrcUrlRegex.Match(input).Success); + // } + + + [Theory] + [InlineData("test.cbz", true)] + [InlineData("test.cbr", true)] + [InlineData("test.zip", true)] + [InlineData("test.rar", true)] + [InlineData("test.rar.!qb", false)] + [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.jpg", false)] + public void IsArchiveTest(string input, bool expected) + { + Assert.Equal(expected, IsArchive(input)); + } + + [Theory] + [InlineData("test.epub", true)] + [InlineData("test.pdf", false)] + [InlineData("test.mobi", false)] + [InlineData("test.djvu", false)] + [InlineData("test.zip", false)] + [InlineData("test.rar", false)] + 
[InlineData("test.epub.!qb", false)] + [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.ebub", false)] + public void IsBookTest(string input, bool expected) + { + Assert.Equal(expected, IsBook(input)); + } + + [Theory] + [InlineData("test.epub", true)] + [InlineData("test.EPUB", true)] + [InlineData("test.mobi", false)] + [InlineData("test.epub.!qb", false)] + [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.ebub", false)] + public void IsEpubTest(string input, bool expected) + { + Assert.Equal(expected, IsEpub(input)); + } + + // [Theory] + // [InlineData("Tenjou Tenge Omnibus", "Omnibus")] + // [InlineData("Tenjou Tenge {Full Contact Edition}", "Full Contact Edition")] + // [InlineData("Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", "Full Contact Edition")] + // [InlineData("Wotakoi - Love is Hard for Otaku Omnibus v01 (2018) (Digital) (danke-Empire)", "Omnibus")] + // [InlineData("To Love Ru v01 Uncensored (Ch.001-007)", "Uncensored")] + // [InlineData("Chobits Omnibus Edition v01 [Dark Horse]", "Omnibus Edition")] + // [InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 
177 - 30 Million vs 81 Million.cbz", "Digital Colored Comics")] + // [InlineData("AKIRA - c003 (v01) [Full Color] [Darkhorse].cbz", "Full Color")] + // public void ParseEditionTest(string input, string expected) + // { + // Assert.Equal(expected, ParseEdition(input)); + // } + + // [Theory] + // [InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)] + // [InlineData("Beelzebub_Omake_June_2012_RHS", true)] + // [InlineData("Beelzebub_Side_Story_02_RHS.zip", false)] + // [InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", true)] + // [InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", true)] + // [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", true)] + // [InlineData("Ani-Hina Art Collection.cbz", true)] + // public void ParseMangaSpecialTest(string input, bool expected) + // { + // Assert.Equal(expected, ParseMangaSpecial(input) != ""); + // } + + [Theory] + [InlineData("12-14", 12)] + [InlineData("24", 24)] + [InlineData("18-04", 4)] + [InlineData("18-04.5", 4.5)] + [InlineData("40", 40)] + public void MinimumNumberFromRangeTest(string input, float expected) + { + Assert.Equal(expected, MinimumNumberFromRange(input)); + } + + [Theory] + [InlineData("Darker Than Black", "darkerthanblack")] + [InlineData("Darker Than Black - Something", "darkerthanblacksomething")] + [InlineData("Darker Than_Black", "darkerthanblack")] + [InlineData("", "")] + public void NormalizeTest(string input, string expected) + { + Assert.Equal(expected, Normalize(input)); + } + + + + [Theory] + [InlineData("test.jpg", true)] + [InlineData("test.jpeg", true)] + [InlineData("test.png", true)] + [InlineData(".test.jpg", false)] + [InlineData("!test.jpg", false)] + public void IsImageTest(string filename, bool expected) + { + Assert.Equal(expected, IsImage(filename)); + } + + [Theory] + [InlineData("C:/", "C:/Love Hina/Love Hina - 
Special.cbz", "Love Hina")] + [InlineData("C:/", "C:/Love Hina/Specials/Ani-Hina Art Collection.cbz", "Love Hina")] + [InlineData("C:/", "C:/Mujaki no Rakuen Something/Mujaki no Rakuen Vol12 ch76.cbz", "Mujaki no Rakuen")] + public void FallbackTest(string rootDir, string inputPath, string expectedSeries) + { + var actual = Parse(inputPath, rootDir); + if (actual == null) + { + Assert.NotNull(actual); + return; + } + + Assert.Equal(expectedSeries, actual.Series); + } + + [Theory] + [InlineData("Love Hina - Special.jpg", false)] + [InlineData("folder.jpg", true)] + [InlineData("DearS_v01_cover.jpg", true)] + [InlineData("DearS_v01_covers.jpg", false)] + [InlineData("!cover.jpg", true)] + [InlineData("cover.jpg", true)] + [InlineData("cover.png", true)] + [InlineData("ch1/cover.png", true)] + public void IsCoverImageTest(string inputPath, bool expected) + { + Assert.Equal(expected, IsCoverImage(inputPath)); + } + + [Theory] + [InlineData("__MACOSX/Love Hina - Special.jpg", true)] + [InlineData("TEST/Love Hina - Special.jpg", false)] + [InlineData("__macosx/Love Hina/", false)] + [InlineData("MACOSX/Love Hina/", false)] + public void HasBlacklistedFolderInPathTest(string inputPath, bool expected) + { + Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath)); + } + } +} \ No newline at end of file diff --git a/API.Tests/Services/ArchiveServiceTests.cs b/API.Tests/Services/ArchiveServiceTests.cs index e448ddf1c..d907ab75a 100644 --- a/API.Tests/Services/ArchiveServiceTests.cs +++ b/API.Tests/Services/ArchiveServiceTests.cs @@ -2,10 +2,10 @@ using System.IO; using System.IO.Compression; using API.Archive; -using API.Interfaces.Services; using API.Services; using Microsoft.Extensions.Logging; using NSubstitute; +using NSubstitute.Extensions; using Xunit; using Xunit.Abstractions; @@ -14,7 +14,7 @@ namespace API.Tests.Services public class ArchiveServiceTests { private readonly ITestOutputHelper _testOutputHelper; - private readonly IArchiveService _archiveService; + 
private readonly ArchiveService _archiveService; private readonly ILogger _logger = Substitute.For>(); public ArchiveServiceTests(ITestOutputHelper testOutputHelper) @@ -58,6 +58,9 @@ namespace API.Tests.Services [InlineData("file in folder in folder.zip", 1)] [InlineData("file in folder.zip", 1)] [InlineData("file in folder_alt.zip", 1)] + [InlineData("macos_none.zip", 0)] + [InlineData("macos_one.zip", 1)] + [InlineData("macos_native.zip", 21)] public void GetNumberOfPagesFromArchiveTest(string archivePath, int expected) { var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); @@ -110,6 +113,34 @@ namespace API.Tests.Services DirectoryService.ClearAndDeleteDirectory(extractDirectory); } + + + [Theory] + [InlineData(new [] {"folder.jpg"}, "folder.jpg")] + [InlineData(new [] {"vol1/"}, "")] + [InlineData(new [] {"folder.jpg", "vol1/folder.jpg"}, "folder.jpg")] + [InlineData(new [] {"cover.jpg", "vol1/folder.jpg"}, "cover.jpg")] + [InlineData(new [] {"__MACOSX/cover.jpg", "vol1/page 01.jpg"}, "")] + [InlineData(new [] {"Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c060 (v10) - p200 [Digital] [LuCaZ].jpg", "folder.jpg"}, "folder.jpg")] + public void FindFolderEntry(string[] files, string expected) + { + var foundFile = _archiveService.FindFolderEntry(files); + Assert.Equal(expected, string.IsNullOrEmpty(foundFile) ? "" : foundFile); + } + + [Theory] + [InlineData(new [] {"folder.jpg"}, "folder.jpg")] + [InlineData(new [] {"vol1/"}, "")] + [InlineData(new [] {"folder.jpg", "vol1/folder.jpg"}, "folder.jpg")] + [InlineData(new [] {"cover.jpg", "vol1/folder.jpg"}, "cover.jpg")] + [InlineData(new [] {"page 2.jpg", "page 10.jpg"}, "page 2.jpg")] + [InlineData(new [] {"__MACOSX/cover.jpg", "vol1/page 01.jpg"}, "vol1/page 01.jpg")] + [InlineData(new [] {"Akame ga KILL! 
ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg", "Akame ga KILL! ZERO - c060 (v10) - p200 [Digital] [LuCaZ].jpg", "folder.jpg"}, "Akame ga KILL! ZERO - c055 (v10) - p000 [Digital] [LuCaZ].jpg")] + public void FindFirstEntry(string[] files, string expected) + { + var foundFile = _archiveService.FirstFileEntry(files); + Assert.Equal(expected, string.IsNullOrEmpty(foundFile) ? "" : foundFile); + } @@ -118,20 +149,47 @@ namespace API.Tests.Services [InlineData("v10 - with folder.cbz", "v10 - with folder.expected.jpg")] [InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.jpg")] //[InlineData("png.zip", "png.PNG")] - public void GetCoverImageTest(string inputFile, string expectedOutputFile) + [InlineData("macos_native.zip", "macos_native.jpg")] + [InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.jpg")] + [InlineData("sorting.zip", "sorting.expected.jpg")] + public void GetCoverImage_Default_Test(string inputFile, string expectedOutputFile) { + var archiveService = Substitute.For(_logger); var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"); var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile)); + archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.Default); Stopwatch sw = Stopwatch.StartNew(); - Assert.Equal(expectedBytes, _archiveService.GetCoverImage(Path.Join(testDirectory, inputFile))); + Assert.Equal(expectedBytes, archiveService.GetCoverImage(Path.Join(testDirectory, inputFile))); + _testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms"); + } + + + [Theory] + [InlineData("v10.cbz", "v10.expected.jpg")] + [InlineData("v10 - with folder.cbz", "v10 - with folder.expected.jpg")] + [InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.jpg")] + //[InlineData("png.zip", "png.PNG")] + 
[InlineData("macos_native.zip", "macos_native.jpg")] + [InlineData("v10 - duplicate covers.cbz", "v10 - duplicate covers.expected.jpg")] + [InlineData("sorting.zip", "sorting.expected.jpg")] + public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile) + { + var archiveService = Substitute.For(_logger); + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"); + var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile)); + + archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.SharpCompress); + Stopwatch sw = Stopwatch.StartNew(); + Assert.Equal(expectedBytes, archiveService.GetCoverImage(Path.Join(testDirectory, inputFile))); _testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms"); } [Theory] - [InlineData("06_v01[DMM].zip")] + [InlineData("Archives/macos_native.zip")] + [InlineData("Formats/One File with DB_Supported.zip")] public void CanParseCoverImage(string inputFile) { - var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/"); Assert.NotEmpty(_archiveService.GetCoverImage(Path.Join(testDirectory, inputFile))); } diff --git a/API.Tests/Services/BackupServiceTests.cs b/API.Tests/Services/BackupServiceTests.cs deleted file mode 100644 index 878b57c94..000000000 --- a/API.Tests/Services/BackupServiceTests.cs +++ /dev/null @@ -1,47 +0,0 @@ -using API.Interfaces; -using API.Services; -using API.Services.Tasks; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Logging; -using NSubstitute; - -namespace API.Tests.Services -{ - public class BackupServiceTests - { - private readonly DirectoryService _directoryService; - private readonly BackupService _backupService; - private readonly IUnitOfWork 
_unitOfWork = Substitute.For(); - private readonly ILogger _directoryLogger = Substitute.For>(); - private readonly ILogger _logger = Substitute.For>(); - private readonly IConfiguration _config; - - // public BackupServiceTests() - // { - // var inMemorySettings = new Dictionary { - // {"Logging:File:MaxRollingFiles", "0"}, - // {"Logging:File:Path", "file.log"}, - // }; - // - // _config = new ConfigurationBuilder() - // .AddInMemoryCollection(inMemorySettings) - // .Build(); - // - // //_config.GetMaxRollingFiles().Returns(0); - // //_config.GetLoggingFileName().Returns("file.log"); - // //var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BackupService/"); - // //Directory.GetCurrentDirectory().Returns(testDirectory); - // - // _directoryService = new DirectoryService(_directoryLogger); - // _backupService = new BackupService(_unitOfWork, _logger, _directoryService, _config); - // } - // - // [Fact] - // public void Test() - // { - // _backupService.BackupDatabase(); - // } - - - } -} \ No newline at end of file diff --git a/API.Tests/Services/BookServiceTests.cs b/API.Tests/Services/BookServiceTests.cs new file mode 100644 index 000000000..ecc374830 --- /dev/null +++ b/API.Tests/Services/BookServiceTests.cs @@ -0,0 +1,31 @@ +using System.IO; +using API.Interfaces; +using API.Services; +using Microsoft.Extensions.Logging; +using NSubstitute; +using Xunit; + +namespace API.Tests.Services +{ + public class BookServiceTests + { + private readonly IBookService _bookService; + private readonly ILogger _logger = Substitute.For>(); + + public BookServiceTests() + { + _bookService = new BookService(_logger); + } + + [Theory] + [InlineData("The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub", 16)] + [InlineData("Non-existent file.epub", 0)] + [InlineData("Non an ebub.pdf", 0)] + public void GetNumberOfPagesTest(string filePath, int expectedPages) + { + var testDirectory = 
Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService/EPUB"); + Assert.Equal(expectedPages, _bookService.GetNumberOfPages(Path.Join(testDirectory, filePath))); + } + + } +} \ No newline at end of file diff --git a/API.Tests/Services/CacheServiceTests.cs b/API.Tests/Services/CacheServiceTests.cs index 2072dae1f..410c43ade 100644 --- a/API.Tests/Services/CacheServiceTests.cs +++ b/API.Tests/Services/CacheServiceTests.cs @@ -41,7 +41,7 @@ // //[InlineData("", 0, "")] // public void GetCachedPagePathTest_Should() // { - // // TODO: Figure out how to test this + // // // string archivePath = "flat file.zip"; // // int pageNum = 0; // // string expected = "cache/1/pexels-photo-6551949.jpg"; diff --git a/API.Tests/Services/DirectoryServiceTests.cs b/API.Tests/Services/DirectoryServiceTests.cs index 39ff717c5..3754e910a 100644 --- a/API.Tests/Services/DirectoryServiceTests.cs +++ b/API.Tests/Services/DirectoryServiceTests.cs @@ -1,4 +1,5 @@ -using System.IO; +using System.Collections.Generic; +using System.IO; using System.Linq; using API.Services; using Microsoft.Extensions.Logging; @@ -18,6 +19,17 @@ namespace API.Tests.Services _directoryService = new DirectoryService(_logger); } + [Fact] + public void GetFilesTest_Should_Be28() + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Manga"); + var files = new List(); + var fileCount = DirectoryService.TraverseTreeParallelForEach(testDirectory, s => files.Add(s), + API.Parser.Parser.ArchiveFileExtensions, _logger); + + Assert.Equal(28, fileCount); + } + [Fact] public void GetFiles_WithCustomRegex_ShouldPass_Test() { @@ -74,5 +86,17 @@ namespace API.Tests.Services Assert.DoesNotContain(dirs, s => s.Contains("regex")); } + + [Theory] + [InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")] + [InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake", "Omake,Specials,Love Hina")] + 
[InlineData("C:/Manga", "C:/Manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")] + [InlineData("C:/Manga", @"C:\Manga\Love Hina\Specials\Omake\", "Omake,Specials,Love Hina")] + [InlineData(@"/manga/", @"/manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")] + public void GetFoldersTillRoot_Test(string rootPath, string fullpath, string expectedArray) + { + var expected = expectedArray.Split(","); + Assert.Equal(expected, DirectoryService.GetFoldersTillRoot(rootPath, fullpath)); + } } } \ No newline at end of file diff --git a/API.Tests/Services/ScannerServiceTests.cs b/API.Tests/Services/ScannerServiceTests.cs index c052a8880..7b7e6bc2f 100644 --- a/API.Tests/Services/ScannerServiceTests.cs +++ b/API.Tests/Services/ScannerServiceTests.cs @@ -1,10 +1,22 @@ using System; +using System.Collections.Concurrent; using System.Collections.Generic; +using System.Data.Common; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using API.Data; using API.Entities; using API.Interfaces; using API.Interfaces.Services; +using API.Parser; using API.Services; using API.Services.Tasks; +using API.Tests.Helpers; +using AutoMapper; +using Microsoft.Data.Sqlite; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; using Microsoft.Extensions.Logging; using NSubstitute; using Xunit; @@ -12,48 +24,180 @@ using Xunit.Abstractions; namespace API.Tests.Services { - public class ScannerServiceTests + public class ScannerServiceTests : IDisposable { private readonly ITestOutputHelper _testOutputHelper; private readonly ScannerService _scannerService; private readonly ILogger _logger = Substitute.For>(); - private readonly IUnitOfWork _unitOfWork = Substitute.For(); private readonly IArchiveService _archiveService = Substitute.For(); - private readonly IMetadataService _metadataService; + private readonly IBookService _bookService = Substitute.For(); private readonly ILogger _metadataLogger = Substitute.For>(); - private 
Library _libraryMock; + + private readonly DbConnection _connection; + private readonly DataContext _context; + public ScannerServiceTests(ITestOutputHelper testOutputHelper) { + var contextOptions = new DbContextOptionsBuilder() + .UseSqlite(CreateInMemoryDatabase()) + .Options; + _connection = RelationalOptionsExtension.Extract(contextOptions).Connection; + + _context = new DataContext(contextOptions); + Task.Run(SeedDb).GetAwaiter().GetResult(); + + + //BackgroundJob.Enqueue is what I need to mock or something (it's static...) + // ICacheService cacheService, ILogger logger, IScannerService scannerService, + // IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService, + // IBackgroundJobClient jobClient + //var taskScheduler = new TaskScheduler(Substitute.For(), Substitute.For>(), Substitute.For<) + + + // Substitute.For>() - Not needed because only for UserService + IUnitOfWork unitOfWork = new UnitOfWork(_context, Substitute.For(), null); + + _testOutputHelper = testOutputHelper; - _scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService); - _metadataService= Substitute.For(_unitOfWork, _metadataLogger, _archiveService); - _libraryMock = new Library() + IMetadataService metadataService = Substitute.For(unitOfWork, _metadataLogger, _archiveService, _bookService); + _scannerService = new ScannerService(unitOfWork, _logger, _archiveService, metadataService, _bookService); + } + + private async Task SeedDb() + { + await _context.Database.MigrateAsync(); + await Seed.SeedSettings(_context); + + _context.Library.Add(new Library() { - Id = 1, Name = "Manga", Folders = new List() { new FolderPath() { - Id = 1, - LastScanned = DateTime.Now, - LibraryId = 1, - Path = "E:/Manga" - } - }, - LastModified = DateTime.Now, - Series = new List() - { - new Series() - { - Id = 0, - Name = "Darker Than Black" + Path = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test 
Data/ScannerService/Manga") } } + }); + return await _context.SaveChangesAsync() > 0; + } + + // [Fact] + // public void Test() + // { + // _scannerService.ScanLibrary(1, false); + // + // var series = _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1).Result.Series; + // } + + [Fact] + public void FindSeriesNotOnDisk_Should_RemoveNothing_Test() + { + var infos = new Dictionary>(); + + AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black"}); + AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1"}); + AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10"}); + + var existingSeries = new List(); + existingSeries.Add(new Series() + { + Name = "Cage of Eden", + LocalizedName = "Cage of Eden", + OriginalName = "Cage of Eden", + NormalizedName = API.Parser.Parser.Normalize("Cage of Eden") + }); + existingSeries.Add(new Series() + { + Name = "Darker Than Black", + LocalizedName = "Darker Than Black", + OriginalName = "Darker Than Black", + NormalizedName = API.Parser.Parser.Normalize("Darker Than Black") + }); + + + + Assert.Empty(_scannerService.FindSeriesNotOnDisk(existingSeries, infos)); + } + + [Theory] + [InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")] + [InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")] + [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker than Black")] + [InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")] + public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected) + { + var collectedSeries = new ConcurrentDictionary>(); + foreach (var seriesName in existingSeriesNames) + { + AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName}); + } + + var actualName = _scannerService.MergeName(collectedSeries, new ParserInfo() + { + Series = parsedInfoName + }); + + Assert.Equal(expected, actualName); + } + + [Fact] + public void 
RemoveMissingSeries_Should_RemoveSeries() + { + var existingSeries = new List() + { + EntityFactory.CreateSeries("Darker than Black Vol 1"), + EntityFactory.CreateSeries("Darker than Black"), + EntityFactory.CreateSeries("Beastars"), }; + var missingSeries = new List() + { + EntityFactory.CreateSeries("Darker than Black Vol 1"), + }; + existingSeries = ScannerService.RemoveMissingSeries(existingSeries, missingSeries, out var removeCount).ToList(); + + Assert.DoesNotContain(missingSeries[0].Name, existingSeries.Select(s => s.Name)); + Assert.Equal(missingSeries.Count, removeCount); + } + + private void AddToParsedInfo(IDictionary> collectedSeries, ParserInfo info) + { + if (collectedSeries.GetType() == typeof(ConcurrentDictionary<,>)) + { + ((ConcurrentDictionary>) collectedSeries).AddOrUpdate(info.Series, new List() {info}, (_, oldValue) => + { + oldValue ??= new List(); + if (!oldValue.Contains(info)) + { + oldValue.Add(info); + } + + return oldValue; + }); + } + else + { + if (!collectedSeries.ContainsKey(info.Series)) + { + collectedSeries.Add(info.Series, new List() {info}); + } + else + { + var list = collectedSeries[info.Series]; + if (!list.Contains(info)) + { + list.Add(info); + } + + collectedSeries[info.Series] = list; + } + + } } + + // [Fact] // public void ExistingOrDefault_Should_BeFromLibrary() @@ -111,5 +255,16 @@ namespace API.Tests.Services // _testOutputHelper.WriteLine(_libraryMock.ToString()); Assert.True(true); } + + private static DbConnection CreateInMemoryDatabase() + { + var connection = new SqliteConnection("Filename=:memory:"); + + connection.Open(); + + return connection; + } + + public void Dispose() => _connection.Dispose(); } } \ No newline at end of file diff --git a/API.Tests/Services/Test Data/ArchiveService/Archives/06_v01[DMM].zip b/API.Tests/Services/Test Data/ArchiveService/Archives/06_v01[DMM].zip deleted file mode 100644 index 8fa84e0ac..000000000 Binary files a/API.Tests/Services/Test 
Data/ArchiveService/Archives/06_v01[DMM].zip and /dev/null differ diff --git a/API.Tests/Services/Test Data/ArchiveService/Archives/macos_native.zip b/API.Tests/Services/Test Data/ArchiveService/Archives/macos_native.zip new file mode 100644 index 000000000..a84f32b35 Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/Archives/macos_native.zip differ diff --git a/API.Tests/Services/Test Data/ArchiveService/Archives/macos_none.zip b/API.Tests/Services/Test Data/ArchiveService/Archives/macos_none.zip new file mode 100644 index 000000000..abf5fb125 Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/Archives/macos_none.zip differ diff --git a/API.Tests/Services/Test Data/ArchiveService/Archives/macos_one.zip b/API.Tests/Services/Test Data/ArchiveService/Archives/macos_one.zip new file mode 100644 index 000000000..67d45d0c0 Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/Archives/macos_one.zip differ diff --git a/API.Tests/Services/Test Data/ArchiveService/CoverImages/macos_native.jpg b/API.Tests/Services/Test Data/ArchiveService/CoverImages/macos_native.jpg new file mode 100644 index 000000000..575b9e556 Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/CoverImages/macos_native.jpg differ diff --git a/API.Tests/Services/Test Data/ArchiveService/CoverImages/macos_native.zip b/API.Tests/Services/Test Data/ArchiveService/CoverImages/macos_native.zip new file mode 100644 index 000000000..a84f32b35 Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/CoverImages/macos_native.zip differ diff --git a/API.Tests/Services/Test Data/ArchiveService/CoverImages/sorting.expected.jpg b/API.Tests/Services/Test Data/ArchiveService/CoverImages/sorting.expected.jpg new file mode 100644 index 000000000..bd9d441cd Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/CoverImages/sorting.expected.jpg differ diff --git a/API.Tests/Services/Test 
Data/ArchiveService/CoverImages/sorting.zip b/API.Tests/Services/Test Data/ArchiveService/CoverImages/sorting.zip new file mode 100644 index 000000000..88e6fe03d Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/CoverImages/sorting.zip differ diff --git a/API.Tests/Services/Test Data/ArchiveService/CoverImages/v10 - duplicate covers.cbz b/API.Tests/Services/Test Data/ArchiveService/CoverImages/v10 - duplicate covers.cbz new file mode 100644 index 000000000..20cc070f9 Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/CoverImages/v10 - duplicate covers.cbz differ diff --git a/API.Tests/Services/Test Data/ArchiveService/CoverImages/v10 - duplicate covers.expected.jpg b/API.Tests/Services/Test Data/ArchiveService/CoverImages/v10 - duplicate covers.expected.jpg new file mode 100644 index 000000000..51fd89ca0 Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/CoverImages/v10 - duplicate covers.expected.jpg differ diff --git a/API.Tests/Services/Test Data/ArchiveService/Formats/One File with DB_Supported.zip b/API.Tests/Services/Test Data/ArchiveService/Formats/One File with DB_Supported.zip new file mode 100644 index 000000000..6199192cb Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/Formats/One File with DB_Supported.zip differ diff --git a/API.Tests/Services/Test Data/BookService/EPUB/The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub b/API.Tests/Services/Test Data/BookService/EPUB/The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub new file mode 100644 index 000000000..7388bc85e Binary files /dev/null and b/API.Tests/Services/Test Data/BookService/EPUB/The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub differ diff --git a/API.Tests/Services/Test Data/DirectoryService/TestCases/Manga-testcase.txt b/API.Tests/Services/Test Data/DirectoryService/TestCases/Manga-testcase.txt new file mode 
100644 index 000000000..9aebe3e44 --- /dev/null +++ b/API.Tests/Services/Test Data/DirectoryService/TestCases/Manga-testcase.txt @@ -0,0 +1,153 @@ +\A Town Where You Live\A Town Where You Live Vol. 01.zip +\A Town Where You Live\A Town Where You Live Vol. 02.zip +\A Town Where You Live\A Town Where You Live Vol. 03.zip +\A Town Where You Live\A Town Where You Live Vol. 04.zip +\A Town Where You Live\A Town Where You Live Vol. 05.zip +\A Town Where You Live\A Town Where You Live Vol. 06.zip +\A Town Where You Live\A Town Where You Live Vol. 07.zip +\A Town Where You Live\A Town Where You Live Vol. 08.zip +\A Town Where You Live\A Town Where You Live Vol. 09.zip +\A Town Where You Live\A Town Where You Live Vol. 10.zip +\A Town Where You Live\A Town Where You Live Vol. 11.zip +\A Town Where You Live\A Town Where You Live Vol. 12.zip +\A Town Where You Live\A Town Where You Live Vol. 13.zip +\A Town Where You Live\A Town Where You Live Vol. 14.zip +\A Town Where You Live\A Town Where You Live Vol. 15.zip +\A Town Where You Live\A Town Where You Live Vol. 16.zip +\A Town Where You Live\A Town Where You Live Vol. 17.zip +\A Town Where You Live\A Town Where You Live Vol. 18.zip +\A Town Where You Live\A Town Where You Live Vol. 19.zip +\A Town Where You Live\A Town Where You Live Vol. 20.zip +\A Town Where You Live\A Town Where You Live Vol. 21.zip +\A Town Where You Live\A Town Where You Live Vol. 22.zip +\A Town Where You Live\A Town Where You Live Vol. 23.zip +\A Town Where You Live\A Town Where You Live Vol. 24.zip +\A Town Where You Live\A Town Where You Live Vol. 25.zip +\A Town Where You Live\A Town Where You Live Vol. 26.zip +\A Town Where You Live\A Town Where You Live Vol. 
27.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Bonus Chapter.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Princess Lucia Collaboration.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Special Fantasy.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Special Youth's Acne.zip +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v01 (2018) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v02 (2018) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v03 (2019) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v04 (2019) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v05 (2019) (Digital) (danke-Empire).cbz +\Aiki\Aiki V01.cbz +\Aiki\Aiki V02.cbz +\Aiki\Aiki V03.cbz +\Aiki\Aiki V04.cbz +\Aiki\Aiki V05.cbz +\Aiki\Aiki V06.cbz +\Aiki\Aiki V07.cbz +\Aiki\Aiki V08.cbz +\Aiki\Aiki V09.cbz +\Aiki\Aiki V10.cbz +\Aiki\Aiki V11.cbz +\Aiki\Aiki V12.cbz +\Aiki\Aiki V13.cbz +\Aiki\Aiki V14.cbz +\Ajin - Demi-Human\Ajin - Demi-Human 074 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 074.5 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 075 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 075.5 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 076 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 077 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 078 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 079 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 
080 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 081 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 082 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 083 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 083.5 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 084 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 085 (2021) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 086 (2021) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v01 (2014) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v02 (2014) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v03 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v04 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v05 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v06 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v07 (2016) (Digital) (Hexer-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v08 (2016) (Digital) (Hexer-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v09 (2017) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v10 (2017) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v11 (2018) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v12 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v13 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v14 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v15 (2020) (Digital) (danke-Empire).cbz +\Akame ga KILL!\Akame ga KILL! v01 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v02 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! 
v03 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v04 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v05 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v06 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v07 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v08 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v09 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v10 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v11 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v12 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v13 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v14 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v15 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v02 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v03 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v04 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v05 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v06 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v07 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v08 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v09 (2019) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! 
ZERO v10 (2019) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v01 (2019) (F) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v02 (2019) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v03 (2019) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v04 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v05 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v06 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v07 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v08 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v09.cbz +\Beastars\BEASTARS v10.cbz +\Beastars\BEASTARS v11.cbz +\Beastars\BEASTARS v12.cbz +\Beastars\BEASTARS v13.cbz +\Beastars\BEASTARS v14.cbz +\Beastars\BEASTARS v15.cbz +\Beastars\BEASTARS v16.cbz +\Beastars\BEASTARS v17.cbz +\Beastars\BEASTARS v18.cbz +\Beastars\BEASTARS v19.cbz +\Beastars\BEASTARS v20.cbz +\Beastars\BEASTARS v21.cbz +\Black Bullet\Black Bullet - v4 c17 [batoto].zip +\Black Bullet\Black Bullet - v4 c17.5 [batoto].zip +\Black Bullet\Black Bullet - v4 c18 [batoto].zip +\Black Bullet\Black Bullet - v4 c18.5 [batoto].zip +\Black Bullet\Black Bullet - v4 c19 [batoto].zip +\Black Bullet\Black Bullet - v4 c19.5 [batoto].zip +\Black Bullet\Black Bullet - v4 c20 [batoto].zip +\Black Bullet\Black Bullet - v4 c20.5 [batoto].zip +\Black Bullet\Black Bullet v01 c01.rar +\Black Bullet\Black Bullet v01 c02.rar +\Black Bullet\Black Bullet v01 c03.rar +\Black Bullet\Black Bullet v01 c04.rar +\Black Bullet\Black Bullet v01 c05.rar +\Black Bullet\Black Bullet v01 c06.rar +\Black Bullet\Black Bullet v01 c07.rar +\Black Bullet\Black Bullet v01 c08.rar +\Black Bullet\Black Bullet v01 c09.5.rar +\Black Bullet\Black Bullet v01 c09.rar +\Black Bullet\Black Bullet v01 c10.rar +\Black Bullet\Black Bullet v01 c11.zip +\Black Bullet\Black Bullet v01 c12.5.rar +\Black Bullet\Black Bullet v01 c12.rar +\Black Bullet\Black Bullet v01 c13.rar +\Black Bullet\Black Bullet v01 c14.rar +\Black Bullet\Black Bullet v01 c15.rar +\Black Bullet\Black Bullet v01 c16.rar diff --git a/API.Tests/Services/Test 
Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_omake.zip b/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_omake.zip new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v01.zip b/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v01.zip new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v02.zip b/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v02.zip new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v03.zip b/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v03.zip new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v04.zip b/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v04.zip new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v01 (digital).cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v01 (digital).cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v02.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v02.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v03.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v03.cbz new file mode 100644 index 000000000..e69de29bb diff --git 
a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v04.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v04.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v05.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v05.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v06.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v06.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v07.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v07.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v08.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v08.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v09.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v09.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v10.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v10.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v11.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v11.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v12.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v12.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test 
Data/ScannerService/Manga/BEASTARS/BEASTARS v13.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v13.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v14.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v14.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v15.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v15.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v01.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v01.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v02.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v02.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v03.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v03.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v04.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v04.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v05.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v05.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v06.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v06.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! 
v07.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v07.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v10.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v10.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/TestCases/Manga-testcase.txt b/API.Tests/Services/Test Data/ScannerService/TestCases/Manga-testcase.txt new file mode 100644 index 000000000..9aebe3e44 --- /dev/null +++ b/API.Tests/Services/Test Data/ScannerService/TestCases/Manga-testcase.txt @@ -0,0 +1,153 @@ +\A Town Where You Live\A Town Where You Live Vol. 01.zip +\A Town Where You Live\A Town Where You Live Vol. 02.zip +\A Town Where You Live\A Town Where You Live Vol. 03.zip +\A Town Where You Live\A Town Where You Live Vol. 04.zip +\A Town Where You Live\A Town Where You Live Vol. 05.zip +\A Town Where You Live\A Town Where You Live Vol. 06.zip +\A Town Where You Live\A Town Where You Live Vol. 07.zip +\A Town Where You Live\A Town Where You Live Vol. 08.zip +\A Town Where You Live\A Town Where You Live Vol. 09.zip +\A Town Where You Live\A Town Where You Live Vol. 10.zip +\A Town Where You Live\A Town Where You Live Vol. 11.zip +\A Town Where You Live\A Town Where You Live Vol. 12.zip +\A Town Where You Live\A Town Where You Live Vol. 13.zip +\A Town Where You Live\A Town Where You Live Vol. 14.zip +\A Town Where You Live\A Town Where You Live Vol. 15.zip +\A Town Where You Live\A Town Where You Live Vol. 16.zip +\A Town Where You Live\A Town Where You Live Vol. 17.zip +\A Town Where You Live\A Town Where You Live Vol. 18.zip +\A Town Where You Live\A Town Where You Live Vol. 19.zip +\A Town Where You Live\A Town Where You Live Vol. 20.zip +\A Town Where You Live\A Town Where You Live Vol. 21.zip +\A Town Where You Live\A Town Where You Live Vol. 22.zip +\A Town Where You Live\A Town Where You Live Vol. 
23.zip +\A Town Where You Live\A Town Where You Live Vol. 24.zip +\A Town Where You Live\A Town Where You Live Vol. 25.zip +\A Town Where You Live\A Town Where You Live Vol. 26.zip +\A Town Where You Live\A Town Where You Live Vol. 27.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Bonus Chapter.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Princess Lucia Collaboration.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Special Fantasy.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Special Youth's Acne.zip +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v01 (2018) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v02 (2018) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v03 (2019) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v04 (2019) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v05 (2019) (Digital) (danke-Empire).cbz +\Aiki\Aiki V01.cbz +\Aiki\Aiki V02.cbz +\Aiki\Aiki V03.cbz +\Aiki\Aiki V04.cbz +\Aiki\Aiki V05.cbz +\Aiki\Aiki V06.cbz +\Aiki\Aiki V07.cbz +\Aiki\Aiki V08.cbz +\Aiki\Aiki V09.cbz +\Aiki\Aiki V10.cbz +\Aiki\Aiki V11.cbz +\Aiki\Aiki V12.cbz +\Aiki\Aiki V13.cbz +\Aiki\Aiki V14.cbz +\Ajin - Demi-Human\Ajin - Demi-Human 074 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 074.5 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 075 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 075.5 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 076 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 077 
(2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 078 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 079 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 080 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 081 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 082 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 083 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 083.5 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 084 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 085 (2021) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 086 (2021) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v01 (2014) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v02 (2014) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v03 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v04 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v05 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v06 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v07 (2016) (Digital) (Hexer-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v08 (2016) (Digital) (Hexer-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v09 (2017) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v10 (2017) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v11 (2018) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v12 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v13 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v14 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - 
Demi-Human v15 (2020) (Digital) (danke-Empire).cbz +\Akame ga KILL!\Akame ga KILL! v01 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v02 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v03 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v04 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v05 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v06 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v07 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v08 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v09 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v10 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v11 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v12 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v13 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v14 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v15 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v02 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v03 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v04 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v05 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v06 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v07 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v08 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v09 (2019) (Digital) (LuCaZ).cbz +\Akame ga KILL! 
ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v10 (2019) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v01 (2019) (F) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v02 (2019) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v03 (2019) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v04 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v05 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v06 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v07 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v08 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v09.cbz +\Beastars\BEASTARS v10.cbz +\Beastars\BEASTARS v11.cbz +\Beastars\BEASTARS v12.cbz +\Beastars\BEASTARS v13.cbz +\Beastars\BEASTARS v14.cbz +\Beastars\BEASTARS v15.cbz +\Beastars\BEASTARS v16.cbz +\Beastars\BEASTARS v17.cbz +\Beastars\BEASTARS v18.cbz +\Beastars\BEASTARS v19.cbz +\Beastars\BEASTARS v20.cbz +\Beastars\BEASTARS v21.cbz +\Black Bullet\Black Bullet - v4 c17 [batoto].zip +\Black Bullet\Black Bullet - v4 c17.5 [batoto].zip +\Black Bullet\Black Bullet - v4 c18 [batoto].zip +\Black Bullet\Black Bullet - v4 c18.5 [batoto].zip +\Black Bullet\Black Bullet - v4 c19 [batoto].zip +\Black Bullet\Black Bullet - v4 c19.5 [batoto].zip +\Black Bullet\Black Bullet - v4 c20 [batoto].zip +\Black Bullet\Black Bullet - v4 c20.5 [batoto].zip +\Black Bullet\Black Bullet v01 c01.rar +\Black Bullet\Black Bullet v01 c02.rar +\Black Bullet\Black Bullet v01 c03.rar +\Black Bullet\Black Bullet v01 c04.rar +\Black Bullet\Black Bullet v01 c05.rar +\Black Bullet\Black Bullet v01 c06.rar +\Black Bullet\Black Bullet v01 c07.rar +\Black Bullet\Black Bullet v01 c08.rar +\Black Bullet\Black Bullet v01 c09.5.rar +\Black Bullet\Black Bullet v01 c09.rar +\Black Bullet\Black Bullet v01 c10.rar +\Black Bullet\Black Bullet v01 c11.zip +\Black Bullet\Black Bullet v01 c12.5.rar +\Black Bullet\Black Bullet v01 c12.rar +\Black Bullet\Black Bullet v01 c13.rar +\Black Bullet\Black Bullet v01 c14.rar +\Black Bullet\Black Bullet v01 c15.rar +\Black Bullet\Black Bullet 
v01 c16.rar diff --git a/API.Tests/generate_test_data.py b/API.Tests/generate_test_data.py new file mode 100644 index 000000000..69652969a --- /dev/null +++ b/API.Tests/generate_test_data.py @@ -0,0 +1,80 @@ +""" This script should be run on a directory which will generate a test case file + that can be loaded into the renametest.py""" +import os +from pathlib import Path +import shutil + +verbose = False + +def print_log(val): + if verbose: + print(val) + + +def create_test_base(file, root_dir): + """ Creates and returns a new base directory for data creation for a given testcase.""" + base_dir = os.path.split(file.split('-testcase.txt')[0])[-1] + print_log('base_dir: {0}'.format(base_dir)) + new_dir = os.path.join(root_dir, base_dir) + print_log('new dir: {0}'.format(new_dir)) + p = Path(new_dir) + if not p.exists(): + os.mkdir(new_dir) + + return new_dir + + + +def generate_data(file, root_dir): + ''' Generates directories and fake files for testing against ''' + + base_dir = '' + if file.endswith('-testcase.txt'): + base_dir = create_test_base(file, root_dir) + + files_to_create = [] + with open(file, 'r') as in_file: + files_to_create = in_file.read().splitlines() + + for filepath in files_to_create: + for part in os.path.split(filepath): + part_path = os.path.join(base_dir, part) + print_log('Checking if {0} exists '.format(part_path)) + p = Path(part_path) + + if not p.exists(): + print_log('Creating: {0}'.format(part)) + + if p.suffix != '': + with open(os.path.join(root_dir, base_dir + '/' + filepath), 'w+') as f: + f.write('') + else: + os.mkdir(part_path) + +def clean_up_generated_data(root_dir): + for root, dirs, files in os.walk(root_dir): + for dir in dirs: + shutil.rmtree(os.path.join(root, dir)) + for file in files: + if not file.endswith('-testcase.txt'): + print_log('Removing {0}'.format(os.path.join(root, file))) + os.remove(os.path.join(root, file)) + + +def generate_test_file(): + root_dir = os.path.abspath('.') + current_folder = 
os.path.split(root_dir)[-1] + out_files = [] + for root, _, files in os.walk(root_dir): + for file in files: + if not file.endswith('-testcase.txt'): + filename = os.path.join(root.replace(root_dir, ''), file) # root_dir or root_dir + '//'? + out_files.append(filename) + + with open(os.path.join(root_dir, current_folder + '-testcase.txt'), 'w+') as f: + for filename in out_files: + f.write(filename + '\n') + +if __name__ == '__main__': + verbose = True + generate_test_file() \ No newline at end of file diff --git a/API/.dockerignore b/API/.dockerignore new file mode 100644 index 000000000..cd967fc3a --- /dev/null +++ b/API/.dockerignore @@ -0,0 +1,25 @@ +**/.dockerignore +**/.env +**/.git +**/.gitignore +**/.project +**/.settings +**/.toolstarget +**/.vs +**/.vscode +**/.idea +**/*.*proj.user +**/*.dbmdl +**/*.jfm +**/azds.yaml +**/bin +**/charts +**/docker-compose* +**/Dockerfile* +**/node_modules +**/npm-debug.log +**/obj +**/secrets.dev.yaml +**/values.dev.yaml +LICENSE +README.md \ No newline at end of file diff --git a/API/API.csproj b/API/API.csproj index 0a8cfeb3b..8c96cb129 100644 --- a/API/API.csproj +++ b/API/API.csproj @@ -4,37 +4,43 @@ Default net5.0 true + Linux false + ../favicon.ico - - - + + + + - - - - + + + + + all runtime; build; native; contentfiles; analyzers; buildtransitive - + - - + + + - + all runtime; build; native; contentfiles; analyzers; buildtransitive - - + + + @@ -55,4 +61,8 @@ + + <_ContentIncludedByDefault Remove="logs\kavita.json" /> + + diff --git a/API/Comparators/ChapterSortComparer.cs b/API/Comparators/ChapterSortComparer.cs index 1798afe7e..ce7e2f374 100644 --- a/API/Comparators/ChapterSortComparer.cs +++ b/API/Comparators/ChapterSortComparer.cs @@ -2,20 +2,9 @@ namespace API.Comparators { - public class ChapterSortComparer : IComparer + public class ChapterSortComparer : IComparer { - // public int Compare(int x, int y) - // { - // if (x == 0 && y == 0) return 0; - // // if x is 0, it comes second - // if (x == 0) return 1; 
- // // if y is 0, it comes second - // if (y == 0) return -1; - // - // return x.CompareTo(y); - // } - - public int Compare(float x, float y) + public int Compare(double x, double y) { if (x == 0.0 && y == 0.0) return 0; // if x is 0, it comes second diff --git a/API/Comparators/NaturalSortComparer.cs b/API/Comparators/NaturalSortComparer.cs new file mode 100644 index 000000000..ac10e09ae --- /dev/null +++ b/API/Comparators/NaturalSortComparer.cs @@ -0,0 +1,102 @@ +using System; +using System.Collections.Generic; +using System.Text.RegularExpressions; +using static System.GC; +using static System.String; + +namespace API.Comparators +{ + public sealed class NaturalSortComparer : IComparer, IDisposable + { + private readonly bool _isAscending; + private Dictionary _table = new(); + + private bool _disposed; + + + public NaturalSortComparer(bool inAscendingOrder = true) + { + _isAscending = inAscendingOrder; + } + + int IComparer.Compare(string x, string y) + { + if (x == y) return 0; + + if (!_table.TryGetValue(x ?? Empty, out var x1)) + { + // .Replace(" ", Empty) + x1 = Regex.Split(x ?? Empty, "([0-9]+)"); + _table.Add(x ?? Empty, x1); + } + + if (!_table.TryGetValue(y ?? Empty, out var y1)) + { + y1 = Regex.Split(y ?? Empty, "([0-9]+)"); + _table.Add(y ?? Empty, y1); + } + + int returnVal; + + for (var i = 0; i < x1.Length && i < y1.Length; i++) + { + if (x1[i] == y1[i]) continue; + returnVal = PartCompare(x1[i], y1[i]); + return _isAscending ? returnVal : -returnVal; + } + + if (y1.Length > x1.Length) + { + returnVal = 1; + } + else if (x1.Length > y1.Length) + { + returnVal = -1; + } + else + { + returnVal = 0; + } + + return _isAscending ? 
returnVal : -returnVal; + } + + private static int PartCompare(string left, string right) + { + if (!int.TryParse(left, out var x)) + return Compare(left, right, StringComparison.Ordinal); + + if (!int.TryParse(right, out var y)) + return Compare(left, right, StringComparison.Ordinal); + + return x.CompareTo(y); + } + + private void Dispose(bool disposing) + { + if (!_disposed) + { + if (disposing) + { + // called via myClass.Dispose(). + _table.Clear(); + _table = null; + } + // Release unmanaged resources. + // Set large fields to null. + _disposed = true; + } + } + + public void Dispose() + { + Dispose(true); + SuppressFinalize(this); + } + + ~NaturalSortComparer() // the finalizer + { + Dispose(false); + } + } +} \ No newline at end of file diff --git a/API/Constants/PolicyConstants.cs b/API/Constants/PolicyConstants.cs index d64a2bab6..6b6d93ae0 100644 --- a/API/Constants/PolicyConstants.cs +++ b/API/Constants/PolicyConstants.cs @@ -2,7 +2,7 @@ { public static class PolicyConstants { - public static readonly string AdminRole = "Admin"; - public static readonly string PlebRole = "Pleb"; + public const string AdminRole = "Admin"; + public const string PlebRole = "Pleb"; } } \ No newline at end of file diff --git a/API/Controllers/AccountController.cs b/API/Controllers/AccountController.cs index 04d0bf9d5..671a436b0 100644 --- a/API/Controllers/AccountController.cs +++ b/API/Controllers/AccountController.cs @@ -82,14 +82,14 @@ namespace API.Controllers [HttpPost("register")] public async Task> Register(RegisterDto registerDto) { - if (await _userManager.Users.AnyAsync(x => x.UserName == registerDto.Username)) + if (await _userManager.Users.AnyAsync(x => x.NormalizedUserName == registerDto.Username.ToUpper())) { return BadRequest("Username is taken."); } var user = _mapper.Map(registerDto); user.UserPreferences ??= new AppUserPreferences(); - + var result = await _userManager.CreateAsync(user, registerDto.Password); if (!result.Succeeded) return 
BadRequest(result.Errors); @@ -132,7 +132,7 @@ namespace API.Controllers var result = await _signInManager .CheckPasswordSignInAsync(user, loginDto.Password, false); - if (!result.Succeeded) return Unauthorized(); + if (!result.Succeeded) return Unauthorized("Your credentials are not correct."); // Update LastActive on account user.LastActive = DateTime.Now; diff --git a/API/Controllers/BookController.cs b/API/Controllers/BookController.cs new file mode 100644 index 000000000..01588f3f4 --- /dev/null +++ b/API/Controllers/BookController.cs @@ -0,0 +1,295 @@ +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using API.DTOs; +using API.Extensions; +using API.Interfaces; +using API.Services; +using HtmlAgilityPack; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; +using VersOne.Epub; + +namespace API.Controllers +{ + public class BookController : BaseApiController + { + private readonly ILogger _logger; + private readonly IBookService _bookService; + private readonly IUnitOfWork _unitOfWork; + private static readonly string BookApiUrl = "book-resources?file="; + + + public BookController(ILogger logger, IBookService bookService, IUnitOfWork unitOfWork) + { + _logger = logger; + _bookService = bookService; + _unitOfWork = unitOfWork; + } + + [HttpGet("{chapterId}/book-info")] + public async Task> GetBookInfo(int chapterId) + { + var chapter = await _unitOfWork.VolumeRepository.GetChapterAsync(chapterId); + var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath); + + return book.Title; + } + + [HttpGet("{chapterId}/book-resources")] + public async Task GetBookPageResources(int chapterId, [FromQuery] string file) + { + var chapter = await _unitOfWork.VolumeRepository.GetChapterAsync(chapterId); + var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath); + + var key = BookService.CleanContentKeys(file); + if (!book.Content.AllFiles.ContainsKey(key)) return 
BadRequest("File was not found in book"); + + var bookFile = book.Content.AllFiles[key]; + var content = await bookFile.ReadContentAsBytesAsync(); + Response.AddCacheHeader(content); + var contentType = BookService.GetContentType(bookFile.ContentType); + return File(content, contentType, $"{chapterId}-{file}"); + } + + [HttpGet("{chapterId}/chapters")] + public async Task>> GetBookChapters(int chapterId) + { + // This will return a list of mappings from ID -> pagenum. ID will be the xhtml key and pagenum will be the reading order + // this is used to rewrite anchors in the book text so that we always load properly in FE + var chapter = await _unitOfWork.VolumeRepository.GetChapterAsync(chapterId); + var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath); + var mappings = await _bookService.CreateKeyToPageMappingAsync(book); + + var navItems = await book.GetNavigationAsync(); + var chaptersList = new List(); + + foreach (var navigationItem in navItems) + { + if (navigationItem.NestedItems.Count > 0) + { + _logger.LogDebug("Header: {Header}", navigationItem.Title); + var nestedChapters = new List(); + + foreach (var nestedChapter in navigationItem.NestedItems) + { + if (nestedChapter.Link == null) continue; + var key = BookService.CleanContentKeys(nestedChapter.Link.ContentFileName); + if (mappings.ContainsKey(key)) + { + nestedChapters.Add(new BookChapterItem() + { + Title = nestedChapter.Title, + Page = mappings[key], + Part = nestedChapter.Link.Anchor ?? 
string.Empty, + Children = new List() + }); + } + } + + if (navigationItem.Link == null) + { + var item = new BookChapterItem() + { + Title = navigationItem.Title, + Children = nestedChapters + }; + if (nestedChapters.Count > 0) + { + item.Page = nestedChapters[0].Page; + } + chaptersList.Add(item); + } + else + { + var groupKey = BookService.CleanContentKeys(navigationItem.Link.ContentFileName); + if (mappings.ContainsKey(groupKey)) + { + chaptersList.Add(new BookChapterItem() + { + Title = navigationItem.Title, + Page = mappings[groupKey], + Children = nestedChapters + }); + } + } + } + } + + if (chaptersList.Count == 0) + { + // Generate from TOC + var tocPage = book.Content.Html.Keys.FirstOrDefault(k => k.ToUpper().Contains("TOC")); + if (tocPage == null) return Ok(chaptersList); + + // Find all anchor tags, for each anchor we get inner text, to lower then titlecase on UI. Get href and generate page content + var doc = new HtmlDocument(); + var content = await book.Content.Html[tocPage].ReadContentAsync(); + doc.LoadHtml(content); + var anchors = doc.DocumentNode.SelectNodes("//a"); + if (anchors == null) return Ok(chaptersList); + + foreach (var anchor in anchors) + { + if (anchor.Attributes.Contains("href")) + { + var key = BookService.CleanContentKeys(anchor.Attributes["href"].Value).Split("#")[0]; + if (!mappings.ContainsKey(key)) + { + // Fallback to searching for key (bad epub metadata) + var correctedKey = book.Content.Html.Keys.SingleOrDefault(s => s.EndsWith(key)); + if (!string.IsNullOrEmpty(correctedKey)) + { + key = correctedKey; + } + } + if (!string.IsNullOrEmpty(key) && mappings.ContainsKey(key)) + { + var part = string.Empty; + if (anchor.Attributes["href"].Value.Contains("#")) + { + part = anchor.Attributes["href"].Value.Split("#")[1]; + } + chaptersList.Add(new BookChapterItem() + { + Title = anchor.InnerText, + Page = mappings[key], + Part = part, + Children = new List() + }); + } + } + } + + } + return Ok(chaptersList); + } + + 
[HttpGet("{chapterId}/book-page")] + public async Task> GetBookPage(int chapterId, [FromQuery] int page) + { + var chapter = await _unitOfWork.VolumeRepository.GetChapterAsync(chapterId); + + var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath); + var mappings = await _bookService.CreateKeyToPageMappingAsync(book); + + var counter = 0; + var doc = new HtmlDocument(); + var baseUrl = Request.Scheme + "://" + Request.Host + Request.PathBase + "/api/"; + var apiBase = baseUrl + "book/" + chapterId + "/" + BookApiUrl; + var bookPages = await book.GetReadingOrderAsync(); + foreach (var contentFileRef in bookPages) + { + if (page == counter) + { + var content = await contentFileRef.ReadContentAsync(); + if (contentFileRef.ContentType != EpubContentType.XHTML_1_1) return Ok(content); + + doc.LoadHtml(content); + var body = doc.DocumentNode.SelectSingleNode("/html/body"); + + var inlineStyles = doc.DocumentNode.SelectNodes("//style"); + if (inlineStyles != null) + { + foreach (var inlineStyle in inlineStyles) + { + var styleContent = await _bookService.ScopeStyles(inlineStyle.InnerHtml, apiBase); + body.PrependChild(HtmlNode.CreateNode($"")); + } + } + + var styleNodes = doc.DocumentNode.SelectNodes("/html/head/link"); + if (styleNodes != null) + { + foreach (var styleLinks in styleNodes) + { + var key = BookService.CleanContentKeys(styleLinks.Attributes["href"].Value); + // Some epubs are malformed the key in content.opf might be: content/resources/filelist_0_0.xml but the actual html links to resources/filelist_0_0.xml + // In this case, we will do a search for the key that ends with + if (!book.Content.Css.ContainsKey(key)) + { + var correctedKey = book.Content.Css.Keys.SingleOrDefault(s => s.EndsWith(key)); + if (correctedKey == null) + { + _logger.LogError("Epub is Malformed, key: {Key} is not matching OPF file", key); + continue; + } + + key = correctedKey; + } + var styleContent = await _bookService.ScopeStyles(await 
book.Content.Css[key].ReadContentAsync(), apiBase); + body.PrependChild(HtmlNode.CreateNode($"")); + } + } + + var anchors = doc.DocumentNode.SelectNodes("//a"); + if (anchors != null) + { + foreach (var anchor in anchors) + { + BookService.UpdateLinks(anchor, mappings, page); + } + } + + var images = doc.DocumentNode.SelectNodes("//img"); + if (images != null) + { + foreach (var image in images) + { + if (image.Name != "img") continue; + + // Need to do for xlink:href + if (image.Attributes["src"] != null) + { + var imageFile = image.Attributes["src"].Value; + if (!book.Content.Images.ContainsKey(imageFile)) + { + var correctedKey = book.Content.Images.Keys.SingleOrDefault(s => s.EndsWith(imageFile)); + if (correctedKey != null) + { + imageFile = correctedKey; + } + } + image.Attributes.Remove("src"); + image.Attributes.Add("src", $"{apiBase}" + imageFile); + } + } + } + + images = doc.DocumentNode.SelectNodes("//image"); + if (images != null) + { + foreach (var image in images) + { + if (image.Name != "image") continue; + + if (image.Attributes["xlink:href"] != null) + { + var imageFile = image.Attributes["xlink:href"].Value; + if (!book.Content.Images.ContainsKey(imageFile)) + { + var correctedKey = book.Content.Images.Keys.SingleOrDefault(s => s.EndsWith(imageFile)); + if (correctedKey != null) + { + imageFile = correctedKey; + } + } + image.Attributes.Remove("xlink:href"); + image.Attributes.Add("xlink:href", $"{apiBase}" + imageFile); + } + } + } + + + + + return Ok(body.InnerHtml); + } + + counter++; + } + + return BadRequest("Could not find the appropriate html for that page"); + } + } +} \ No newline at end of file diff --git a/API/Controllers/FallbackController.cs b/API/Controllers/FallbackController.cs index 56962a3d6..ecd0315e2 100644 --- a/API/Controllers/FallbackController.cs +++ b/API/Controllers/FallbackController.cs @@ -7,6 +7,7 @@ namespace API.Controllers public class FallbackController : Controller { // ReSharper disable once S4487 + // 
ReSharper disable once NotAccessedField.Local private readonly ITaskScheduler _taskScheduler; public FallbackController(ITaskScheduler taskScheduler) diff --git a/API/Controllers/ImageController.cs b/API/Controllers/ImageController.cs index 62fbd51ae..b05f99409 100644 --- a/API/Controllers/ImageController.cs +++ b/API/Controllers/ImageController.cs @@ -22,7 +22,7 @@ namespace API.Controllers const string format = "jpeg"; Response.AddCacheHeader(content); - return File(content, "image/" + format); + return File(content, "image/" + format, $"chapterId"); } [HttpGet("volume-cover")] @@ -33,7 +33,7 @@ namespace API.Controllers const string format = "jpeg"; Response.AddCacheHeader(content); - return File(content, "image/" + format); + return File(content, "image/" + format, $"volumeId"); } [HttpGet("series-cover")] @@ -44,7 +44,7 @@ namespace API.Controllers const string format = "jpeg"; Response.AddCacheHeader(content); - return File(content, "image/" + format); + return File(content, "image/" + format, $"seriesId"); } } } \ No newline at end of file diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs index bc085114b..4867be3d8 100644 --- a/API/Controllers/LibraryController.cs +++ b/API/Controllers/LibraryController.cs @@ -5,8 +5,8 @@ using System.Linq; using System.Threading.Tasks; using API.DTOs; using API.Entities; +using API.Entities.Enums; using API.Extensions; -using API.Helpers; using API.Interfaces; using API.Interfaces.Services; using AutoMapper; @@ -155,7 +155,7 @@ namespace API.Controllers [HttpPost("refresh-metadata")] public ActionResult RefreshMetadata(int libraryId) { - _taskScheduler.ScanLibrary(libraryId, true); + _taskScheduler.RefreshMetadata(libraryId); return Ok(); } @@ -164,23 +164,7 @@ namespace API.Controllers { return Ok(await _unitOfWork.LibraryRepository.GetLibraryDtosForUsernameAsync(User.GetUsername())); } - - [HttpGet("series")] - public async Task>> GetSeriesForLibrary(int libraryId, [FromQuery] 
UserParams userParams) - { - // TODO: Move this to SeriesController - var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); - var series = - await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, user.Id, userParams); - - // Apply progress/rating information (I can't work out how to do this in initial query) - await _unitOfWork.SeriesRepository.AddSeriesModifiers(user.Id, series); - - Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages); - - return Ok(series); - } - + [Authorize(Policy = "RequireAdminRole")] [HttpDelete("delete")] public async Task> DeleteLibrary(int libraryId) @@ -239,5 +223,11 @@ namespace API.Controllers return Ok(series); } + + [HttpGet("type")] + public async Task> GetLibraryType(int libraryId) + { + return Ok(await _unitOfWork.LibraryRepository.GetLibraryTypeAsync(libraryId)); + } } } \ No newline at end of file diff --git a/API/Controllers/ReaderController.cs b/API/Controllers/ReaderController.cs index d157980ad..43197248e 100644 --- a/API/Controllers/ReaderController.cs +++ b/API/Controllers/ReaderController.cs @@ -1,8 +1,8 @@ -using System.Collections.Generic; +using System; +using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading.Tasks; -using API.Data; using API.DTOs; using API.Entities; using API.Extensions; @@ -46,7 +46,7 @@ namespace API.Controllers return File(content, "image/" + format); } - + [HttpGet("chapter-path")] public async Task> GetImagePath(int chapterId) { @@ -218,7 +218,8 @@ namespace API.Controllers PagesRead = bookmarkDto.PageNum, VolumeId = bookmarkDto.VolumeId, SeriesId = bookmarkDto.SeriesId, - ChapterId = bookmarkDto.ChapterId + ChapterId = bookmarkDto.ChapterId, + LastModified = DateTime.Now }); } else @@ -226,8 +227,9 @@ namespace API.Controllers userProgress.PagesRead = bookmarkDto.PageNum; userProgress.SeriesId = bookmarkDto.SeriesId; userProgress.VolumeId = 
bookmarkDto.VolumeId; + userProgress.LastModified = DateTime.Now; } - + _unitOfWork.UserRepository.Update(user); if (await _unitOfWork.Complete()) @@ -237,5 +239,81 @@ namespace API.Controllers return BadRequest("Could not save progress"); } + + /// + /// Returns the next logical volume from the series. + /// + /// + /// + /// + /// chapter id for next manga + [HttpGet("next-chapter")] + public async Task> GetNextChapter(int seriesId, int volumeId, int currentChapterId) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + var volumes = await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, user.Id); + var currentVolume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId); + + var next = false; + if (currentVolume.Number == 0) + { + foreach (var chapter in currentVolume.Chapters) + { + if (next) + { + return Ok(chapter.Id); + } + if (currentChapterId == chapter.Id) next = true; + } + } + + foreach (var volume in volumes) + { + if (volume.Number == currentVolume.Number + 1) + { + return Ok(volume.Chapters.FirstOrDefault()?.Id); + } + } + return Ok(-1); + } + + /// + /// Returns the previous logical volume from the series. 
+ /// + /// + /// + /// + /// chapter id for next manga + [HttpGet("prev-chapter")] + public async Task> GetPreviousChapter(int seriesId, int volumeId, int currentChapterId) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + var volumes = await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, user.Id); + var currentVolume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId); + + var next = false; + if (currentVolume.Number == 0) + { + var chapters = currentVolume.Chapters.Reverse(); + foreach (var chapter in chapters) + { + if (next) + { + return Ok(chapter.Id); + } + if (currentChapterId == chapter.Id) next = true; + } + } + + foreach (var volume in volumes.Reverse()) + { + if (volume.Number == currentVolume.Number - 1) + { + return Ok(volume.Chapters.LastOrDefault()?.Id); + } + } + return Ok(-1); + } + } } \ No newline at end of file diff --git a/API/Controllers/SeriesController.cs b/API/Controllers/SeriesController.cs index e63be3eb5..0654f7d70 100644 --- a/API/Controllers/SeriesController.cs +++ b/API/Controllers/SeriesController.cs @@ -3,6 +3,7 @@ using System.Threading.Tasks; using API.DTOs; using API.Entities; using API.Extensions; +using API.Helpers; using API.Interfaces; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; @@ -23,6 +24,23 @@ namespace API.Controllers _unitOfWork = unitOfWork; } + [HttpGet] + public async Task>> GetSeriesForLibrary(int libraryId, [FromQuery] UserParams userParams) + { + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + var series = + await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, user.Id, userParams); + + // Apply progress/rating information (I can't work out how to do this in initial query) + if (series == null) return BadRequest("Could not get series for library"); + + await _unitOfWork.SeriesRepository.AddSeriesModifiers(user.Id, series); + + 
Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages); + + return Ok(series); + } + [HttpGet("{seriesId}")] public async Task> GetSeries(int seriesId) { @@ -70,6 +88,8 @@ namespace API.Controllers { return Ok(await _unitOfWork.VolumeRepository.GetChapterDtoAsync(chapterId)); } + + [HttpPost("update-rating")] @@ -105,11 +125,9 @@ namespace API.Controllers if (series == null) return BadRequest("Series does not exist"); - // TODO: Ensure we check against Library for Series Name change - var existingSeries = await _unitOfWork.SeriesRepository.GetSeriesByNameAsync(updateSeries.Name); - if (existingSeries != null && existingSeries.Id != series.Id ) + if (series.Name != updateSeries.Name && await _unitOfWork.SeriesRepository.DoesSeriesNameExistInLibrary(updateSeries.Name)) { - return BadRequest("A series already exists with this name. Name must be unique."); + return BadRequest("A series already exists in this library with this name. Series Names must be unique to a library."); } series.Name = updateSeries.Name; series.LocalizedName = updateSeries.LocalizedName; @@ -129,7 +147,8 @@ namespace API.Controllers [HttpGet("recently-added")] public async Task>> GetRecentlyAdded(int libraryId = 0, int limit = 20) { - return Ok(await _unitOfWork.SeriesRepository.GetRecentlyAdded(libraryId, limit)); + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + return Ok(await _unitOfWork.SeriesRepository.GetRecentlyAdded(user.Id, libraryId, limit)); } [HttpGet("in-progress")] @@ -138,5 +157,13 @@ namespace API.Controllers var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); return Ok(await _unitOfWork.SeriesRepository.GetInProgress(user.Id, libraryId, limit)); } + + [Authorize(Policy = "RequireAdminRole")] + [HttpPost("refresh-metadata")] + public ActionResult RefreshSeriesMetadata(RefreshSeriesDto refreshSeriesDto) + { + 
_taskScheduler.RefreshSeriesMetadata(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId); + return Ok(); + } } } \ No newline at end of file diff --git a/API/Controllers/ServerController.cs b/API/Controllers/ServerController.cs index 2889e2317..475323e07 100644 --- a/API/Controllers/ServerController.cs +++ b/API/Controllers/ServerController.cs @@ -3,7 +3,6 @@ using System.IO; using System.IO.Compression; using System.Threading.Tasks; using API.Extensions; -using API.Interfaces; using API.Interfaces.Services; using API.Services; using Microsoft.AspNetCore.Authorization; diff --git a/API/Controllers/SettingsController.cs b/API/Controllers/SettingsController.cs index d149aa0d4..33565af56 100644 --- a/API/Controllers/SettingsController.cs +++ b/API/Controllers/SettingsController.cs @@ -105,6 +105,13 @@ namespace API.Controllers return Ok(CronConverter.Options); } + [Authorize(Policy = "RequireAdminRole")] + [HttpGet("library-types")] + public ActionResult> GetLibraryTypes() + { + return Ok(Enum.GetNames(typeof(LibraryType))); + } + [Authorize(Policy = "RequireAdminRole")] [HttpGet("log-levels")] public ActionResult> GetLogLevels() diff --git a/API/Controllers/UsersController.cs b/API/Controllers/UsersController.cs index 607f508e9..955013bae 100644 --- a/API/Controllers/UsersController.cs +++ b/API/Controllers/UsersController.cs @@ -38,6 +38,14 @@ namespace API.Controllers return Ok(await _unitOfWork.UserRepository.GetMembersAsync()); } + [HttpGet("has-reading-progress")] + public async Task> HasReadingProgress(int libraryId) + { + var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId); + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + return Ok(await _unitOfWork.AppUserProgressRepository.UserHasProgress(library.Type, user.Id)); + } + [HttpGet("has-library-access")] public async Task> HasLibraryAccess(int libraryId) { @@ -53,7 +61,12 @@ namespace API.Controllers existingPreferences.ReadingDirection 
= preferencesDto.ReadingDirection; existingPreferences.ScalingOption = preferencesDto.ScalingOption; existingPreferences.PageSplitOption = preferencesDto.PageSplitOption; - existingPreferences.HideReadOnDetails = preferencesDto.HideReadOnDetails; + existingPreferences.BookReaderMargin = preferencesDto.BookReaderMargin; + existingPreferences.BookReaderLineSpacing = preferencesDto.BookReaderLineSpacing; + existingPreferences.BookReaderFontFamily = preferencesDto.BookReaderFontFamily; + existingPreferences.BookReaderDarkMode = preferencesDto.BookReaderDarkMode; + existingPreferences.BookReaderFontSize = preferencesDto.BookReaderFontSize; + existingPreferences.BookReaderTapToPaginate = preferencesDto.BookReaderTapToPaginate; _unitOfWork.UserRepository.Update(existingPreferences); diff --git a/API/DTOs/BookChapterItem.cs b/API/DTOs/BookChapterItem.cs new file mode 100644 index 000000000..68d1fce40 --- /dev/null +++ b/API/DTOs/BookChapterItem.cs @@ -0,0 +1,21 @@ +using System.Collections.Generic; + +namespace API.DTOs +{ + public class BookChapterItem + { + /// + /// Name of the Chapter + /// + public string Title { get; set; } + /// + /// A part represents the id of the anchor so we can scroll to it. 01_values.xhtml#h_sVZPaxUSy/ + /// + public string Part { get; set; } + /// + /// Page Number to load for the chapter + /// + public int Page { get; set; } + public ICollection Children { get; set; } + } +} \ No newline at end of file diff --git a/API/DTOs/ChapterDto.cs b/API/DTOs/ChapterDto.cs index 00139a3b2..4dcabee33 100644 --- a/API/DTOs/ChapterDto.cs +++ b/API/DTOs/ChapterDto.cs @@ -18,6 +18,14 @@ namespace API.DTOs /// public int Pages { get; init; } /// + /// If this Chapter contains files that could only be identified as Series or has Special Identifier from filename + /// + public bool IsSpecial { get; init; } + /// + /// Used for books/specials to display custom title. 
For non-specials/books, will be set to + /// + public string Title { get; init; } + /// /// The files that represent this Chapter /// public ICollection Files { get; init; } diff --git a/API/DTOs/RefreshSeriesDto.cs b/API/DTOs/RefreshSeriesDto.cs new file mode 100644 index 000000000..bc6344ea2 --- /dev/null +++ b/API/DTOs/RefreshSeriesDto.cs @@ -0,0 +1,8 @@ +namespace API.DTOs +{ + public class RefreshSeriesDto + { + public int LibraryId { get; set; } + public int SeriesId { get; set; } + } +} \ No newline at end of file diff --git a/API/DTOs/SearchResultDto.cs b/API/DTOs/SearchResultDto.cs index 114b3d03b..001883b23 100644 --- a/API/DTOs/SearchResultDto.cs +++ b/API/DTOs/SearchResultDto.cs @@ -6,6 +6,7 @@ public string Name { get; init; } public string OriginalName { get; init; } public string SortName { get; init; } + public string LocalizedName { get; init; } // Grouping information public string LibraryName { get; set; } diff --git a/API/DTOs/SeriesDto.cs b/API/DTOs/SeriesDto.cs index b3057baac..0f8f4263c 100644 --- a/API/DTOs/SeriesDto.cs +++ b/API/DTOs/SeriesDto.cs @@ -1,4 +1,6 @@ -namespace API.DTOs +using System; + +namespace API.DTOs { public class SeriesDto { @@ -21,7 +23,10 @@ /// Review from logged in user. Calculated at API-time. 
/// public string UserReview { get; set; } + + public DateTime Created { get; set; } public int LibraryId { get; set; } + public string LibraryName { get; set; } } } \ No newline at end of file diff --git a/API/DTOs/SeriesFilterDto.cs b/API/DTOs/SeriesFilterDto.cs new file mode 100644 index 000000000..e69de29bb diff --git a/API/DTOs/UserPreferencesDto.cs b/API/DTOs/UserPreferencesDto.cs index bec209a5b..baf7b5d25 100644 --- a/API/DTOs/UserPreferencesDto.cs +++ b/API/DTOs/UserPreferencesDto.cs @@ -7,9 +7,11 @@ namespace API.DTOs public ReadingDirection ReadingDirection { get; set; } public ScalingOption ScalingOption { get; set; } public PageSplitOption PageSplitOption { get; set; } - /// - /// Whether UI hides read Volumes on Details page - /// - public bool HideReadOnDetails { get; set; } + public bool BookReaderDarkMode { get; set; } = false; + public int BookReaderMargin { get; set; } + public int BookReaderLineSpacing { get; set; } + public int BookReaderFontSize { get; set; } + public string BookReaderFontFamily { get; set; } + public bool BookReaderTapToPaginate { get; set; } } } \ No newline at end of file diff --git a/API/Data/AppUserProgressRepository.cs b/API/Data/AppUserProgressRepository.cs new file mode 100644 index 000000000..38912b589 --- /dev/null +++ b/API/Data/AppUserProgressRepository.cs @@ -0,0 +1,56 @@ +using System.Linq; +using System.Threading.Tasks; +using API.Entities.Enums; +using API.Interfaces; +using Microsoft.EntityFrameworkCore; + +namespace API.Data +{ + public class AppUserProgressRepository : IAppUserProgressRepository + { + private readonly DataContext _context; + + public AppUserProgressRepository(DataContext context) + { + _context = context; + } + + /// + /// This will remove any entries that have chapterIds that no longer exists. This will execute the save as well. 
+ /// + public async Task CleanupAbandonedChapters() + { + var chapterIds = _context.Chapter.Select(c => c.Id); + + var rowsToRemove = await _context.AppUserProgresses + .Where(progress => !chapterIds.Contains(progress.ChapterId)) + .ToListAsync(); + + _context.RemoveRange(rowsToRemove); + return await _context.SaveChangesAsync() > 0 ? rowsToRemove.Count : 0; + } + + /// + /// Checks if user has any progress against a library of passed type + /// + /// + /// + /// + public async Task UserHasProgress(LibraryType libraryType, int userId) + { + var seriesIds = await _context.AppUserProgresses + .Where(aup => aup.PagesRead > 0 && aup.AppUserId == userId) + .AsNoTracking() + .Select(aup => aup.SeriesId) + .ToListAsync(); + + if (seriesIds.Count == 0) return false; + + return await _context.Series + .Include(s => s.Library) + .Where(s => seriesIds.Contains(s.Id) && s.Library.Type == libraryType) + .AsNoTracking() + .AnyAsync(); + } + } +} \ No newline at end of file diff --git a/API/Data/DataContext.cs b/API/Data/DataContext.cs index f6626d2a8..9f7437cc3 100644 --- a/API/Data/DataContext.cs +++ b/API/Data/DataContext.cs @@ -47,11 +47,16 @@ namespace API.Data .HasForeignKey(ur => ur.RoleId) .IsRequired(); } + void OnEntityTracked(object sender, EntityTrackedEventArgs e) { if (!e.FromQuery && e.Entry.State == EntityState.Added && e.Entry.Entity is IEntityDate entity) + { entity.Created = DateTime.Now; + entity.LastModified = DateTime.Now; + } + } void OnEntityStateChanged(object sender, EntityStateChangedEventArgs e) diff --git a/API/Data/DbFactory.cs b/API/Data/DbFactory.cs new file mode 100644 index 000000000..3589fc30e --- /dev/null +++ b/API/Data/DbFactory.cs @@ -0,0 +1,54 @@ +using System.Collections.Generic; +using API.Entities; +using API.Entities.Enums; +using API.Parser; +using API.Services.Tasks; + +namespace API.Data +{ + /// + /// Responsible for creating Series, Volume, Chapter, MangaFiles for use in + /// + public static class DbFactory + { + public static 
Series Series(string name) + { + return new () + { + Name = name, + OriginalName = name, + LocalizedName = name, + NormalizedName = Parser.Parser.Normalize(name), + SortName = name, + Summary = string.Empty, + Volumes = new List() + }; + } + + public static Volume Volume(string volumeNumber) + { + return new Volume() + { + Name = volumeNumber, + Number = (int) Parser.Parser.MinimumNumberFromRange(volumeNumber), + Chapters = new List() + }; + } + + public static Chapter Chapter(ParserInfo info) + { + var specialTreatment = info.IsSpecialInfo(); + var specialTitle = specialTreatment ? info.Filename : info.Chapters; + return new Chapter() + { + Number = specialTreatment ? "0" : Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty, + Range = specialTreatment ? info.Filename : info.Chapters, + Title = (specialTreatment && info.Format == MangaFormat.Book) + ? info.Title + : specialTitle, + Files = new List(), + IsSpecial = specialTreatment, + }; + } + } +} \ No newline at end of file diff --git a/API/Data/LibraryRepository.cs b/API/Data/LibraryRepository.cs index 9a46b44a4..c065bface 100644 --- a/API/Data/LibraryRepository.cs +++ b/API/Data/LibraryRepository.cs @@ -1,10 +1,9 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; +using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using API.DTOs; using API.Entities; +using API.Entities.Enums; using API.Interfaces; using AutoMapper; using AutoMapper.QueryableExtensions; @@ -35,15 +34,14 @@ namespace API.Data public async Task> GetLibraryDtosForUsernameAsync(string userName) { - Stopwatch sw = Stopwatch.StartNew(); - var libs = await _context.Library + return await _context.Library .Include(l => l.AppUsers) .Where(library => library.AppUsers.Any(x => x.UserName == userName)) + .OrderBy(l => l.Name) .ProjectTo(_mapper.ConfigurationProvider) .AsNoTracking() + .AsSingleQuery() .ToListAsync(); - Console.WriteLine("Processed GetLibraryDtosForUsernameAsync in 
{0} milliseconds", sw.ElapsedMilliseconds); - return libs; } public async Task> GetLibrariesAsync() @@ -69,11 +67,23 @@ namespace API.Data .ToListAsync(); } + public async Task GetLibraryTypeAsync(int libraryId) + { + return await _context.Library + .Where(l => l.Id == libraryId) + .AsNoTracking() + .Select(l => l.Type) + .SingleAsync(); + } + public async Task> GetLibraryDtosAsync() { return await _context.Library .Include(f => f.Folders) - .ProjectTo(_mapper.ConfigurationProvider).ToListAsync(); + .OrderBy(l => l.Name) + .ProjectTo(_mapper.ConfigurationProvider) + .AsNoTracking() + .ToListAsync(); } public async Task GetLibraryForIdAsync(int libraryId) @@ -91,6 +101,7 @@ namespace API.Data /// public async Task GetFullLibraryForIdAsync(int libraryId) { + return await _context.Library .Where(x => x.Id == libraryId) .Include(f => f.Folders) @@ -98,19 +109,25 @@ namespace API.Data .ThenInclude(s => s.Volumes) .ThenInclude(v => v.Chapters) .ThenInclude(c => c.Files) + .AsSplitQuery() .SingleAsync(); } public async Task LibraryExists(string libraryName) { - return await _context.Library.AnyAsync(x => x.Name == libraryName); + return await _context.Library + .AsNoTracking() + .AnyAsync(x => x.Name == libraryName); } public async Task> GetLibrariesForUserAsync(AppUser user) { - return await _context.Library.Where(library => library.AppUsers.Contains(user)) + return await _context.Library + .Where(library => library.AppUsers.Contains(user)) .Include(l => l.Folders) - .ProjectTo(_mapper.ConfigurationProvider).ToListAsync(); + .AsNoTracking() + .ProjectTo(_mapper.ConfigurationProvider) + .ToListAsync(); } diff --git a/API/Data/Migrations/20210330134414_IsSpecialOnChapters.Designer.cs b/API/Data/Migrations/20210330134414_IsSpecialOnChapters.Designer.cs new file mode 100644 index 000000000..910085fd2 --- /dev/null +++ b/API/Data/Migrations/20210330134414_IsSpecialOnChapters.Designer.cs @@ -0,0 +1,739 @@ +// +using System; +using API.Data; +using 
Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20210330134414_IsSpecialOnChapters")] + partial class IsSpecialOnChapters + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "5.0.1"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + 
.HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("HideReadOnDetails") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + 
b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + 
.HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId") + .IsUnique(); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + 
b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + 
.HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + 
.HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + 
b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20210330134414_IsSpecialOnChapters.cs b/API/Data/Migrations/20210330134414_IsSpecialOnChapters.cs new file mode 100644 index 000000000..6653a0b77 --- /dev/null +++ b/API/Data/Migrations/20210330134414_IsSpecialOnChapters.cs @@ -0,0 +1,24 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +namespace API.Data.Migrations +{ + public partial class IsSpecialOnChapters : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.AddColumn( + name: "IsSpecial", + table: "Chapter", + type: "INTEGER", + nullable: false, + defaultValue: false); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "IsSpecial", + table: "Chapter"); + } + } +} diff --git a/API/Data/Migrations/20210419222000_BookReaderPreferences.Designer.cs b/API/Data/Migrations/20210419222000_BookReaderPreferences.Designer.cs new file mode 100644 index 000000000..eb4dd459a --- /dev/null +++ b/API/Data/Migrations/20210419222000_BookReaderPreferences.Designer.cs @@ -0,0 +1,748 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + 
[Migration("20210419222000_BookReaderPreferences")] + partial class BookReaderPreferences + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "5.0.4"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + 
.HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookReaderDarkMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + 
b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + 
.HasColumnType("INTEGER"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId") + .IsUnique(); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + 
b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + 
+ b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + 
.IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + 
b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20210419222000_BookReaderPreferences.cs b/API/Data/Migrations/20210419222000_BookReaderPreferences.cs new file mode 100644 index 000000000..0dd1089eb --- /dev/null +++ b/API/Data/Migrations/20210419222000_BookReaderPreferences.cs @@ -0,0 +1,56 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +namespace API.Data.Migrations +{ + public partial class BookReaderPreferences : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.RenameColumn( + name: "HideReadOnDetails", + table: "AppUserPreferences", + newName: "BookReaderMargin"); + + migrationBuilder.AddColumn( + name: "BookReaderDarkMode", + table: "AppUserPreferences", + type: "INTEGER", + nullable: false, + defaultValue: false); + + migrationBuilder.AddColumn( + name: "BookReaderFontFamily", + table: "AppUserPreferences", + type: "TEXT", + nullable: true, + defaultValue: "default"); + + migrationBuilder.AddColumn( + name: "BookReaderLineSpacing", + table: "AppUserPreferences", + type: "INTEGER", + nullable: false, + defaultValue: 100); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "BookReaderDarkMode", + table: "AppUserPreferences"); + + migrationBuilder.DropColumn( + name: "BookReaderFontFamily", + table: "AppUserPreferences"); + + migrationBuilder.DropColumn( + name: "BookReaderLineSpacing", + table: "AppUserPreferences"); + + migrationBuilder.RenameColumn( + name: 
"BookReaderMargin", + table: "AppUserPreferences", + newName: "HideReadOnDetails"); + } + } +} diff --git a/API/Data/Migrations/20210419234652_BookReaderPreferencesFontSize.Designer.cs b/API/Data/Migrations/20210419234652_BookReaderPreferencesFontSize.Designer.cs new file mode 100644 index 000000000..95005cf47 --- /dev/null +++ b/API/Data/Migrations/20210419234652_BookReaderPreferencesFontSize.Designer.cs @@ -0,0 +1,751 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20210419234652_BookReaderPreferencesFontSize")] + partial class BookReaderPreferencesFontSize + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "5.0.4"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + 
b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookReaderDarkMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + 
.HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + 
b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId") + .IsUnique(); + + 
b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + 
+ b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + 
.OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + 
b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20210419234652_BookReaderPreferencesFontSize.cs b/API/Data/Migrations/20210419234652_BookReaderPreferencesFontSize.cs new file mode 100644 index 000000000..1745e4f73 --- /dev/null +++ b/API/Data/Migrations/20210419234652_BookReaderPreferencesFontSize.cs @@ -0,0 +1,24 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +namespace API.Data.Migrations +{ + public partial class BookReaderPreferencesFontSize : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.AddColumn( + name: "BookReaderFontSize", + table: "AppUserPreferences", + type: "INTEGER", + nullable: false, + defaultValue: 100); + } + + protected 
override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "BookReaderFontSize", + table: "AppUserPreferences"); + } + } +} diff --git a/API/Data/Migrations/20210423132900_CustomChapterTitle.Designer.cs b/API/Data/Migrations/20210423132900_CustomChapterTitle.Designer.cs new file mode 100644 index 000000000..693480dd3 --- /dev/null +++ b/API/Data/Migrations/20210423132900_CustomChapterTitle.Designer.cs @@ -0,0 +1,751 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20210423132900_CustomChapterTitle")] + partial class CustomChapterTitle + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "5.0.4"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + 
+ b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookReaderDarkMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + 
.HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + 
b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.HasIndex("Name", "NormalizedName", 
"LocalizedName", "LibraryId") + .IsUnique(); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + 
b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + 
.OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + 
b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20210423132900_CustomChapterTitle.cs b/API/Data/Migrations/20210423132900_CustomChapterTitle.cs new file mode 100644 index 000000000..b3958127c --- /dev/null +++ b/API/Data/Migrations/20210423132900_CustomChapterTitle.cs @@ -0,0 +1,34 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +namespace API.Data.Migrations +{ + public partial class CustomChapterTitle : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "IsSpecial", + table: "Volume"); + + migrationBuilder.AddColumn( + name: "Title", + table: "Chapter", + type: "TEXT", + nullable: true); + } + + protected override void 
Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "Title", + table: "Chapter"); + + migrationBuilder.AddColumn( + name: "IsSpecial", + table: "Volume", + type: "INTEGER", + nullable: false, + defaultValue: false); + } + } +} diff --git a/API/Data/Migrations/20210504184715_TapToPaginatePref.Designer.cs b/API/Data/Migrations/20210504184715_TapToPaginatePref.Designer.cs new file mode 100644 index 000000000..86db800ce --- /dev/null +++ b/API/Data/Migrations/20210504184715_TapToPaginatePref.Designer.cs @@ -0,0 +1,754 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20210504184715_TapToPaginatePref")] + partial class TapToPaginatePref + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "5.0.4"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + 
.HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookReaderDarkMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("BookReaderTapToPaginate") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + 
b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); 
+ + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + 
.HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId") + .IsUnique(); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() 
+ .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + 
.WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + 
.WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20210504184715_TapToPaginatePref.cs b/API/Data/Migrations/20210504184715_TapToPaginatePref.cs new file mode 100644 index 000000000..c1f86ee4b --- /dev/null +++ b/API/Data/Migrations/20210504184715_TapToPaginatePref.cs @@ -0,0 +1,24 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +namespace API.Data.Migrations +{ + public partial class TapToPaginatePref : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.AddColumn( + 
name: "BookReaderTapToPaginate", + table: "AppUserPreferences", + type: "INTEGER", + nullable: false, + defaultValue: false); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "BookReaderTapToPaginate", + table: "AppUserPreferences"); + } + } +} diff --git a/API/Data/Migrations/DataContextModelSnapshot.cs b/API/Data/Migrations/DataContextModelSnapshot.cs index 89fb49145..04e65cfc9 100644 --- a/API/Data/Migrations/DataContextModelSnapshot.cs +++ b/API/Data/Migrations/DataContextModelSnapshot.cs @@ -14,7 +14,7 @@ namespace API.Data.Migrations { #pragma warning disable 612, 618 modelBuilder - .HasAnnotation("ProductVersion", "5.0.1"); + .HasAnnotation("ProductVersion", "5.0.4"); modelBuilder.Entity("API.Entities.AppRole", b => { @@ -127,7 +127,22 @@ namespace API.Data.Migrations b.Property("AppUserId") .HasColumnType("INTEGER"); - b.Property("HideReadOnDetails") + b.Property("BookReaderDarkMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("BookReaderTapToPaginate") .HasColumnType("INTEGER"); b.Property("PageSplitOption") @@ -233,6 +248,9 @@ namespace API.Data.Migrations b.Property("Created") .HasColumnType("TEXT"); + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + b.Property("LastModified") .HasColumnType("TEXT"); @@ -245,6 +263,9 @@ namespace API.Data.Migrations b.Property("Range") .HasColumnType("TEXT"); + b.Property("Title") + .HasColumnType("TEXT"); + b.Property("VolumeId") .HasColumnType("INTEGER"); @@ -409,9 +430,6 @@ namespace API.Data.Migrations b.Property("Created") .HasColumnType("TEXT"); - b.Property("IsSpecial") - .HasColumnType("INTEGER"); - b.Property("LastModified") .HasColumnType("TEXT"); diff --git 
a/API/Data/SeriesRepository.cs b/API/Data/SeriesRepository.cs index 84b904b51..e4a715f11 100644 --- a/API/Data/SeriesRepository.cs +++ b/API/Data/SeriesRepository.cs @@ -1,5 +1,4 @@ using System.Collections.Generic; -using System.Diagnostics; using System.Linq; using System.Threading.Tasks; using API.DTOs; @@ -10,7 +9,6 @@ using API.Interfaces; using AutoMapper; using AutoMapper.QueryableExtensions; using Microsoft.EntityFrameworkCore; -using Microsoft.Extensions.Logging; namespace API.Data { @@ -18,13 +16,11 @@ namespace API.Data { private readonly DataContext _context; private readonly IMapper _mapper; - private readonly ILogger _logger; - public SeriesRepository(DataContext context, IMapper mapper, ILogger logger) + public SeriesRepository(DataContext context, IMapper mapper) { _context = context; _mapper = mapper; - _logger = logger; } public void Add(Series series) @@ -51,6 +47,19 @@ namespace API.Data { return await _context.Series.SingleOrDefaultAsync(x => x.Name == name); } + + public async Task DoesSeriesNameExistInLibrary(string name) + { + var libraries = _context.Series + .AsNoTracking() + .Where(x => x.Name == name) + .Select(s => s.LibraryId); + + return await _context.Series + .AsNoTracking() + .Where(s => libraries.Contains(s.LibraryId) && s.Name == name) + .CountAsync() > 1; + } public Series GetSeriesByName(string name) { @@ -67,15 +76,12 @@ namespace API.Data public async Task> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams) { - var sw = Stopwatch.StartNew(); var query = _context.Series .Where(s => s.LibraryId == libraryId) .OrderBy(s => s.SortName) .ProjectTo(_mapper.ConfigurationProvider) .AsNoTracking(); - - _logger.LogDebug("Processed GetSeriesDtoForLibraryIdAsync in {ElapsedMilliseconds} milliseconds", sw.ElapsedMilliseconds); return await PagedList.CreateAsync(query, userParams.PageNumber, userParams.PageSize); } @@ -279,22 +285,43 @@ namespace API.Data /// /// Returns a list of Series that were added, 
ordered by Created desc /// + /// /// Library to restrict to, if 0, will apply to all libraries /// How many series to pick. /// - public async Task> GetRecentlyAdded(int libraryId, int limit) + public async Task> GetRecentlyAdded(int userId, int libraryId, int limit) { + if (libraryId == 0) + { + var userLibraries = _context.Library + .Include(l => l.AppUsers) + .Where(library => library.AppUsers.Any(user => user.Id == userId)) + .AsNoTracking() + .Select(library => library.Id) + .ToList(); + + return await _context.Series + .Where(s => userLibraries.Contains(s.LibraryId)) + .AsNoTracking() + .OrderByDescending(s => s.Created) + .Take(limit) + .ProjectTo(_mapper.ConfigurationProvider) + .ToListAsync(); + } + return await _context.Series - .Where(s => (libraryId <= 0 || s.LibraryId == libraryId)) - .Take(limit) - .OrderByDescending(s => s.Created) + .Where(s => s.LibraryId == libraryId) .AsNoTracking() + .OrderByDescending(s => s.Created) + .Take(limit) .ProjectTo(_mapper.ConfigurationProvider) .ToListAsync(); + + } /// - /// Returns Series that the user + /// Returns Series that the user has some partial progress on /// /// /// @@ -302,25 +329,42 @@ namespace API.Data /// public async Task> GetInProgress(int userId, int libraryId, int limit) { - var series = await _context.Series - .Join(_context.AppUserProgresses, s => s.Id, progress => progress.SeriesId, (s, progress) => new - { - Series = s, - PagesRead = _context.AppUserProgresses.Where(s1 => s1.SeriesId == s.Id).Sum(s1 => s1.PagesRead), - progress.AppUserId, - progress.LastModified - }) - .Where(s => s.AppUserId == userId - && s.PagesRead > 0 - && s.PagesRead < s.Series.Pages - && (libraryId <= 0 || s.Series.LibraryId == libraryId) ) - .Take(limit) - .OrderByDescending(s => s.LastModified) - .Select(s => s.Series) - .ProjectTo(_mapper.ConfigurationProvider) - .AsNoTracking() - .ToListAsync(); - return series.DistinctBy(s => s.Name); + var series = _context.Series + .Join(_context.AppUserProgresses, s => s.Id, 
progress => progress.SeriesId, (s, progress) => new + { + Series = s, + PagesRead = _context.AppUserProgresses.Where(s1 => s1.SeriesId == s.Id).Sum(s1 => s1.PagesRead), + progress.AppUserId, + LastModified = _context.AppUserProgresses.Where(p => p.Id == progress.Id).Max(p => p.LastModified) + }); + if (libraryId == 0) + { + var userLibraries = _context.Library + .Include(l => l.AppUsers) + .Where(library => library.AppUsers.Any(user => user.Id == userId)) + .AsNoTracking() + .Select(library => library.Id) + .ToList(); + series = series.Where(s => s.AppUserId == userId + && s.PagesRead > 0 + && s.PagesRead < s.Series.Pages + && userLibraries.Contains(s.Series.LibraryId)); + } + else + { + series = series.Where(s => s.AppUserId == userId + && s.PagesRead > 0 + && s.PagesRead < s.Series.Pages + && s.Series.LibraryId == libraryId); + } + var retSeries = await series + .OrderByDescending(s => s.LastModified) + .Select(s => s.Series) + .ProjectTo(_mapper.ConfigurationProvider) + .AsNoTracking() + .ToListAsync(); + + return retSeries.DistinctBy(s => s.Name).Take(limit); } } } \ No newline at end of file diff --git a/API/Data/UnitOfWork.cs b/API/Data/UnitOfWork.cs index ae2f909a9..caa97523f 100644 --- a/API/Data/UnitOfWork.cs +++ b/API/Data/UnitOfWork.cs @@ -3,7 +3,6 @@ using API.Entities; using API.Interfaces; using AutoMapper; using Microsoft.AspNetCore.Identity; -using Microsoft.Extensions.Logging; namespace API.Data { @@ -12,23 +11,23 @@ namespace API.Data private readonly DataContext _context; private readonly IMapper _mapper; private readonly UserManager _userManager; - private readonly ILogger _logger; - public UnitOfWork(DataContext context, IMapper mapper, UserManager userManager, ILogger logger) + public UnitOfWork(DataContext context, IMapper mapper, UserManager userManager) { _context = context; _mapper = mapper; _userManager = userManager; - _logger = logger; } - public ISeriesRepository SeriesRepository => new SeriesRepository(_context, _mapper, _logger); + 
public ISeriesRepository SeriesRepository => new SeriesRepository(_context, _mapper); public IUserRepository UserRepository => new UserRepository(_context, _userManager); public ILibraryRepository LibraryRepository => new LibraryRepository(_context, _mapper); public IVolumeRepository VolumeRepository => new VolumeRepository(_context, _mapper); public ISettingsRepository SettingsRepository => new SettingsRepository(_context, _mapper); + + public IAppUserProgressRepository AppUserProgressRepository => new AppUserProgressRepository(_context); public async Task Complete() { diff --git a/API/Dockerfile b/API/Dockerfile new file mode 100644 index 000000000..d813139f8 --- /dev/null +++ b/API/Dockerfile @@ -0,0 +1,20 @@ +FROM mcr.microsoft.com/dotnet/aspnet:5.0 AS base +WORKDIR /app +EXPOSE 80 +EXPOSE 443 + +FROM mcr.microsoft.com/dotnet/sdk:5.0 AS build +WORKDIR /src +COPY ["API/API.csproj", "API/"] +RUN dotnet restore "API/API.csproj" +COPY . . +WORKDIR "/src/API" +RUN dotnet build "API.csproj" -c Release -o /app/build + +FROM build AS publish +RUN dotnet publish "API.csproj" -c Release -o /app/publish + +FROM base AS final +WORKDIR /app +COPY --from=publish /app/publish . 
+ENTRYPOINT ["dotnet", "API.dll"] diff --git a/API/Entities/AppUserPreferences.cs b/API/Entities/AppUserPreferences.cs index 1a2e6b41b..4bc9c71a3 100644 --- a/API/Entities/AppUserPreferences.cs +++ b/API/Entities/AppUserPreferences.cs @@ -5,13 +5,43 @@ namespace API.Entities public class AppUserPreferences { public int Id { get; set; } - public ReadingDirection ReadingDirection { get; set; } = ReadingDirection.LeftToRight; - public ScalingOption ScalingOption { get; set; } = ScalingOption.FitToHeight; - public PageSplitOption PageSplitOption { get; set; } = PageSplitOption.SplitRightToLeft; /// - /// Whether UI hides read Volumes on Details page + /// Manga Reader Option: What direction should the next/prev page buttons go /// - public bool HideReadOnDetails { get; set; } = false; + public ReadingDirection ReadingDirection { get; set; } = ReadingDirection.LeftToRight; + /// + /// Manga Reader Option: How should the image be scaled to screen + /// + public ScalingOption ScalingOption { get; set; } = ScalingOption.Automatic; + /// + /// Manga Reader Option: Which side of a split image should we show first + /// + public PageSplitOption PageSplitOption { get; set; } = PageSplitOption.SplitRightToLeft; + + /// + /// Book Reader Option: Should the background color be dark + /// + public bool BookReaderDarkMode { get; set; } = false; + /// + /// Book Reader Option: Override extra Margin + /// + public int BookReaderMargin { get; set; } = 15; + /// + /// Book Reader Option: Override line-height + /// + public int BookReaderLineSpacing { get; set; } = 100; + /// + /// Book Reader Option: Override font size + /// + public int BookReaderFontSize { get; set; } = 100; + /// + /// Book Reader Option: Maps to the default Kavita font-family (inherit) or an override + /// + public string BookReaderFontFamily { get; set; } = "default"; + /// + /// Book Reader Option: Allows tapping on side of screens to paginate + /// + public bool BookReaderTapToPaginate { get; set; } = false; 
diff --git a/API/Entities/AppUserProgress.cs b/API/Entities/AppUserProgress.cs index 170a249ac..65dd43296 100644 --- a/API/Entities/AppUserProgress.cs +++ b/API/Entities/AppUserProgress.cs @@ -5,7 +5,7 @@ using API.Entities.Interfaces; namespace API.Entities { /// - /// Represents the progress a single user has on a given Volume. Progress is realistically tracked against the Volume's chapters. + /// Represents the progress a single user has on a given Chapter. /// public class AppUserProgress : IEntityDate { diff --git a/API/Entities/Chapter.cs b/API/Entities/Chapter.cs index b4d957fe8..31f4dc513 100644 --- a/API/Entities/Chapter.cs +++ b/API/Entities/Chapter.cs @@ -1,6 +1,8 @@ using System; using System.Collections.Generic; +using API.Entities.Enums; using API.Entities.Interfaces; +using API.Parser; namespace API.Entities { @@ -26,10 +28,31 @@ namespace API.Entities /// Total number of pages in all MangaFiles /// public int Pages { get; set; } + /// + /// If this Chapter contains files that could only be identified as Series or has Special Identifier from filename + /// + public bool IsSpecial { get; set; } + /// + /// Used for books/specials to display custom title. For non-specials/books, will be set to + /// + public string Title { get; set; } // Relationships public Volume Volume { get; set; } public int VolumeId { get; set; } + public void UpdateFrom(ParserInfo info) + { + Files ??= new List(); + IsSpecial = info.IsSpecialInfo(); + if (IsSpecial) + { + Number = "0"; + } + Title = (IsSpecial && info.Format == MangaFormat.Book) + ? 
info.Title + : Range; + + } } } \ No newline at end of file diff --git a/API/Entities/Enums/MangaFormat.cs b/API/Entities/Enums/MangaFormat.cs index 31ebd5bb3..121aa3e1c 100644 --- a/API/Entities/Enums/MangaFormat.cs +++ b/API/Entities/Enums/MangaFormat.cs @@ -9,6 +9,8 @@ namespace API.Entities.Enums [Description("Archive")] Archive = 1, [Description("Unknown")] - Unknown = 2 + Unknown = 2, + [Description("Book")] + Book = 3 } } \ No newline at end of file diff --git a/API/Entities/Enums/ScalingOption.cs b/API/Entities/Enums/ScalingOption.cs index 7e144100c..2da3b79f7 100644 --- a/API/Entities/Enums/ScalingOption.cs +++ b/API/Entities/Enums/ScalingOption.cs @@ -4,6 +4,7 @@ { FitToHeight = 0, FitToWidth = 1, - Original = 2 + Original = 2, + Automatic = 3 } } \ No newline at end of file diff --git a/API/Entities/MangaFile.cs b/API/Entities/MangaFile.cs index 4be77a4c1..2efb76bfb 100644 --- a/API/Entities/MangaFile.cs +++ b/API/Entities/MangaFile.cs @@ -1,7 +1,8 @@  using System; +using System.IO; using API.Entities.Enums; -using API.Entities.Interfaces; +using API.Extensions; namespace API.Entities { @@ -25,5 +26,11 @@ namespace API.Entities // Relationship Mapping public Chapter Chapter { get; set; } public int ChapterId { get; set; } + + // Methods + public bool HasFileBeenModified() + { + return new FileInfo(FilePath).DoesLastWriteMatch(LastModified); + } } } \ No newline at end of file diff --git a/API/Entities/Series.cs b/API/Entities/Series.cs index 6406e118f..0ad7c8c16 100644 --- a/API/Entities/Series.cs +++ b/API/Entities/Series.cs @@ -45,5 +45,6 @@ namespace API.Entities public List Volumes { get; set; } public Library Library { get; set; } public int LibraryId { get; set; } + } } \ No newline at end of file diff --git a/API/Entities/Volume.cs b/API/Entities/Volume.cs index 999b9a801..dab9f2e1b 100644 --- a/API/Entities/Volume.cs +++ b/API/Entities/Volume.cs @@ -15,12 +15,7 @@ namespace API.Entities public byte[] CoverImage { get; set; } public int Pages { 
get; set; } - /// - /// Represents a Side story that is linked to the original Series. Omake, One Shot, etc. - /// - public bool IsSpecial { get; set; } = false; - - + // Relationships public Series Series { get; set; } diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs index 69e5a4a68..a3406ae27 100644 --- a/API/Extensions/ApplicationServiceExtensions.cs +++ b/API/Extensions/ApplicationServiceExtensions.cs @@ -4,7 +4,6 @@ using API.Interfaces; using API.Interfaces.Services; using API.Services; using API.Services.Tasks; -using AutoMapper; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; @@ -27,8 +26,8 @@ namespace API.Extensions services.AddScoped(); services.AddScoped(); services.AddScoped(); - - + services.AddScoped(); + services.AddDbContext(options => { diff --git a/API/Extensions/ChapterListExtensions.cs b/API/Extensions/ChapterListExtensions.cs new file mode 100644 index 000000000..6362c0571 --- /dev/null +++ b/API/Extensions/ChapterListExtensions.cs @@ -0,0 +1,35 @@ +using System.Collections.Generic; +using System.Linq; +using API.Entities; +using API.Parser; + +namespace API.Extensions +{ + public static class ChapterListExtensions + { + /// + /// Returns first chapter in the list with at least one file + /// + /// + /// + public static Chapter GetFirstChapterWithFiles(this IList chapters) + { + return chapters.FirstOrDefault(c => c.Files.Any()); + } + + /// + /// Gets a single chapter (or null if doesn't exist) where Range matches the info.Chapters property. If the info + /// is then, the filename is used to search against Range or if filename exists within Files of said Chapter. + /// + /// + /// + /// + public static Chapter GetChapterByRange(this IList chapters, ParserInfo info) + { + var specialTreatment = info.IsSpecialInfo(); + return specialTreatment + ? 
chapters.SingleOrDefault(c => c.Range == info.Filename || (c.Files.Select(f => f.FilePath).Contains(info.FullFilePath))) + : chapters.SingleOrDefault(c => c.Range == info.Chapters); + } + } +} \ No newline at end of file diff --git a/API/Extensions/FileInfoExtensions.cs b/API/Extensions/FileInfoExtensions.cs index eda352fc8..82f6e663f 100644 --- a/API/Extensions/FileInfoExtensions.cs +++ b/API/Extensions/FileInfoExtensions.cs @@ -9,5 +9,10 @@ namespace API.Extensions { return comparison.Equals(fileInfo.LastWriteTime); } + + public static bool IsLastWriteLessThan(this FileInfo fileInfo, DateTime comparison) + { + return fileInfo.LastWriteTime < comparison; + } } } \ No newline at end of file diff --git a/API/Extensions/ParserInfoListExtensions.cs b/API/Extensions/ParserInfoListExtensions.cs new file mode 100644 index 000000000..2043583bb --- /dev/null +++ b/API/Extensions/ParserInfoListExtensions.cs @@ -0,0 +1,33 @@ +using System.Collections.Generic; +using System.Linq; +using API.Entities; +using API.Parser; + +namespace API.Extensions +{ + public static class ParserInfoListExtensions + { + /// + /// Selects distinct volume numbers by the "Volumes" key on the ParserInfo + /// + /// + /// + public static IList DistinctVolumes(this IList infos) + { + return infos.Select(p => p.Volumes).Distinct().ToList(); + } + + /// + /// Checks if a list of ParserInfos has a given chapter or not. Lookup occurs on Range property. If a chapter is + /// special, then the is matched, else the field is checked. + /// + /// + /// + /// + public static bool HasInfo(this IList infos, Chapter chapter) + { + return chapter.IsSpecial ? 
infos.Any(v => v.Filename == chapter.Range) + : infos.Any(v => v.Chapters == chapter.Range); + } + } +} \ No newline at end of file diff --git a/API/Extensions/SeriesExtensions.cs b/API/Extensions/SeriesExtensions.cs new file mode 100644 index 000000000..29f495d76 --- /dev/null +++ b/API/Extensions/SeriesExtensions.cs @@ -0,0 +1,20 @@ +using System.Collections.Generic; +using System.Linq; +using API.Entities; + +namespace API.Extensions +{ + public static class SeriesExtensions + { + /// + /// Checks against all the name variables of the Series if it matches anything in the list. + /// + /// + /// + /// + public static bool NameInList(this Series series, IEnumerable list) + { + return list.Any(name => Parser.Parser.Normalize(name) == series.NormalizedName || Parser.Parser.Normalize(name) == Parser.Parser.Normalize(series.Name) || name == series.Name || name == series.LocalizedName || name == series.OriginalName); + } + } +} \ No newline at end of file diff --git a/API/Extensions/VolumeListExtensions.cs b/API/Extensions/VolumeListExtensions.cs new file mode 100644 index 000000000..5b50d382f --- /dev/null +++ b/API/Extensions/VolumeListExtensions.cs @@ -0,0 +1,38 @@ +using System.Collections.Generic; +using System.Linq; +using API.Entities; +using API.Entities.Enums; + +namespace API.Extensions +{ + public static class VolumeListExtensions + { + public static Volume FirstWithChapters(this IList volumes, bool inBookSeries) + { + return inBookSeries + ? volumes.FirstOrDefault(v => v.Chapters.Any()) + : volumes.FirstOrDefault(v => v.Chapters.Any() && (v.Number == 1)); + } + + /// + /// Selects the first Volume to get the cover image from. For a book with only a special, the special will be returned. + /// If there are both specials and non-specials, then the first non-special will be returned. 
+ /// + /// + /// + /// + public static Volume GetCoverImage(this IList volumes, LibraryType libraryType) + { + if (libraryType == LibraryType.Book) + { + return volumes.OrderBy(x => x.Number).FirstOrDefault(); + } + + if (volumes.Any(x => x.Number != 0)) + { + return volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0); + } + return volumes.OrderBy(x => x.Number).FirstOrDefault(); + } + } +} \ No newline at end of file diff --git a/API/Helpers/Converters/CronConverter.cs b/API/Helpers/Converters/CronConverter.cs index c31afb417..cacf018b1 100644 --- a/API/Helpers/Converters/CronConverter.cs +++ b/API/Helpers/Converters/CronConverter.cs @@ -5,7 +5,6 @@ namespace API.Helpers.Converters { public static class CronConverter { - // TODO: this isn't used. Replace strings with Enums? public static readonly IEnumerable Options = new [] { "disabled", @@ -14,7 +13,7 @@ namespace API.Helpers.Converters }; public static string ConvertToCronNotation(string source) { - string destination = ""; + var destination = string.Empty; destination = source.ToLower() switch { "daily" => Cron.Daily(), @@ -29,7 +28,7 @@ namespace API.Helpers.Converters public static string ConvertFromCronNotation(string cronNotation) { - string destination = ""; + var destination = string.Empty; destination = cronNotation.ToLower() switch { "0 0 31 2 *" => "disabled", diff --git a/API/Helpers/PagedList.cs b/API/Helpers/PagedList.cs index 0900f02a5..b87687a6e 100644 --- a/API/Helpers/PagedList.cs +++ b/API/Helpers/PagedList.cs @@ -24,6 +24,7 @@ namespace API.Helpers public static async Task> CreateAsync(IQueryable source, int pageNumber, int pageSize) { + // NOTE: OrderBy warning being thrown here even if query has the orderby statement var count = await source.CountAsync(); var items = await source.Skip((pageNumber - 1) * pageSize).Take(pageSize).ToListAsync(); return new PagedList(items, count, pageNumber, pageSize); diff --git a/API/Interfaces/IAppUserProgressRepository.cs 
b/API/Interfaces/IAppUserProgressRepository.cs new file mode 100644 index 000000000..96ada0c50 --- /dev/null +++ b/API/Interfaces/IAppUserProgressRepository.cs @@ -0,0 +1,11 @@ +using System.Threading.Tasks; +using API.Entities.Enums; + +namespace API.Interfaces +{ + public interface IAppUserProgressRepository + { + Task CleanupAbandonedChapters(); + Task UserHasProgress(LibraryType libraryType, int userId); + } +} \ No newline at end of file diff --git a/API/Interfaces/IBookService.cs b/API/Interfaces/IBookService.cs new file mode 100644 index 000000000..f0b5a8826 --- /dev/null +++ b/API/Interfaces/IBookService.cs @@ -0,0 +1,23 @@ +using System.Collections.Generic; +using System.Threading.Tasks; +using API.Parser; +using VersOne.Epub; + +namespace API.Interfaces +{ + public interface IBookService + { + int GetNumberOfPages(string filePath); + byte[] GetCoverImage(string fileFilePath, bool createThumbnail = true); + Task> CreateKeyToPageMappingAsync(EpubBookRef book); + /// + /// Scopes styles to .reading-section and replaces img src to the passed apiBase + /// + /// + /// + /// + Task ScopeStyles(string stylesheetHtml, string apiBase); + string GetSummaryInfo(string filePath); + ParserInfo ParseInfo(string filePath); + } +} \ No newline at end of file diff --git a/API/Interfaces/ILibraryRepository.cs b/API/Interfaces/ILibraryRepository.cs index 43e0db6e6..f8cedce90 100644 --- a/API/Interfaces/ILibraryRepository.cs +++ b/API/Interfaces/ILibraryRepository.cs @@ -2,6 +2,7 @@ using System.Threading.Tasks; using API.DTOs; using API.Entities; +using API.Entities.Enums; namespace API.Interfaces { @@ -17,5 +18,6 @@ namespace API.Interfaces Task> GetLibrariesAsync(); Task DeleteLibrary(int libraryId); Task> GetLibrariesForUserIdAsync(int userId); + Task GetLibraryTypeAsync(int libraryId); } } \ No newline at end of file diff --git a/API/Interfaces/ISeriesRepository.cs b/API/Interfaces/ISeriesRepository.cs index 34a1715dc..eff8e7c08 100644 --- 
a/API/Interfaces/ISeriesRepository.cs +++ b/API/Interfaces/ISeriesRepository.cs @@ -11,6 +11,7 @@ namespace API.Interfaces void Add(Series series); void Update(Series series); Task GetSeriesByNameAsync(string name); + Task DoesSeriesNameExistInLibrary(string name); Series GetSeriesByName(string name); /// @@ -57,6 +58,6 @@ namespace API.Interfaces Task GetVolumeCoverImageAsync(int volumeId); Task GetSeriesCoverImageAsync(int seriesId); Task> GetInProgress(int userId, int libraryId, int limit); - Task> GetRecentlyAdded(int libraryId, int limit); + Task> GetRecentlyAdded(int userId, int libraryId, int limit); } } \ No newline at end of file diff --git a/API/Interfaces/ITaskScheduler.cs b/API/Interfaces/ITaskScheduler.cs index 5de2f6941..75f70c1fa 100644 --- a/API/Interfaces/ITaskScheduler.cs +++ b/API/Interfaces/ITaskScheduler.cs @@ -10,5 +10,6 @@ void CleanupChapters(int[] chapterIds); void RefreshMetadata(int libraryId, bool forceUpdate = true); void CleanupTemp(); + void RefreshSeriesMetadata(int libraryId, int seriesId); } } \ No newline at end of file diff --git a/API/Interfaces/IUnitOfWork.cs b/API/Interfaces/IUnitOfWork.cs index 24a074e29..fb81313eb 100644 --- a/API/Interfaces/IUnitOfWork.cs +++ b/API/Interfaces/IUnitOfWork.cs @@ -9,6 +9,7 @@ namespace API.Interfaces ILibraryRepository LibraryRepository { get; } IVolumeRepository VolumeRepository { get; } ISettingsRepository SettingsRepository { get; } + IAppUserProgressRepository AppUserProgressRepository { get; } Task Complete(); bool HasChanges(); } diff --git a/API/Interfaces/Services/IMetadataService.cs b/API/Interfaces/Services/IMetadataService.cs index 830cab1eb..70b10b861 100644 --- a/API/Interfaces/Services/IMetadataService.cs +++ b/API/Interfaces/Services/IMetadataService.cs @@ -14,5 +14,11 @@ namespace API.Interfaces.Services public void UpdateMetadata(Chapter chapter, bool forceUpdate); public void UpdateMetadata(Volume volume, bool forceUpdate); public void UpdateMetadata(Series series, bool 
forceUpdate); + /// + /// Performs a forced refresh of metadata just for a series and its nested entities + /// + /// + /// + void RefreshMetadataForSeries(int libraryId, int seriesId); } } \ No newline at end of file diff --git a/API/Interfaces/Services/IScannerService.cs b/API/Interfaces/Services/IScannerService.cs index 695bc59c5..dc9c44623 100644 --- a/API/Interfaces/Services/IScannerService.cs +++ b/API/Interfaces/Services/IScannerService.cs @@ -1,4 +1,5 @@ -namespace API.Interfaces.Services + +namespace API.Interfaces.Services { public interface IScannerService { @@ -9,7 +10,6 @@ /// Library to scan against /// Force overwriting for cover images void ScanLibrary(int libraryId, bool forceUpdate); - void ScanLibraries(); } } \ No newline at end of file diff --git a/API/Middleware/BookRedirectMiddleware.cs b/API/Middleware/BookRedirectMiddleware.cs new file mode 100644 index 000000000..f2e805466 --- /dev/null +++ b/API/Middleware/BookRedirectMiddleware.cs @@ -0,0 +1,22 @@ +using System.Threading.Tasks; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging; + +namespace API.Middleware +{ + public class BookRedirectMiddleware + { + private readonly ILogger _logger; + + public BookRedirectMiddleware(ILogger logger) + { + _logger = logger; + } + + public async Task InvokeAsync(HttpContext context, RequestDelegate next) + { + _logger.LogDebug("BookRedirect Path: {Path}", context.Request.Path.ToString()); + await next.Invoke(context); + } + } +} \ No newline at end of file diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index af0383a81..29d584954 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -3,28 +3,38 @@ using System.IO; using System.Linq; using System.Text.RegularExpressions; using API.Entities.Enums; +using API.Services; namespace API.Parser { public static class Parser { - public static readonly string MangaFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|.tar.gz|.7zip"; - public static readonly string ImageFileExtensions = 
@"\.png|\.jpeg|\.jpg"; + public static readonly string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip"; + public static readonly string BookFileExtensions = @"\.epub"; + public static readonly string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg)"; + public static readonly Regex FontSrcUrlRegex = new Regex("(src:url\\(\"?'?)([a-z0-9/\\._]+)(\"?'?\\))", RegexOptions.IgnoreCase | RegexOptions.Compiled); + private static readonly string XmlRegexExtensions = @"\.xml"; private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled); - private static readonly Regex MangaFileRegex = new Regex(MangaFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled); + private static readonly Regex ArchiveFileRegex = new Regex(ArchiveFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled); private static readonly Regex XmlRegex = new Regex(XmlRegexExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled); + private static readonly Regex BookFileRegex = new Regex(BookFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled); + private static readonly Regex CoverImageRegex = new Regex(@"(?.*)(\b|_)v(?\d+-?\d+)( |_)", RegexOptions.IgnoreCase | RegexOptions.Compiled), + // NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar + new Regex( + @"(?.*)(\b|_)(?!\[)(vol\.?)(?\d+(-\d+)?)(?!\])", + RegexOptions.IgnoreCase | RegexOptions.Compiled), // Historys Strongest Disciple Kenichi_v11_c90-98.zip or Dance in the Vampire Bund v16-17 new Regex( - @"(?.*)(\b|_)v(?\d+(-\d+)?)", + @"(?.*)(\b|_)(?!\[)v(?\d+(-\d+)?)(?!\])", RegexOptions.IgnoreCase | RegexOptions.Compiled), // Kodomo no Jikan vol. 10 new Regex( @@ -32,41 +42,61 @@ namespace API.Parser RegexOptions.IgnoreCase | RegexOptions.Compiled), // Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb) new Regex( - @"(vol\.? ?)(?0*[1-9]+)", + @"(vol\.? 
?)(?\d+)", RegexOptions.IgnoreCase | RegexOptions.Compiled), // Tonikaku Cawaii [Volume 11].cbz new Regex( - @"(volume )(?0?[1-9]+)", + @"(volume )(?\d+)", RegexOptions.IgnoreCase | RegexOptions.Compiled), - // Tower Of God S01 014 (CBT) (digital).cbz new Regex( @"(?.*)(\b|_|)(S(?\d+))", RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz + new Regex( + @"(?.*)( |_|-)(?:Episode)(?: |_)(?\d+(-\d+)?)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), }; private static readonly Regex[] MangaSeriesRegex = new[] { - // Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip + // [SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar new Regex( - @"(?.*)(\b|_)v(?\d+-?\d*)( |_)", + @"^(?.*)( |_)Vol\.?\d+", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip, VanDread-v01-c01.zip + new Regex( + @"(?.*)(\b|_)v(?\d+-?\d*)( |_|-)", RegexOptions.IgnoreCase | RegexOptions.Compiled), // Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA], Black Bullet - v4 c17 [batoto] new Regex( @"(?.*)( - )(?:v|vo|c)\d", RegexOptions.IgnoreCase | RegexOptions.Compiled), - // Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb) + // [dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz new Regex( - @"(?.*) (\b|_|-)v", + @"(?.*) (\b|_|-)(vol)\.?", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + + // Kedouin Makoto - Corpse Party Musume, Chapter 19 [Dametrans].zip + new Regex( + @"(?.*)(?:, Chapter )(?\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + //Knights of Sidonia c000 (S2 LE BD Omake - BLAME!) 
[Habanero Scans] + new Regex( + @"(?.*)(\bc\d+\b)", RegexOptions.IgnoreCase | RegexOptions.Compiled), //Tonikaku Cawaii [Volume 11], Darling in the FranXX - Volume 01.cbz new Regex( @"(?.*)(?: _|-|\[|\() ?v", RegexOptions.IgnoreCase | RegexOptions.Compiled), - //Knights of Sidonia c000 (S2 LE BD Omake - BLAME!) [Habanero Scans] + // Momo The Blood Taker - Chapter 027 Violent Emotion.cbz new Regex( - @"(?.*)(\bc\d+\b)", + @"(?.*) (\b|_|-)(?:chapter)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb) + new Regex( + @"(?.*) (\b|_|-)v", RegexOptions.IgnoreCase | RegexOptions.Compiled), //Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip must be before [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip // due to duplicate version identifiers in file. @@ -81,10 +111,6 @@ namespace API.Parser new Regex( @"(?.*) (?\d+) (?:\(\d{4}\)) ", RegexOptions.IgnoreCase | RegexOptions.Compiled), - // Kedouin Makoto - Corpse Party Musume, Chapter 19 [Dametrans].zip - new Regex( - @"(?.*)(?:, Chapter )(?\d+)", - RegexOptions.IgnoreCase | RegexOptions.Compiled), // Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire) new Regex( @"(?.*) (?\d+(?:.\d+|-\d+)?) \(\d{4}\)", @@ -97,17 +123,158 @@ namespace API.Parser new Regex( @"(?.*)( |_)\((c |ch |chapter )", RegexOptions.IgnoreCase | RegexOptions.Compiled), - // Black Bullet (This is very loose, keep towards bottom) (?.*)(_)(v|vo|c|volume) + // Black Bullet (This is very loose, keep towards bottom) new Regex( @"(?.*)(_)(v|vo|c|volume)( |_)\d+", RegexOptions.IgnoreCase | RegexOptions.Compiled), - // Akiiro Bousou Biyori - 01.jpg, Beelzebub_172_RHS.zip, Cynthia the Mission 29.rar + // [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar new Regex( - @"^(?!Vol)(?.*)( |_)(\d+)", + @"(?.*)( |_)(vol\d+)?( |_)(?:Chp\.? 
?\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1 + new Regex( + @"(?.*)( |_)(?:Chp.? ?\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Chapter 01 + new Regex( + @"^(?!Vol)(?.*)( |_)Chapter( |_)(\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + + // Fullmetal Alchemist chapters 101-108.cbz + new Regex( + @"^(?!vol)(?.*)( |_)(chapters( |_)?)\d+-?\d*", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1 + new Regex( + @"^(?!Vol\.?)(?.*)( |_|-)(?.*)( |_|-)(?.*)ch\d+-?\d?", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Magi - Ch.252-005.cbz + new Regex( + @"(?.*)( ?- ?)Ch\.\d+-?\d*", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // [BAA]_Darker_than_Black_Omake-1.zip + new Regex( + @"^(?!Vol)(?.*)(-)\d+-?\d*", // This catches a lot of stuff ^(?!Vol)(?.*)( |_)(\d+) RegexOptions.IgnoreCase | RegexOptions.Compiled), // [BAA]_Darker_than_Black_c1 (This is very greedy, make sure it's close to last) new Regex( - @"(?.*)( |_)(c)\d+", + @"^(?!Vol)(?.*)( |_|-)(ch?)\d+", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + }; + + private static readonly Regex[] ComicSeriesRegex = new[] + { + // Invincible Vol 01 Family matters (2005) (Digital) + new Regex( + @"(?.*)(\b|_)(vol\.?)( |_)(?\d+(-\d+)?)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS) + new Regex( + @"^(?\d+) (- |_)?(?.*(\d{4})?)( |_)(\(|\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // 01 Spider-Man & Wolverine 01.cbr + new Regex( + @"^(?\d+) (?:- )?(?.*) (\d+)?", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Batman & Wildcat (1 of 3) + new Regex( + @"(?.*(\d{4})?)( |_)(?:\((?\d+) of \d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Teen Titans 
v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) + new Regex( + @"^(?.*)(?: |_)v\d+", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) + new Regex( + @"^(?.*)(?: \d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Batman & Robin the Teen Wonder #0 + new Regex( + @"^(?.*)(?: |_)#\d+", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005) + new Regex( + @"^(?.*)(?: |_)(?\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // The First Asterix Frieze (WebP by Doc MaKS) + new Regex( + @"^(?.*)(?: |_)(?!\(\d{4}|\d{4}-\d{2}\))\(", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // MUST BE LAST: Batman & Daredevil - King of New York + new Regex( + @"^(?.*)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + }; + + private static readonly Regex[] ComicVolumeRegex = new[] + { + // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS) + new Regex( + @"^(?\d+) (- |_)?(?.*(\d{4})?)( |_)(\(|\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // 01 Spider-Man & Wolverine 01.cbr + new Regex( + @"^(?\d+) (?:- )?(?.*) (\d+)?", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Batman & Wildcat (1 of 3) + new Regex( + @"(?.*(\d{4})?)( |_)(?:\((?\d+) of \d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) + new Regex( + @"^(?.*)(?: |_)v(?\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005) + new Regex( + @"^(?.*)(?: |_)(?\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) + new Regex( + @"^(?.*)(?\d+))", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Batman & Robin the Teen Wonder #0 + new Regex( + @"^(?.*)(?: |_)#(?\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + }; + + private static readonly Regex[] ComicChapterRegex = new[] + { + // // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS) + // new Regex( + // @"^(?\d+) (- |_)?(?.*(\d{4})?)( |_)(\(|\d+)", + // RegexOptions.IgnoreCase | RegexOptions.Compiled), + // // 01 Spider-Man & Wolverine 01.cbr + // new Regex( + // @"^(?\d+) (?:- )?(?.*) (\d+)?", // NOTE: WHy is this here without a capture group + // RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Batman & Wildcat (1 of 3) + new Regex( + @"(?.*(\d{4})?)( |_)(?:\((?\d+) of \d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) + new Regex( + @"^(?.*)(?: |_)v(?\d+)(?: |_)(c? ?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) + new Regex( + @"^(?.*)(?: (?\d+))", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Batman & Robin the Teen Wonder #0 + new Regex( + @"^(?.*)(?: |_)#(?\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr + new Regex( + @"^(?.*)(?: |_)(c? ?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? 
?)-", RegexOptions.IgnoreCase | RegexOptions.Compiled), }; @@ -123,41 +290,57 @@ namespace API.Parser private static readonly Regex[] MangaChapterRegex = new[] { + // Historys Strongest Disciple Kenichi_v11_c90-98.zip, ...c90.5-100.5 new Regex( - @"(c|ch)(\.? ?)(?\d+(?:.\d+|-\d+)?)", + @"(c|ch)(\.? ?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)", RegexOptions.IgnoreCase | RegexOptions.Compiled), // [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip new Regex( @"v\d+\.(?\d+(?:.\d+|-\d+)?)", RegexOptions.IgnoreCase | RegexOptions.Compiled), - // Mob Psycho 100 + // Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz (Rare case, if causes issue remove) + new Regex( + @"^(?.*)(?: |_)#(?\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), // Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz, Hinowa ga CRUSH! 018.5 (2019) (Digital) (LuCaZ).cbz new Regex( - @"^(?!Vol)(?.*) (?\d+(?:.\d+|-\d+)?)(?: \(\d{4}\))?", + @"^(?!Vol)(?.*) (?\d+(?:.\d+|-\d+)?)(?: \(\d{4}\))?(\b|_|-)", RegexOptions.IgnoreCase | RegexOptions.Compiled), // Tower Of God S01 014 (CBT) (digital).cbz new Regex( @"(?.*) S(?\d+) (?\d+(?:.\d+|-\d+)?)", RegexOptions.IgnoreCase | RegexOptions.Compiled), - // Beelzebub_01_[Noodles].zip + // Beelzebub_01_[Noodles].zip, Beelzebub_153b_RHS.zip new Regex( - @"^((?!v|vo|vol|Volume).)*( |_)(?\.?\d+(?:.\d+|-\d+)?)( |_|\[|\()", + @"^((?!v|vo|vol|Volume).)*( |_)(?\.?\d+(?:.\d+|-\d+)?)(?b)?( |_|\[|\()", RegexOptions.IgnoreCase | RegexOptions.Compiled), // Yumekui-Merry_DKThias_Chapter21.zip new Regex( @"Chapter(?\d+(-\d+)?)", //(?:.\d+|-\d+)? RegexOptions.IgnoreCase | RegexOptions.Compiled), - + // [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar + new Regex( + @"(?.*)( |_)(vol\d+)?( |_)Chp\.? 
?(?\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + }; private static readonly Regex[] MangaEditionRegex = { - //Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz + // Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz new Regex( @"(?({|\(|\[).* Edition(}|\)|\]))", RegexOptions.IgnoreCase | RegexOptions.Compiled), - //Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz + // Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz new Regex( - @"(\b|_)(?Omnibus)(\b|_)", + @"(\b|_)(?Omnibus(( |_)?Edition)?)(\b|_)?", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // To Love Ru v01 Uncensored (Ch.001-007) + new Regex( + @"(\b|_)(?Uncensored)(\b|_)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // AKIRA - c003 (v01) [Full Color] [Darkhorse].cbz + new Regex( + @"(\b|_)(?Full(?: |_)Color)(\b|_)?", RegexOptions.IgnoreCase | RegexOptions.Compiled), }; @@ -177,6 +360,14 @@ namespace API.Parser RegexOptions.IgnoreCase | RegexOptions.Compiled), }; + private static readonly Regex[] MangaSpecialRegex = + { + // All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle. + new Regex( + @"(?Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + }; + /// /// Parses information out of a file path. Will fallback to using directory name if Series couldn't be parsed @@ -184,27 +375,64 @@ namespace API.Parser /// /// /// Root folder + /// Defaults to Manga. Allows different Regex to be used for parsing. 
/// or null if Series was empty - public static ParserInfo Parse(string filePath, string rootPath) + public static ParserInfo Parse(string filePath, string rootPath, LibraryType type = LibraryType.Manga) { var fileName = Path.GetFileName(filePath); - var directoryName = (new FileInfo(filePath)).Directory?.Name; - var rootName = (new DirectoryInfo(rootPath)).Name; - - var ret = new ParserInfo() - { - Chapters = ParseChapter(fileName), - Series = ParseSeries(fileName), - Volumes = ParseVolume(fileName), - Filename = fileName, - Format = ParseFormat(filePath), - FullFilePath = filePath - }; + ParserInfo ret; - if (ret.Series == string.Empty && directoryName != null && directoryName != rootName) + if (type == LibraryType.Book) { - ret.Series = ParseSeries(directoryName); - if (ret.Series == string.Empty) ret.Series = CleanTitle(directoryName); + ret = new ParserInfo() + { + Chapters = ParseChapter(fileName) ?? ParseComicChapter(fileName), + Series = ParseSeries(fileName) ?? ParseComicSeries(fileName), + Volumes = ParseVolume(fileName) ?? ParseComicVolume(fileName), + Filename = fileName, + Format = ParseFormat(filePath), + FullFilePath = filePath + }; + } + else + { + ret = new ParserInfo() + { + Chapters = type == LibraryType.Manga ? ParseChapter(fileName) : ParseComicChapter(fileName), + Series = type == LibraryType.Manga ? ParseSeries(fileName) : ParseComicSeries(fileName), + Volumes = type == LibraryType.Manga ? 
ParseVolume(fileName) : ParseComicVolume(fileName), + Filename = fileName, + Format = ParseFormat(filePath), + Title = Path.GetFileNameWithoutExtension(fileName), + FullFilePath = filePath + }; + } + + if (ret.Series == string.Empty) + { + // Try to parse information out of each folder all the way to rootPath + var fallbackFolders = DirectoryService.GetFoldersTillRoot(rootPath, Path.GetDirectoryName(filePath)).ToList(); + for (var i = 0; i < fallbackFolders.Count; i++) + { + var folder = fallbackFolders[i]; + if (!string.IsNullOrEmpty(ParseMangaSpecial(folder))) continue; + if (ParseVolume(folder) != "0" || ParseChapter(folder) != "0") continue; + + var series = ParseSeries(folder); + + if ((string.IsNullOrEmpty(series) && i == fallbackFolders.Count - 1)) + { + ret.Series = CleanTitle(folder); + break; + } + + if (!string.IsNullOrEmpty(series)) + { + ret.Series = series; + break; + } + } + } var edition = ParseEdition(fileName); @@ -213,15 +441,25 @@ namespace API.Parser ret.Series = CleanTitle(ret.Series.Replace(edition, "")); ret.Edition = edition; } + + var isSpecial = ParseMangaSpecial(fileName); + // We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that + // could cause a problem as Omake is a special term, but there is valid volume/chapter information. + if (ret.Chapters == "0" && ret.Volumes == "0" && !string.IsNullOrEmpty(isSpecial)) + { + ret.IsSpecial = true; + } + return ret.Series == string.Empty ? 
null : ret; } - private static MangaFormat ParseFormat(string filePath) + public static MangaFormat ParseFormat(string filePath) { if (IsArchive(filePath)) return MangaFormat.Archive; if (IsImage(filePath)) return MangaFormat.Image; + if (IsBook(filePath)) return MangaFormat.Book; return MangaFormat.Unknown; } @@ -245,6 +483,23 @@ namespace API.Parser return string.Empty; } + public static string ParseMangaSpecial(string filePath) + { + foreach (var regex in MangaSpecialRegex) + { + var matches = regex.Matches(filePath); + foreach (Match match in matches) + { + if (match.Groups["Special"].Success && match.Groups["Special"].Value != string.Empty) + { + return match.Groups["Special"].Value; + } + } + } + + return string.Empty; + } + public static string ParseSeries(string filename) { foreach (var regex in MangaSeriesRegex) @@ -261,6 +516,22 @@ namespace API.Parser return string.Empty; } + public static string ParseComicSeries(string filename) + { + foreach (var regex in ComicSeriesRegex) + { + var matches = regex.Matches(filename); + foreach (Match match in matches) + { + if (match.Groups["Series"].Success && match.Groups["Series"].Value != string.Empty) + { + return CleanTitle(match.Groups["Series"].Value); + } + } + } + + return string.Empty; + } public static string ParseVolume(string filename) { @@ -269,7 +540,29 @@ namespace API.Parser var matches = regex.Matches(filename); foreach (Match match in matches) { - if (match.Groups["Volume"] == Match.Empty) continue; + if (!match.Groups["Volume"].Success || match.Groups["Volume"] == Match.Empty) continue; + + var value = match.Groups["Volume"].Value; + if (!value.Contains("-")) return RemoveLeadingZeroes(match.Groups["Volume"].Value); + var tokens = value.Split("-"); + var from = RemoveLeadingZeroes(tokens[0]); + var to = RemoveLeadingZeroes(tokens[1]); + return $"{@from}-{to}"; + + } + } + + return "0"; + } + + public static string ParseComicVolume(string filename) + { + foreach (var regex in ComicVolumeRegex) + { + 
var matches = regex.Matches(filename); + foreach (Match match in matches) + { + if (!match.Groups["Volume"].Success || match.Groups["Volume"] == Match.Empty) continue; var value = match.Groups["Volume"].Value; if (!value.Contains("-")) return RemoveLeadingZeroes(match.Groups["Volume"].Value); @@ -291,7 +584,45 @@ namespace API.Parser var matches = regex.Matches(filename); foreach (Match match in matches) { - if (match.Groups["Chapter"] != Match.Empty) + if (!match.Groups["Chapter"].Success || match.Groups["Chapter"] == Match.Empty) continue; + + var value = match.Groups["Chapter"].Value; + var hasChapterPart = match.Groups["ChapterPart"].Success; + + if (!value.Contains("-")) + { + return RemoveLeadingZeroes(hasChapterPart ? AddChapterPart(value) : value); + } + + var tokens = value.Split("-"); + var from = RemoveLeadingZeroes(tokens[0]); + var to = RemoveLeadingZeroes(hasChapterPart ? AddChapterPart(tokens[1]) : tokens[1]); + return $"{@from}-{to}"; + + } + } + + return "0"; + } + + private static string AddChapterPart(string value) + { + if (value.Contains(".")) + { + return value; + } + + return $"{value}.5"; + } + + public static string ParseComicChapter(string filename) + { + foreach (var regex in ComicChapterRegex) + { + var matches = regex.Matches(filename); + foreach (Match match in matches) + { + if (match.Groups["Chapter"].Success && match.Groups["Chapter"] != Match.Empty) { var value = match.Groups["Chapter"].Value; @@ -321,7 +652,19 @@ namespace API.Parser { if (match.Success) { - title = title.Replace(match.Value, ""); + title = title.Replace(match.Value, "").Trim(); + } + } + } + + foreach (var regex in MangaEditionRegex) + { + var matches = regex.Matches(title); + foreach (Match match in matches) + { + if (match.Success) + { + title = title.Replace(match.Value, "").Trim(); } } } @@ -329,6 +672,25 @@ namespace API.Parser return title; } + private static string RemoveSpecialTags(string title) + { + foreach (var regex in MangaSpecialRegex) + { + var 
matches = regex.Matches(title); + foreach (Match match in matches) + { + if (match.Success) + { + title = title.Replace(match.Value, "").Trim(); + } + } + } + + return title; + } + + + /// /// Translates _ -> spaces, trims front and back of string, removes release groups /// @@ -340,6 +702,8 @@ namespace API.Parser title = RemoveEditionTagHolders(title); + title = RemoveSpecialTags(title); + title = title.Replace("_", " ").Trim(); if (title.EndsWith("-")) { @@ -403,11 +767,16 @@ namespace API.Parser public static bool IsArchive(string filePath) { - return MangaFileRegex.IsMatch(Path.GetExtension(filePath)); + return ArchiveFileRegex.IsMatch(Path.GetExtension(filePath)); + } + public static bool IsBook(string filePath) + { + return BookFileRegex.IsMatch(Path.GetExtension(filePath)); } - public static bool IsImage(string filePath) + public static bool IsImage(string filePath, bool suppressExtraChecks = false) { + if (filePath.StartsWith(".") || (!suppressExtraChecks && filePath.StartsWith("!"))) return false; return ImageRegex.IsMatch(Path.GetExtension(filePath)); } @@ -418,15 +787,34 @@ namespace API.Parser public static float MinimumNumberFromRange(string range) { - var tokens = range.Split("-"); + var tokens = range.Replace("_", string.Empty).Split("-"); return tokens.Min(float.Parse); } public static string Normalize(string name) { - return name.ToLower().Replace("-", "").Replace(" ", "").Replace(":", ""); + return Regex.Replace(name.ToLower(), "[^a-zA-Z0-9]", string.Empty); } - + /// + /// Tests whether the file is a cover image such that: contains "cover", is named "folder", and is an image + /// + /// + /// + public static bool IsCoverImage(string name) + { + return IsImage(name, true) && (CoverImageRegex.IsMatch(name)); + } + + public static bool HasBlacklistedFolderInPath(string path) + { + return path.Contains("__MACOSX"); + } + + + public static bool IsEpub(string filePath) + { + return Path.GetExtension(filePath).ToLower() == ".epub"; + } } } \ No newline 
at end of file diff --git a/API/Parser/ParserInfo.cs b/API/Parser/ParserInfo.cs index ee92ddd9f..e49d87e74 100644 --- a/API/Parser/ParserInfo.cs +++ b/API/Parser/ParserInfo.cs @@ -7,16 +7,36 @@ namespace API.Parser /// public class ParserInfo { - // This can be multiple + /// + /// Represents the parsed chapters from a file. By default, will be 0 which means nothing could be parsed. + /// The chapters can only be a single float or a range of float ie) 1-2. Mainly floats should be multiples of 0.5 representing specials + /// public string Chapters { get; set; } = ""; + /// + /// Represents the parsed series from the file or folder + /// public string Series { get; set; } = ""; - // This can be multiple + /// + /// Represents the parsed volumes from a file. By default, will be 0 which means that nothing could be parsed. + /// If Volumes is 0 and Chapters is 0, the file is a special. If Chapters is non-zero, then no volume could be parsed. + /// Beastars Vol 3-4 will map to "3-4" + /// The volumes can only be a single int or a range of ints ie) 1-2. Float based volumes are not supported. + /// public string Volumes { get; set; } = ""; + /// + /// Filename of the underlying file + /// Beastars v01 (digital).cbz + /// public string Filename { get; init; } = ""; + /// + /// Full filepath of the underlying file + /// C:/Manga/Beastars v01 (digital).cbz + /// public string FullFilePath { get; set; } = ""; /// - /// that represents the type of the file (so caching service knows how to cache for reading) + /// that represents the type of the file + /// Mainly used to show in the UI and so caching service knows how to cache for reading. 
/// public MangaFormat Format { get; set; } = MangaFormat.Unknown; @@ -24,5 +44,40 @@ namespace API.Parser /// This can potentially story things like "Omnibus, Color, Full Contact Edition, Extra, Final, etc" /// public string Edition { get; set; } = ""; + + /// + /// If the file contains no volume/chapter information or contains Special Keywords + /// + public bool IsSpecial { get; set; } + + /// + /// Used for specials or books, stores what the UI should show. + /// Manga does not use this field + /// + public string Title { get; set; } = string.Empty; + + /// + /// If the ParserInfo has the IsSpecial tag or both volumes and chapters are default aka 0 + /// + /// + public bool IsSpecialInfo() + { + return (IsSpecial || (Volumes == "0" && Chapters == "0")); + } + + /// + /// Merges non empty/null properties from info2 into this entity. + /// + /// + public void Merge(ParserInfo info2) + { + if (info2 == null) return; + Chapters = string.IsNullOrEmpty(Chapters) || Chapters == "0" ? info2.Chapters: Chapters; + Volumes = string.IsNullOrEmpty(Volumes) || Volumes == "0" ? info2.Volumes : Volumes; + Edition = string.IsNullOrEmpty(Edition) ? info2.Edition : Edition; + Title = string.IsNullOrEmpty(Title) ? info2.Title : Title; + Series = string.IsNullOrEmpty(Series) ? 
info2.Series : Series; + IsSpecial = IsSpecial || info2.IsSpecial; + } } } \ No newline at end of file diff --git a/API/Program.cs b/API/Program.cs index ca814beb9..1eedcaaa5 100644 --- a/API/Program.cs +++ b/API/Program.cs @@ -2,9 +2,9 @@ using System; using System.Threading.Tasks; using API.Data; using API.Entities; -using API.Services; using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Identity; +using Microsoft.AspNetCore.Server.Kestrel.Core; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; @@ -40,13 +40,6 @@ namespace API var logger = services.GetRequiredService < ILogger>(); logger.LogError(ex, "An error occurred during migration"); } - - // Load all tasks from DI and initialize them (TODO: This is not working - WarmupServicesStartupTask is Null) - var startupTasks = host.Services.GetServices(); - foreach (var startupTask in startupTasks) - { - await startupTask.ExecuteAsync(); - } await host.RunAsync(); } @@ -57,38 +50,12 @@ namespace API { webBuilder.UseKestrel((opts) => { - opts.ListenAnyIP(HttpPort); + opts.ListenAnyIP(HttpPort, options => + { + options.Protocols = HttpProtocols.Http1AndHttp2; + }); }); webBuilder.UseStartup(); }); - - // private static void StartNewInstance() - // { - // //_logger.LogInformation("Starting new instance"); - // - // var module = options.RestartPath; - // - // if (string.IsNullOrWhiteSpace(module)) - // { - // module = Environment.GetCommandLineArgs()[0]; - // } - // - // // string commandLineArgsString; - // // if (options.RestartArgs != null) - // // { - // // commandLineArgsString = options.RestartArgs ?? 
string.Empty; - // // } - // // else - // // { - // // commandLineArgsString = string.Join( - // // ' ', - // // Environment.GetCommandLineArgs().Skip(1).Select(NormalizeCommandLineArgument)); - // // } - // - // //_logger.LogInformation("Executable: {0}", module); - // //_logger.LogInformation("Arguments: {0}", commandLineArgsString); - // - // Process.Start(module, Array.Empty); - // } } } diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs index 03635188a..dc490844c 100644 --- a/API/Services/ArchiveService.cs +++ b/API/Services/ArchiveService.cs @@ -6,10 +6,12 @@ using System.IO.Compression; using System.Linq; using System.Xml.Serialization; using API.Archive; +using API.Comparators; using API.Extensions; using API.Interfaces.Services; using API.Services.Tasks; using Microsoft.Extensions.Logging; +using Microsoft.IO; using SharpCompress.Archives; using SharpCompress.Common; using Image = NetVips.Image; @@ -19,14 +21,18 @@ namespace API.Services /// /// Responsible for manipulating Archive files. 
Used by and /// + // ReSharper disable once ClassWithVirtualMembersNeverInherited.Global public class ArchiveService : IArchiveService { private readonly ILogger _logger; - private const int ThumbnailWidth = 320; // 153w x 230h TODO: Look into optimizing the images to be smaller + private const int ThumbnailWidth = 320; // 153w x 230h + private static readonly RecyclableMemoryStreamManager StreamManager = new(); + private readonly NaturalSortComparer _comparer; public ArchiveService(ILogger logger) { _logger = logger; + _comparer = new NaturalSortComparer(); } /// @@ -34,9 +40,9 @@ namespace API.Services /// /// /// - public ArchiveLibrary CanOpen(string archivePath) + public virtual ArchiveLibrary CanOpen(string archivePath) { - if (!File.Exists(archivePath) || !Parser.Parser.IsArchive(archivePath)) return ArchiveLibrary.NotSupported; + if (!(File.Exists(archivePath) && Parser.Parser.IsArchive(archivePath) || Parser.Parser.IsEpub(archivePath))) return ArchiveLibrary.NotSupported; try { @@ -56,7 +62,7 @@ namespace API.Services } } } - + public int GetNumberOfPagesFromArchive(string archivePath) { if (!IsValidArchive(archivePath)) @@ -74,13 +80,15 @@ namespace API.Services { _logger.LogDebug("Using default compression handling"); using ZipArchive archive = ZipFile.OpenRead(archivePath); - return archive.Entries.Count(e => Parser.Parser.IsImage(e.FullName)); + return archive.Entries.Count(e => !Parser.Parser.HasBlacklistedFolderInPath(e.FullName) && Parser.Parser.IsImage(e.FullName)); } case ArchiveLibrary.SharpCompress: { _logger.LogDebug("Using SharpCompress compression handling"); using var archive = ArchiveFactory.Open(archivePath); - return archive.Entries.Count(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)); + return archive.Entries.Count(entry => !entry.IsDirectory && + !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? 
string.Empty) + && Parser.Parser.IsImage(entry.Key)); } case ArchiveLibrary.NotSupported: _logger.LogError("[GetNumberOfPagesFromArchive] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath); @@ -96,11 +104,42 @@ return 0; } } + + /// + /// Finds the first instance of a folder entry and returns it + /// + /// + /// Entry name of match, null if no match + public string FindFolderEntry(IEnumerable entryFullNames) + { + var result = entryFullNames + .FirstOrDefault(x => !Path.EndsInDirectorySeparator(x) && !Parser.Parser.HasBlacklistedFolderInPath(x) + && Parser.Parser.IsCoverImage(x)); + + return string.IsNullOrEmpty(result) ? null : result; + } + + /// + /// Returns first entry that is an image and is not in a blacklisted folder path. Uses for ordering files + /// + /// + /// Entry name of match, null if no match + public string FirstFileEntry(IEnumerable entryFullNames) + { + var result = entryFullNames.OrderBy(Path.GetFileName, _comparer) + .FirstOrDefault(x => !Parser.Parser.HasBlacklistedFolderInPath(x) + && Parser.Parser.IsImage(x)); + + return string.IsNullOrEmpty(result) ? null : result; + } + /// /// Generates byte array of cover image. - /// Given a path to a compressed file (zip, rar, cbz, cbr, etc), will ensure the first image is returned unless - /// a folder.extension exists in the root directory of the compressed file. + /// Given a path to a compressed file , will ensure the first image (respects directory structure) is returned unless + /// a folder/cover.(image extension) exists in the compressed file (if duplicate, the first is chosen) + /// + /// This skips over any __MACOSX folder/file iteration. /// /// /// Create a smaller variant of file extracted from archive. Archive images are usually 1MB each. 
@@ -117,17 +156,28 @@ namespace API.Services { _logger.LogDebug("Using default compression handling"); using var archive = ZipFile.OpenRead(archivePath); - var folder = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder"); - var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList(); - var entry = folder ?? entries[0]; + var entryNames = archive.Entries.Select(e => e.FullName).ToArray(); - return createThumbnail ? CreateThumbnail(entry) : ConvertEntryToByteArray(entry); + var entryName = FindFolderEntry(entryNames) ?? FirstFileEntry(entryNames); + var entry = archive.Entries.Single(e => e.FullName == entryName); + using var stream = entry.Open(); + + return createThumbnail ? CreateThumbnail(entry.FullName, stream) : ConvertEntryToByteArray(entry); } case ArchiveLibrary.SharpCompress: { _logger.LogDebug("Using SharpCompress compression handling"); using var archive = ArchiveFactory.Open(archivePath); - return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), createThumbnail); + var entryNames = archive.Entries.Where(archiveEntry => !archiveEntry.IsDirectory).Select(e => e.Key).ToList(); + + var entryName = FindFolderEntry(entryNames) ?? FirstFileEntry(entryNames); + var entry = archive.Entries.Single(e => e.Key == entryName); + + using var ms = StreamManager.GetStream(); + entry.WriteTo(ms); + ms.Position = 0; + + return createThumbnail ? CreateThumbnail(entry.Key, ms, Path.GetExtension(entry.Key)) : ms.ToArray(); } case ArchiveLibrary.NotSupported: _logger.LogError("[GetCoverImage] This archive cannot be read: {ArchivePath}. 
Defaulting to no cover image", archivePath); @@ -145,42 +195,12 @@ namespace API.Services return Array.Empty(); } - private byte[] FindCoverImage(IEnumerable entries, bool createThumbnail) - { - var images = entries.ToList(); - foreach (var entry in images) - { - if (Path.GetFileNameWithoutExtension(entry.Key).ToLower() == "folder") - { - using var ms = new MemoryStream(); - entry.WriteTo(ms); - ms.Position = 0; - return createThumbnail ? CreateThumbnail(ms.ToArray(), Path.GetExtension(entry.Key)) : ms.ToArray(); - } - } - - if (images.Any()) - { - var entry = images.OrderBy(e => e.Key).FirstOrDefault(); - if (entry == null) return Array.Empty(); - using var ms = new MemoryStream(); - entry.WriteTo(ms); - ms.Position = 0; - var data = ms.ToArray(); - return createThumbnail ? CreateThumbnail(data, Path.GetExtension(entry.Key)) : data; - } - - return Array.Empty(); - } - private static byte[] ConvertEntryToByteArray(ZipArchiveEntry entry) { using var stream = entry.Open(); - using var ms = new MemoryStream(); + using var ms = StreamManager.GetStream(); stream.CopyTo(ms); - var data = ms.ToArray(); - - return data; + return ms.ToArray(); } /// @@ -194,30 +214,10 @@ namespace API.Services // Sometimes ZipArchive will list the directory and others it will just keep it in the FullName return archive.Entries.Count > 0 && !Path.HasExtension(archive.Entries.ElementAt(0).FullName) || - archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar)); - } - - private byte[] CreateThumbnail(byte[] entry, string formatExtension = ".jpg") - { - if (!formatExtension.StartsWith(".")) - { - formatExtension = "." + formatExtension; - } - - try - { - using var thumbnail = Image.ThumbnailBuffer(entry, ThumbnailWidth); - return thumbnail.WriteToBuffer(formatExtension); - } - catch (Exception ex) - { - _logger.LogError(ex, "[CreateThumbnail] There was a critical error and prevented thumbnail generation. 
Defaulting to no cover image"); - } - - return Array.Empty(); + archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !Parser.Parser.HasBlacklistedFolderInPath(e.FullName)); } - private byte[] CreateThumbnail(ZipArchiveEntry entry, string formatExtension = ".jpg") + private byte[] CreateThumbnail(string entryName, Stream stream, string formatExtension = ".jpg") { if (!formatExtension.StartsWith(".")) { @@ -225,13 +225,12 @@ namespace API.Services } try { - using var stream = entry.Open(); using var thumbnail = Image.ThumbnailStream(stream, ThumbnailWidth); return thumbnail.WriteToBuffer(formatExtension); } catch (Exception ex) { - _logger.LogError(ex, "There was a critical error and prevented thumbnail generation on {EntryName}. Defaulting to no cover image", entry.FullName); + _logger.LogError(ex, "There was a critical error and prevented thumbnail generation on {EntryName}. Defaulting to no cover image", entryName); } return Array.Empty(); @@ -250,7 +249,7 @@ namespace API.Services return false; } - if (Parser.Parser.IsArchive(archivePath)) return true; + if (Parser.Parser.IsArchive(archivePath) || Parser.Parser.IsEpub(archivePath)) return true; _logger.LogError("Archive {ArchivePath} is not a valid archive", archivePath); return false; @@ -261,9 +260,9 @@ namespace API.Services { foreach (var entry in entries) { - if (Path.GetFileNameWithoutExtension(entry.Key).ToLower().EndsWith("comicinfo") && Parser.Parser.IsXml(entry.Key)) + if (Path.GetFileNameWithoutExtension(entry.Key).ToLower().EndsWith("comicinfo") && !Parser.Parser.HasBlacklistedFolderInPath(entry.Key) && Parser.Parser.IsXml(entry.Key)) { - using var ms = new MemoryStream(); + using var ms = StreamManager.GetStream(); entry.WriteTo(ms); ms.Position = 0; @@ -295,7 +294,7 @@ namespace API.Services { _logger.LogDebug("Using default compression handling"); using var archive = ZipFile.OpenRead(archivePath); - var entry = archive.Entries.SingleOrDefault(x => 
Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName)); + var entry = archive.Entries.SingleOrDefault(x => !Parser.Parser.HasBlacklistedFolderInPath(x.FullName) && Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName)); if (entry != null) { using var stream = entry.Open(); @@ -308,14 +307,16 @@ namespace API.Services { _logger.LogDebug("Using SharpCompress compression handling"); using var archive = ArchiveFactory.Open(archivePath); - info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsXml(entry.Key))); + info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory + && !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty) + && Parser.Parser.IsXml(entry.Key))); break; } case ArchiveLibrary.NotSupported: - _logger.LogError("[GetSummaryInfo] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath); + _logger.LogError("[GetSummaryInfo] This archive cannot be read: {ArchivePath}", archivePath); return summary; default: - _logger.LogError("[GetSummaryInfo] There was an exception when reading archive stream: {ArchivePath}. 
Defaulting to 0 pages", archivePath); + _logger.LogError("[GetSummaryInfo] There was an exception when reading archive stream: {ArchivePath}", archivePath); return summary; } @@ -323,8 +324,6 @@ namespace API.Services { return info.Summary; } - - _logger.LogError("[GetSummaryInfo] Could not parse archive file: {Filepath}", archivePath); } catch (Exception ex) { @@ -392,14 +391,16 @@ namespace API.Services { _logger.LogDebug("Using SharpCompress compression handling"); using var archive = ArchiveFactory.Open(archivePath); - ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath); + ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory + && !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty) + && Parser.Parser.IsImage(entry.Key)), extractPath); break; } case ArchiveLibrary.NotSupported: - _logger.LogError("[GetNumberOfPagesFromArchive] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath); + _logger.LogError("[ExtractArchive] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath); return; default: - _logger.LogError("[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath); + _logger.LogError("[ExtractArchive] There was an exception when reading archive stream: {ArchivePath}. 
Defaulting to 0 pages", archivePath); return; } diff --git a/API/Services/BookService.cs b/API/Services/BookService.cs new file mode 100644 index 000000000..2dfbd4798 --- /dev/null +++ b/API/Services/BookService.cs @@ -0,0 +1,270 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text.RegularExpressions; +using System.Threading.Tasks; +using API.Entities.Enums; +using API.Interfaces; +using API.Parser; +using ExCSS; +using HtmlAgilityPack; +using Microsoft.Extensions.Logging; +using NetVips; +using VersOne.Epub; + +namespace API.Services +{ + public class BookService : IBookService + { + private readonly ILogger _logger; + + private const int ThumbnailWidth = 320; // 153w x 230h + private readonly StylesheetParser _cssParser = new (); + + public BookService(ILogger logger) + { + _logger = logger; + } + + private static bool HasClickableHrefPart(HtmlNode anchor) + { + return anchor.GetAttributeValue("href", string.Empty).Contains("#") + && anchor.GetAttributeValue("tabindex", string.Empty) != "-1" + && anchor.GetAttributeValue("role", string.Empty) != "presentation"; + } + + public static string GetContentType(EpubContentType type) + { + string contentType; + switch (type) + { + case EpubContentType.IMAGE_GIF: + contentType = "image/gif"; + break; + case EpubContentType.IMAGE_PNG: + contentType = "image/png"; + break; + case EpubContentType.IMAGE_JPEG: + contentType = "image/jpeg"; + break; + case EpubContentType.FONT_OPENTYPE: + contentType = "font/otf"; + break; + case EpubContentType.FONT_TRUETYPE: + contentType = "font/ttf"; + break; + case EpubContentType.IMAGE_SVG: + contentType = "image/svg+xml"; + break; + default: + contentType = "application/octet-stream"; + break; + } + + return contentType; + } + + public static void UpdateLinks(HtmlNode anchor, Dictionary mappings, int currentPage) + { + if (anchor.Name != "a") return; + var hrefParts = BookService.CleanContentKeys(anchor.GetAttributeValue("href", 
string.Empty)) + .Split("#"); + var mappingKey = hrefParts[0]; + if (!mappings.ContainsKey(mappingKey)) + { + if (HasClickableHrefPart(anchor)) + { + var part = hrefParts.Length > 1 + ? hrefParts[1] + : anchor.GetAttributeValue("href", string.Empty); + anchor.Attributes.Add("kavita-page", $"{currentPage}"); + anchor.Attributes.Add("kavita-part", part); + anchor.Attributes.Remove("href"); + anchor.Attributes.Add("href", "javascript:void(0)"); + } + else + { + anchor.Attributes.Add("target", "_blank"); + } + + return; + } + + var mappedPage = mappings[mappingKey]; + anchor.Attributes.Add("kavita-page", $"{mappedPage}"); + if (hrefParts.Length > 1) + { + anchor.Attributes.Add("kavita-part", + hrefParts[1]); + } + + anchor.Attributes.Remove("href"); + anchor.Attributes.Add("href", "javascript:void(0)"); + } + + public async Task ScopeStyles(string stylesheetHtml, string apiBase) + { + var styleContent = RemoveWhiteSpaceFromStylesheets(stylesheetHtml); + styleContent = + Parser.Parser.FontSrcUrlRegex.Replace(styleContent, "$1" + apiBase + "$2" + "$3"); + + styleContent = styleContent.Replace("body", ".reading-section"); + + var stylesheet = await _cssParser.ParseAsync(styleContent); + foreach (var styleRule in stylesheet.StyleRules) + { + if (styleRule.Selector.Text == ".reading-section") continue; + if (styleRule.Selector.Text.Contains(",")) + { + styleRule.Text = styleRule.Text.Replace(styleRule.SelectorText, + string.Join(", ", + styleRule.Selector.Text.Split(",").Select(s => ".reading-section " + s))); + continue; + } + styleRule.Text = ".reading-section " + styleRule.Text; + } + return RemoveWhiteSpaceFromStylesheets(stylesheet.ToCss()); + } + + public string GetSummaryInfo(string filePath) + { + if (!IsValidFile(filePath)) return string.Empty; + + var epubBook = EpubReader.OpenBook(filePath); + return epubBook.Schema.Package.Metadata.Description; + } + + private bool IsValidFile(string filePath) + { + if (!File.Exists(filePath)) + { + _logger.LogError("Book 
{EpubFile} could not be found", filePath); + return false; + } + + if (Parser.Parser.IsBook(filePath)) return true; + + _logger.LogError("Book {EpubFile} is not a valid EPUB", filePath); + return false; + } + + public int GetNumberOfPages(string filePath) + { + if (!IsValidFile(filePath) || !Parser.Parser.IsEpub(filePath)) return 0; + + try + { + var epubBook = EpubReader.OpenBook(filePath); + return epubBook.Content.Html.Count; + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an exception getting number of pages, defaulting to 0"); + } + + return 0; + } + + public static string CleanContentKeys(string key) + { + return key.Replace("../", string.Empty); + } + + public async Task> CreateKeyToPageMappingAsync(EpubBookRef book) + { + var dict = new Dictionary(); + var pageCount = 0; + foreach (var contentFileRef in await book.GetReadingOrderAsync()) + { + if (contentFileRef.ContentType != EpubContentType.XHTML_1_1) continue; + dict.Add(contentFileRef.FileName, pageCount); + pageCount += 1; + } + + return dict; + } + + /// + /// Parses out Title from book. Chapters and Volumes will always be "0". If there is any exception reading book (malformed books) + /// then null is returned. 
+ /// + /// + /// + public ParserInfo ParseInfo(string filePath) + { + try + { + var epubBook = EpubReader.OpenBook(filePath); + + return new ParserInfo() + { + Chapters = "0", + Edition = "", + Format = MangaFormat.Book, + Filename = Path.GetFileName(filePath), + Title = epubBook.Title, + FullFilePath = filePath, + IsSpecial = false, + Series = epubBook.Title, + Volumes = "0" + }; + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an exception when opening epub book: {FileName}", filePath); + } + + return null; + } + + public byte[] GetCoverImage(string fileFilePath, bool createThumbnail = true) + { + if (!IsValidFile(fileFilePath)) return Array.Empty(); + + var epubBook = EpubReader.OpenBook(fileFilePath); + + + try + { + // Try to get the cover image from OPF file, if not set, try to parse it from all the files, then result to the first one. + var coverImageContent = epubBook.Content.Cover + ?? epubBook.Content.Images.Values.FirstOrDefault(file => Parser.Parser.IsCoverImage(file.FileName)) + ?? epubBook.Content.Images.Values.First(); + + if (coverImageContent == null) return Array.Empty(); + + if (createThumbnail) + { + using var stream = new MemoryStream(coverImageContent.ReadContent()); + + using var thumbnail = Image.ThumbnailStream(stream, ThumbnailWidth); + return thumbnail.WriteToBuffer(".jpg"); + } + + return coverImageContent.ReadContent(); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was a critical error and prevented thumbnail generation on {BookFile}. 
Defaulting to no cover image", fileFilePath); + } + + return Array.Empty(); + } + + private static string RemoveWhiteSpaceFromStylesheets(string body) + { + body = Regex.Replace(body, @"[a-zA-Z]+#", "#"); + body = Regex.Replace(body, @"[\n\r]+\s*", string.Empty); + body = Regex.Replace(body, @"\s+", " "); + body = Regex.Replace(body, @"\s?([:,;{}])\s?", "$1"); + body = body.Replace(";}", "}"); + body = Regex.Replace(body, @"([\s:]0)(px|pt|%|em)", "$1"); + + // Remove comments from CSS + body = Regex.Replace(body, @"/\*[\d\D]*?\*/", string.Empty); + + return body; + } + } +} \ No newline at end of file diff --git a/API/Services/CacheService.cs b/API/Services/CacheService.cs index 1a847fdf1..4dcad4dc5 100644 --- a/API/Services/CacheService.cs +++ b/API/Services/CacheService.cs @@ -4,6 +4,7 @@ using System.Linq; using System.Threading.Tasks; using API.Comparators; using API.Entities; +using API.Entities.Enums; using API.Extensions; using API.Interfaces; using API.Interfaces.Services; @@ -20,7 +21,8 @@ namespace API.Services private readonly NumericComparer _numericComparer; public static readonly string CacheDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "cache/")); - public CacheService(ILogger logger, IUnitOfWork unitOfWork, IArchiveService archiveService, IDirectoryService directoryService) + public CacheService(ILogger logger, IUnitOfWork unitOfWork, IArchiveService archiveService, + IDirectoryService directoryService) { _logger = logger; _unitOfWork = unitOfWork; @@ -31,7 +33,6 @@ namespace API.Services public void EnsureCacheDirectory() { - _logger.LogDebug("Checking if valid Cache directory: {CacheDirectory}", CacheDirectory); if (!DirectoryService.ExistOrCreate(CacheDirectory)) { _logger.LogError("Cache directory {CacheDirectory} is not accessible or does not exist. 
Creating...", CacheDirectory); @@ -41,12 +42,29 @@ namespace API.Services public async Task Ensure(int chapterId) { EnsureCacheDirectory(); - Chapter chapter = await _unitOfWork.VolumeRepository.GetChapterAsync(chapterId); + var chapter = await _unitOfWork.VolumeRepository.GetChapterAsync(chapterId); + var files = chapter.Files.ToList(); + var fileCount = files.Count; + var extractPath = GetCachePath(chapterId); + var extraPath = ""; - foreach (var file in chapter.Files) + foreach (var file in files) { - var extractPath = GetCachePath(chapterId); - _archiveService.ExtractArchive(file.FilePath, extractPath); + if (fileCount > 1) + { + extraPath = file.Id + ""; + } + + if (file.Format == MangaFormat.Archive) + { + _archiveService.ExtractArchive(file.FilePath, Path.Join(extractPath, extraPath)); + } + + } + + if (fileCount > 1) + { + new DirectoryInfo(extractPath).Flatten(); } return chapter; @@ -111,6 +129,11 @@ namespace API.Services var path = GetCachePath(chapter.Id); var files = _directoryService.GetFilesWithExtension(path, Parser.Parser.ImageFileExtensions); Array.Sort(files, _numericComparer); + + if (files.Length == 0) + { + return (files.ElementAt(0), mangaFile); + } // Since array is 0 based, we need to keep that in account (only affects last image) if (page == files.Length) diff --git a/API/Services/ComicInfo.cs b/API/Services/ComicInfo.cs index 1f994d224..8277cfb35 100644 --- a/API/Services/ComicInfo.cs +++ b/API/Services/ComicInfo.cs @@ -2,14 +2,14 @@ { public class ComicInfo { - public string Summary; - public string Title; - public string Series; - public string Notes; - public string Publisher; - public string Genre; - public int PageCount; - public string LanguageISO; - public string Web; + public string Summary { get; set; } + public string Title { get; set; } + public string Series { get; set; } + public string Notes { get; set; } + public string Publisher { get; set; } + public string Genre { get; set; } + public int PageCount { get; set; } + 
public string LanguageISO { get; set; } + public string Web { get; set; } } } \ No newline at end of file diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs index 7ba691bc9..40271ccd0 100644 --- a/API/Services/DirectoryService.cs +++ b/API/Services/DirectoryService.cs @@ -4,12 +4,9 @@ using System.Collections.Immutable; using System.IO; using System.Linq; using System.Text.RegularExpressions; -using System.Threading; using System.Threading.Tasks; -using API.DTOs; using API.Interfaces.Services; using Microsoft.Extensions.Logging; -using NetVips; namespace API.Services { @@ -40,6 +37,40 @@ namespace API.Services reSearchPattern.IsMatch(Path.GetExtension(file))); } + /// + /// Returns a list of folders from end of fullPath to rootPath. + /// + /// Example) (C:/Manga/, C:/Manga/Love Hina/Specials/Omake/) returns [Omake, Specials, Love Hina] + /// + /// + /// + /// + public static IEnumerable GetFoldersTillRoot(string rootPath, string fullPath) + { + var separator = Path.AltDirectorySeparatorChar; + if (fullPath.Contains(Path.DirectorySeparatorChar)) + { + fullPath = fullPath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + } + + if (rootPath.Contains(Path.DirectorySeparatorChar)) + { + rootPath = rootPath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + } + + var path = fullPath.EndsWith(separator) ? fullPath.Substring(0, fullPath.Length - 1) : fullPath; + var root = rootPath.EndsWith(separator) ? 
rootPath.Substring(0, rootPath.Length - 1) : rootPath; + var paths = new List(); + while (Path.GetDirectoryName(path) != Path.GetDirectoryName(root)) + { + var folder = new DirectoryInfo(path).Name; + paths.Add(folder); + path = path.Replace(separator + folder, string.Empty); + } + + return paths; + } + public bool Exists(string directory) { var di = new DirectoryInfo(directory); @@ -182,9 +213,9 @@ namespace API.Services /// Action to apply on file path /// Regex pattern to search against /// - public static int TraverseTreeParallelForEach(string root, Action action, string searchPattern) - { - //Count of files traversed and timer for diagnostic output + public static int TraverseTreeParallelForEach(string root, Action action, string searchPattern, ILogger logger) + { + //Count of files traversed and timer for diagnostic output var fileCount = 0; // Determine whether to parallelize file processing on each folder based on processor count. @@ -209,11 +240,13 @@ namespace API.Services // Thrown if we do not have discovery permission on the directory. catch (UnauthorizedAccessException e) { Console.WriteLine(e.Message); + logger.LogError(e, "Unauthorized access on {Directory}", currentDir); continue; } // Thrown if another process has deleted the directory after we retrieved its name. catch (DirectoryNotFoundException e) { Console.WriteLine(e.Message); + logger.LogError(e, "Directory not found on {Directory}", currentDir); continue; } @@ -235,24 +268,27 @@ namespace API.Services } // Execute in parallel if there are enough files in the directory. - // Otherwise, execute sequentially.Files are opened and processed + // Otherwise, execute sequentially. Files are opened and processed // synchronously but this could be modified to perform async I/O. 
try { - if (files.Length < procCount) { - foreach (var file in files) { - action(file); - fileCount++; - } - } - else { - Parallel.ForEach(files, () => 0, (file, _, localCount) => - { action(file); - return ++localCount; - }, - (c) => { - // ReSharper disable once AccessToModifiedClosure - Interlocked.Add(ref fileCount, c); - }); + // if (files.Length < procCount) { + // foreach (var file in files) { + // action(file); + // fileCount++; + // } + // } + // else { + // Parallel.ForEach(files, () => 0, (file, _, localCount) => + // { action(file); + // return ++localCount; + // }, + // (c) => { + // Interlocked.Add(ref fileCount, c); + // }); + // } + foreach (var file in files) { + action(file); + fileCount++; } } catch (AggregateException ae) { diff --git a/API/Services/MetadataService.cs b/API/Services/MetadataService.cs index 9c8ee95bb..122fc90c6 100644 --- a/API/Services/MetadataService.cs +++ b/API/Services/MetadataService.cs @@ -4,7 +4,9 @@ using System.Diagnostics; using System.IO; using System.Linq; using System.Threading.Tasks; +using API.Comparators; using API.Entities; +using API.Entities.Enums; using API.Extensions; using API.Interfaces; using API.Interfaces.Services; @@ -17,12 +19,15 @@ namespace API.Services private readonly IUnitOfWork _unitOfWork; private readonly ILogger _logger; private readonly IArchiveService _archiveService; + private readonly IBookService _bookService; + private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer(); - public MetadataService(IUnitOfWork unitOfWork, ILogger logger, IArchiveService archiveService) + public MetadataService(IUnitOfWork unitOfWork, ILogger logger, IArchiveService archiveService, IBookService bookService) { _unitOfWork = unitOfWork; _logger = logger; _archiveService = archiveService; + _bookService = bookService; } private static bool ShouldFindCoverImage(byte[] coverImage, bool forceUpdate = false) @@ -30,32 +35,43 @@ namespace API.Services return forceUpdate || coverImage == 
null || !coverImage.Any(); } - public void UpdateMetadata(Chapter chapter, bool forceUpdate) + private byte[] GetCoverImage(MangaFile file, bool createThumbnail = true) { - if (chapter != null && ShouldFindCoverImage(chapter.CoverImage, forceUpdate)) + if (file.Format == MangaFormat.Book) { - chapter.Files ??= new List(); - var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault(); - if (firstFile != null) chapter.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true); + return _bookService.GetCoverImage(file.FilePath, createThumbnail); + } + else + { + return _archiveService.GetCoverImage(file.FilePath, createThumbnail); } } - + public void UpdateMetadata(Chapter chapter, bool forceUpdate) + { + var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault(); + if (ShouldFindCoverImage(chapter.CoverImage, forceUpdate) && firstFile != null && !new FileInfo(firstFile.FilePath).IsLastWriteLessThan(firstFile.LastModified)) + { + chapter.Files ??= new List(); + chapter.CoverImage = GetCoverImage(firstFile); + } + } + + public void UpdateMetadata(Volume volume, bool forceUpdate) { if (volume != null && ShouldFindCoverImage(volume.CoverImage, forceUpdate)) { - // TODO: Create a custom sorter for Chapters so it's consistent across the application volume.Chapters ??= new List(); - var firstChapter = volume.Chapters.OrderBy(x => Double.Parse(x.Number)).FirstOrDefault(); - - var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault(); + var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).FirstOrDefault(); + // Skip calculating Cover Image (I/O) if the chapter already has it set if (firstChapter == null || ShouldFindCoverImage(firstChapter.CoverImage)) { - if (firstFile != null && !new FileInfo(firstFile.FilePath).DoesLastWriteMatch(firstFile.LastModified)) + var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault(); + if (firstFile != null && !new 
FileInfo(firstFile.FilePath).IsLastWriteLessThan(firstFile.LastModified)) { - volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true); + volume.CoverImage = GetCoverImage(firstFile); } } else @@ -67,45 +83,67 @@ namespace API.Services public void UpdateMetadata(Series series, bool forceUpdate) { - // NOTE: this doesn't actually invoke finding a new cover. Also all these should be grouped ideally so we limit - // disk I/O to one method. if (series == null) return; if (ShouldFindCoverImage(series.CoverImage, forceUpdate)) { series.Volumes ??= new List(); - var firstCover = series.Volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0); + var firstCover = series.Volumes.GetCoverImage(series.Library.Type); + byte[] coverImage = null; if (firstCover == null && series.Volumes.Any()) { - firstCover = series.Volumes.FirstOrDefault(x => x.Number == 0); + // If firstCover is null and one volume, the whole series is Chapters under Vol 0. + if (series.Volumes.Count == 1) + { + coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparer) + .FirstOrDefault(c => !c.IsSpecial)?.CoverImage; + } + + if (coverImage == null) + { + coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparer) + .FirstOrDefault()?.CoverImage; + } } - series.CoverImage = firstCover?.CoverImage; + series.CoverImage = firstCover?.CoverImage ?? 
coverImage; } + UpdateSeriesSummary(series, forceUpdate); + } + + private void UpdateSeriesSummary(Series series, bool forceUpdate) + { if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return; - var firstVolume = series.Volumes.FirstOrDefault(v => v.Chapters.Any() && v.Number == 1); - var firstChapter = firstVolume?.Chapters.FirstOrDefault(c => c.Files.Any()); - + var isBook = series.Library.Type == LibraryType.Book; + var firstVolume = series.Volumes.FirstWithChapters(isBook); + var firstChapter = firstVolume?.Chapters.GetFirstChapterWithFiles(); + + // NOTE: This suffers from code changes not taking effect due to stale data var firstFile = firstChapter?.Files.FirstOrDefault(); - if (firstFile != null && !new FileInfo(firstFile.FilePath).DoesLastWriteMatch(firstFile.LastModified)) + if (firstFile != null && + (forceUpdate || firstFile.HasFileBeenModified())) // !new FileInfo(firstFile.FilePath).IsLastWriteLessThan(firstFile.LastModified) { - series.Summary = _archiveService.GetSummaryInfo(firstFile.FilePath); + var summary = isBook ? 
_bookService.GetSummaryInfo(firstFile.FilePath) : _archiveService.GetSummaryInfo(firstFile.FilePath); + if (string.IsNullOrEmpty(series.Summary)) + { + series.Summary = summary; + } + + firstFile.LastModified = DateTime.Now; } } - + + public void RefreshMetadata(int libraryId, bool forceUpdate = false) { var sw = Stopwatch.StartNew(); - var library = Task.Run(() => _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId)).Result; - var allSeries = Task.Run(() => _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId)).Result.ToList(); - + var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult(); + + // TODO: See if we can break this up into multiple threads that process 20 series at a time then save so we can reduce amount of memory used _logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name); - foreach (var series in allSeries) + foreach (var series in library.Series) { - series.NormalizedName = Parser.Parser.Normalize(series.Name); - - var volumes = Task.Run(() => _unitOfWork.SeriesRepository.GetVolumes(series.Id)).Result.ToList(); - foreach (var volume in volumes) + foreach (var volume in series.Volumes) { foreach (var chapter in volume.Chapters) { @@ -125,5 +163,38 @@ namespace API.Services _logger.LogInformation("Updated metadata for {LibraryName} in {ElapsedMilliseconds} milliseconds", library.Name, sw.ElapsedMilliseconds); } } + + + public void RefreshMetadataForSeries(int libraryId, int seriesId) + { + var sw = Stopwatch.StartNew(); + var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult(); + + var series = library.Series.SingleOrDefault(s => s.Id == seriesId); + if (series == null) + { + _logger.LogError("Series {SeriesId} was not found on Library {LibraryName}", seriesId, libraryId); + return; + } + _logger.LogInformation("Beginning metadata refresh of {SeriesName}", series.Name); + 
foreach (var volume in series.Volumes) + { + foreach (var chapter in volume.Chapters) + { + UpdateMetadata(chapter, true); + } + + UpdateMetadata(volume, true); + } + + UpdateMetadata(series, true); + _unitOfWork.SeriesRepository.Update(series); + + + if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.Complete()).Result) + { + _logger.LogInformation("Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds); + } + } } } \ No newline at end of file diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs index 23e7040d0..b284fd9f7 100644 --- a/API/Services/TaskScheduler.cs +++ b/API/Services/TaskScheduler.cs @@ -5,8 +5,6 @@ using API.Helpers.Converters; using API.Interfaces; using API.Interfaces.Services; using Hangfire; -using Microsoft.AspNetCore.Hosting; -using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; namespace API.Services @@ -22,11 +20,10 @@ namespace API.Services private readonly ICleanupService _cleanupService; public static BackgroundJobServer Client => new BackgroundJobServer(); - + public TaskScheduler(ICacheService cacheService, ILogger logger, IScannerService scannerService, - IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService, - IWebHostEnvironment env) + IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService) { _cacheService = cacheService; _logger = logger; @@ -35,29 +32,19 @@ namespace API.Services _metadataService = metadataService; _backupService = backupService; _cleanupService = cleanupService; - - if (!env.IsDevelopment()) - { - ScheduleTasks(); - } - else - { - RecurringJob.RemoveIfExists("scan-libraries"); - RecurringJob.RemoveIfExists("backup"); - RecurringJob.RemoveIfExists("cleanup"); - } - } public void ScheduleTasks() { _logger.LogInformation("Scheduling reoccurring tasks"); - string setting = Task.Run(() 
=> _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).Result.Value; + var setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).GetAwaiter().GetResult().Value; if (setting != null) { - _logger.LogDebug("Scheduling Scan Library Task for {Cron}", setting); - RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), () => CronConverter.ConvertToCronNotation(setting)); + var scanLibrarySetting = setting; + _logger.LogDebug("Scheduling Scan Library Task for {Setting}", scanLibrarySetting); + RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), + () => CronConverter.ConvertToCronNotation(scanLibrarySetting)); } else { @@ -67,7 +54,7 @@ namespace API.Services setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Result.Value; if (setting != null) { - _logger.LogDebug("Scheduling Backup Task for {Cron}", setting); + _logger.LogDebug("Scheduling Backup Task for {Setting}", setting); RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting)); } else @@ -80,10 +67,10 @@ namespace API.Services public void ScanLibrary(int libraryId, bool forceUpdate = false) { - // TODO: We shouldn't queue up a job if one is already in progress _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId); - BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate)); - BackgroundJob.Enqueue(() => _cleanupService.Cleanup()); // When we do a scan, force cache to re-unpack in case page numbers change + BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate)); + // When we do a scan, force cache to re-unpack in case page numbers change + BackgroundJob.Enqueue(() => _cleanupService.Cleanup()); } public void CleanupChapters(int[] chapterIds) @@ -103,6 +90,12 @@ namespace API.Services 
BackgroundJob.Enqueue((() => DirectoryService.ClearDirectory(tempDirectory))); } + public void RefreshSeriesMetadata(int libraryId, int seriesId) + { + _logger.LogInformation("Enqueuing series metadata refresh for: {SeriesId}", seriesId); + BackgroundJob.Enqueue((() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId))); + } + public void BackupDatabase() { BackgroundJob.Enqueue(() => _backupService.BackupDatabase()); diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index 4e4101982..432212f6f 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -5,6 +5,8 @@ using System.Diagnostics; using System.IO; using System.Linq; using System.Threading.Tasks; +using API.Comparators; +using API.Data; using API.Entities; using API.Entities.Enums; using API.Extensions; @@ -22,20 +24,24 @@ namespace API.Services.Tasks private readonly ILogger _logger; private readonly IArchiveService _archiveService; private readonly IMetadataService _metadataService; + private readonly IBookService _bookService; private ConcurrentDictionary> _scannedSeries; - private bool _forceUpdate; + private readonly NaturalSortComparer _naturalSort; public ScannerService(IUnitOfWork unitOfWork, ILogger logger, IArchiveService archiveService, - IMetadataService metadataService) + IMetadataService metadataService, IBookService bookService) { _unitOfWork = unitOfWork; _logger = logger; _archiveService = archiveService; _metadataService = metadataService; + _bookService = bookService; + _naturalSort = new NaturalSortComparer(); } - [DisableConcurrentExecution(timeoutInSeconds: 5)] - [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)] + + [DisableConcurrentExecution(timeoutInSeconds: 360)] + [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] public void ScanLibraries() { var libraries = Task.Run(() => 
_unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList(); @@ -47,35 +53,29 @@ namespace API.Services.Tasks private bool ShouldSkipFolderScan(FolderPath folder, ref int skippedFolders) { - // NOTE: This solution isn't the best, but it has potential. We need to handle a few other cases so it works great. + // NOTE: The only way to skip folders is if Directory hasn't been modified, we aren't doing a forcedUpdate and version hasn't changed between scans. return false; - - // if (/*_environment.IsProduction() && */!_forceUpdate && Directory.GetLastWriteTime(folder.Path) < folder.LastScanned) + + // if (!_forceUpdate && Directory.GetLastWriteTime(folder.Path) < folder.LastScanned) // { - // _logger.LogDebug($"{folder.Path} hasn't been updated since last scan. Skipping."); + // _logger.LogDebug("{FolderPath} hasn't been modified since last scan. Skipping", folder.Path); // skippedFolders += 1; // return true; // } - // - // return false; + + //return false; } - private void Cleanup() - { - _scannedSeries = null; - } - - [DisableConcurrentExecution(5)] - [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)] + [DisableConcurrentExecution(360)] + [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] public void ScanLibrary(int libraryId, bool forceUpdate) { - _forceUpdate = forceUpdate; var sw = Stopwatch.StartNew(); - Cleanup(); - Library library; + _scannedSeries = new ConcurrentDictionary>(); + Library library; try { - library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).Result; + library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult(); } catch (Exception ex) { @@ -84,238 +84,333 @@ namespace API.Services.Tasks return; } - _scannedSeries = new ConcurrentDictionary>(); - _logger.LogInformation("Beginning scan on {LibraryName}. 
Forcing metadata update: {ForceUpdate}", library.Name, forceUpdate); - - var totalFiles = 0; - var skippedFolders = 0; - foreach (var folderPath in library.Folders) - { - if (ShouldSkipFolderScan(folderPath, ref skippedFolders)) continue; - - try { - totalFiles += DirectoryService.TraverseTreeParallelForEach(folderPath.Path, (f) => - { - try - { - ProcessFile(f, folderPath.Path); - } - catch (FileNotFoundException exception) - { - _logger.LogError(exception, "The file {Filename} could not be found", f); - } - }, Parser.Parser.MangaFileExtensions); - } - catch (ArgumentException ex) { - _logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath.Path); - } - - folderPath.LastScanned = DateTime.Now; - } - - var scanElapsedTime = sw.ElapsedMilliseconds; - _logger.LogInformation("Folders Scanned {TotalFiles} files in {ElapsedScanTime} milliseconds", totalFiles, scanElapsedTime); - sw.Restart(); - if (skippedFolders == library.Folders.Count) - { - _logger.LogInformation("All Folders were skipped due to no modifications to the directories"); - _unitOfWork.LibraryRepository.Update(library); - _logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, sw.ElapsedMilliseconds, library.Name); - Cleanup(); - return; - } - - // Remove any series where there were no parsed infos - var filtered = _scannedSeries.Where(kvp => kvp.Value.Count != 0); - var series = filtered.ToDictionary(v => v.Key, v => v.Value); + var series = ScanLibrariesForSeries(forceUpdate, library, sw, out var totalFiles, out var scanElapsedTime); UpdateLibrary(library, series); + _unitOfWork.LibraryRepository.Update(library); - if (Task.Run(() => _unitOfWork.Complete()).Result) { - - _logger.LogInformation("Scan completed on {LibraryName}. 
Parsed {ParsedSeriesCount} series in {ElapsedScanTime} ms", library.Name, series.Keys.Count, sw.ElapsedMilliseconds); + _logger.LogInformation("Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, series.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, library.Name); } else { - _logger.LogError("There was a critical error that resulted in a failed scan. Please check logs and rescan"); + _logger.LogCritical("There was a critical error that resulted in a failed scan. Please check logs and rescan"); } - - _logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, sw.ElapsedMilliseconds + scanElapsedTime, library.Name); + + CleanupUserProgress(); + + BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate)); } + /// + /// Remove any user progress rows that no longer exist since scan library ran and deleted series/volumes/chapters + /// + private void CleanupUserProgress() + { + var cleanedUp = Task.Run(() => _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters()).Result; + _logger.LogInformation("Removed {Count} abandoned progress rows", cleanedUp); + } + + private Dictionary> ScanLibrariesForSeries(bool forceUpdate, Library library, Stopwatch sw, out int totalFiles, + out long scanElapsedTime) + { + _logger.LogInformation("Beginning scan on {LibraryName}. Forcing metadata update: {ForceUpdate}", library.Name, + forceUpdate); + totalFiles = 0; + var skippedFolders = 0; + foreach (var folderPath in library.Folders) + { + if (ShouldSkipFolderScan(folderPath, ref skippedFolders)) continue; + + // NOTE: we can refactor this to allow all filetypes and handle everything in the ProcessFile to allow mixed library types. 
+ var searchPattern = Parser.Parser.ArchiveFileExtensions; + if (library.Type == LibraryType.Book) + { + searchPattern = Parser.Parser.BookFileExtensions; + } + + try + { + totalFiles += DirectoryService.TraverseTreeParallelForEach(folderPath.Path, (f) => + { + try + { + ProcessFile(f, folderPath.Path, library.Type); + } + catch (FileNotFoundException exception) + { + _logger.LogError(exception, "The file {Filename} could not be found", f); + } + }, searchPattern, _logger); + } + catch (ArgumentException ex) + { + _logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath.Path); + } + + folderPath.LastScanned = DateTime.Now; + } + + scanElapsedTime = sw.ElapsedMilliseconds; + _logger.LogInformation("Folders Scanned {TotalFiles} files in {ElapsedScanTime} milliseconds", totalFiles, + scanElapsedTime); + sw.Restart(); + if (skippedFolders == library.Folders.Count) + { + _logger.LogInformation("All Folders were skipped due to no modifications to the directories"); + _unitOfWork.LibraryRepository.Update(library); + _scannedSeries = null; + _logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", + totalFiles, sw.ElapsedMilliseconds, library.Name); + return new Dictionary>(); + } + + return SeriesWithInfos(_scannedSeries); + } + + /// + /// Returns any series where there were parsed infos + /// + /// + /// + private static Dictionary> SeriesWithInfos(IDictionary> scannedSeries) + { + var filtered = scannedSeries.Where(kvp => kvp.Value.Count > 0); + var series = filtered.ToDictionary(v => v.Key, v => v.Value); + return series; + } + + private void UpdateLibrary(Library library, Dictionary> parsedSeries) { if (parsedSeries == null) throw new ArgumentNullException(nameof(parsedSeries)); - + // First, remove any series that are not in parsedSeries list - var foundSeries = parsedSeries.Select(s => Parser.Parser.Normalize(s.Key)).ToList(); - var missingSeries = library.Series.Where(existingSeries => - 
!foundSeries.Contains(existingSeries.NormalizedName) || !parsedSeries.ContainsKey(existingSeries.Name) - || (existingSeries.LocalizedName != null && !parsedSeries.ContainsKey(existingSeries.LocalizedName)) - || !parsedSeries.ContainsKey(existingSeries.OriginalName)); - var removeCount = 0; - foreach (var existingSeries in missingSeries) + var missingSeries = FindSeriesNotOnDisk(library.Series, parsedSeries).ToList(); + library.Series = RemoveMissingSeries(library.Series, missingSeries, out var removeCount); + if (removeCount > 0) { - library.Series?.Remove(existingSeries); - removeCount += 1; + _logger.LogInformation("Removed {RemoveMissingSeries} series that are no longer on disk:", removeCount); + foreach (var s in missingSeries) + { + _logger.LogDebug("Removed {SeriesName}", s.Name); + } } - _logger.LogInformation("Removed {RemoveCount} series that are no longer on disk", removeCount); + // Add new series that have parsedInfos - foreach (var (key, _) in parsedSeries) + foreach (var (key, infos) in parsedSeries) { - var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(key)); + // Key is normalized already + Series existingSeries; + try + { + existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == key || Parser.Parser.Normalize(s.OriginalName) == key); + } + catch (Exception e) + { + _logger.LogCritical(e, "There are multiple series that map to normalized key {Key}. 
You can manually delete the entity via UI and rescan to fix it", key); + var duplicateSeries = library.Series.Where(s => s.NormalizedName == key || Parser.Parser.Normalize(s.OriginalName) == key).ToList(); + foreach (var series in duplicateSeries) + { + _logger.LogCritical("{Key} maps with {Series}", key, series.OriginalName); + + } + + continue; + } if (existingSeries == null) { - existingSeries = new Series() - { - Name = key, - OriginalName = key, - LocalizedName = key, - NormalizedName = Parser.Parser.Normalize(key), - SortName = key, - Summary = "", - Volumes = new List() - }; + existingSeries = DbFactory.Series(infos[0].Series); library.Series.Add(existingSeries); - } - existingSeries.NormalizedName = Parser.Parser.Normalize(key); - existingSeries.LocalizedName ??= key; + } + + existingSeries.NormalizedName = Parser.Parser.Normalize(existingSeries.Name); + existingSeries.OriginalName ??= infos[0].Series; } - + // Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series var librarySeries = library.Series.ToList(); Parallel.ForEach(librarySeries, (series) => { - _logger.LogInformation("Processing series {SeriesName}", series.Name); - UpdateVolumes(series, parsedSeries[series.Name].ToArray()); - series.Pages = series.Volumes.Sum(v => v.Pages); - _metadataService.UpdateMetadata(series, _forceUpdate); + try + { + _logger.LogInformation("Processing series {SeriesName}", series.OriginalName); + UpdateVolumes(series, parsedSeries[Parser.Parser.Normalize(series.OriginalName)].ToArray()); + series.Pages = series.Volumes.Sum(v => v.Pages); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an exception updating volumes for {SeriesName}", series.Name); + } }); - + } - foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now; + public IEnumerable FindSeriesNotOnDisk(ICollection existingSeries, Dictionary> parsedSeries) + { + var foundSeries = parsedSeries.Select(s => s.Key).ToList(); + return 
existingSeries.Where(es => !es.NameInList(foundSeries)); + } + + /// + /// Removes all instances of missingSeries' Series from existingSeries Collection. Existing series is updated by + /// reference and the removed element count is returned. + /// + /// Existing Series in DB + /// Series not found on disk or can't be parsed + /// + /// the updated existingSeries + public static ICollection RemoveMissingSeries(ICollection existingSeries, IEnumerable missingSeries, out int removeCount) + { + var existingCount = existingSeries.Count; + var missingList = missingSeries.ToList(); + + existingSeries = existingSeries.Where( + s => !missingList.Exists( + m => m.NormalizedName.Equals(s.NormalizedName))).ToList(); + + removeCount = existingCount - existingSeries.Count; + + return existingSeries; } private void UpdateVolumes(Series series, ParserInfo[] parsedInfos) { var startingVolumeCount = series.Volumes.Count; // Add new volumes and update chapters per volume - var distinctVolumes = parsedInfos.Select(p => p.Volumes).Distinct().ToList(); - _logger.LogDebug("Updating {DistinctVolumes} volumes", distinctVolumes.Count); + var distinctVolumes = parsedInfos.DistinctVolumes(); + _logger.LogDebug("Updating {DistinctVolumes} volumes on {SeriesName}", distinctVolumes.Count, series.Name); foreach (var volumeNumber in distinctVolumes) { - var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray(); - var volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber); if (volume == null) { - volume = new Volume() - { - Name = volumeNumber, - Number = (int) Parser.Parser.MinimumNumberFromRange(volumeNumber), - IsSpecial = false, - Chapters = new List() - }; + volume = DbFactory.Volume(volumeNumber); series.Volumes.Add(volume); } - volume.IsSpecial = volume.Number == 0 && infos.All(p => p.Chapters == "0"); + // NOTE: Instead of creating and adding? Why Not Merge a new volume into an existing, so no matter what, new properties,etc get propagated? 
+ _logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name); + var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray(); UpdateChapters(volume, infos); volume.Pages = volume.Chapters.Sum(c => c.Pages); - _metadataService.UpdateMetadata(volume, _forceUpdate); } - - - // Remove existing volumes that aren't in parsedInfos and volumes that have no chapters - var existingVolumes = series.Volumes.ToList(); - foreach (var volume in existingVolumes) + // Remove existing volumes that aren't in parsedInfos + var nonDeletedVolumes = series.Volumes.Where(v => parsedInfos.Select(p => p.Volumes).Contains(v.Name)).ToList(); + if (series.Volumes.Count != nonDeletedVolumes.Count) { - // I can't remove based on chapter count as I haven't updated Chapters || volume.Chapters.Count == 0 - var hasInfo = parsedInfos.Any(v => v.Volumes == volume.Name); - if (!hasInfo) + _logger.LogDebug("Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name", + (series.Volumes.Count - nonDeletedVolumes.Count), series.Name); + var deletedVolumes = series.Volumes.Except(nonDeletedVolumes); + foreach (var volume in deletedVolumes) { - series.Volumes.Remove(volume); + var file = volume.Chapters.FirstOrDefault()?.Files.FirstOrDefault()?.FilePath ?? "no files"; + if (new FileInfo(file).Exists) + { + _logger.LogError("Volume cleanup code was trying to remove a volume with a file still existing on disk. 
File: {File}", file); + } + _logger.LogDebug("Removed {SeriesName} - Volume {Volume}: {File}", series.Name, volume.Name, file); } + + series.Volumes = nonDeletedVolumes; } _logger.LogDebug("Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}", series.Name, startingVolumeCount, series.Volumes.Count); } - + + /// + /// + /// + /// + /// private void UpdateChapters(Volume volume, ParserInfo[] parsedInfos) { - var startingChapters = volume.Chapters.Count; // Add new chapters foreach (var info in parsedInfos) { - var chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters); - if (chapter == null) - { - chapter = new Chapter() - { - Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "", - Range = info.Chapters, - Files = new List() - }; - volume.Chapters.Add(chapter); - } - - chapter.Files ??= new List(); - } - - // Add files - - foreach (var info in parsedInfos) - { - Chapter chapter = null; + // Specials go into their own chapters with Range being their filename and IsSpecial = True. Non-Specials with Vol and Chap as 0 + // also are treated like specials for UI grouping. + Chapter chapter; try { - chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters); + chapter = volume.Chapters.GetChapterByRange(info); } catch (Exception ex) { - _logger.LogError(ex, "There was an exception parsing chapter. 
Skipping Vol {VolumeNumber} Chapter {ChapterNumber}", volume.Name, info.Chapters); + _logger.LogError(ex, "{FileName} mapped as '{Series} - Vol {Volume} Ch {Chapter}' is a duplicate, skipping", info.FullFilePath, info.Series, info.Volumes, info.Chapters); + continue; } - if (chapter == null) continue; - // I need to reset Files for the first time, hence this work should be done in a separate loop - AddOrUpdateFileForChapter(chapter, info); - chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + ""; - chapter.Range = info.Chapters; - chapter.Pages = chapter.Files.Sum(f => f.Pages); - _metadataService.UpdateMetadata(chapter, _forceUpdate); + + if (chapter == null) + { + _logger.LogDebug( + "Adding new chapter, {Series} - Vol {Volume} Ch {Chapter}", info.Series, info.Volumes, info.Chapters); + volume.Chapters.Add(DbFactory.Chapter(info)); + } + else + { + chapter.UpdateFrom(info); + } + } + // Add files + foreach (var info in parsedInfos) + { + var specialTreatment = info.IsSpecialInfo(); + Chapter chapter; + try + { + chapter = volume.Chapters.GetChapterByRange(info); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an exception parsing chapter. Skipping {SeriesName} Vol {VolumeNumber} Chapter {ChapterNumber} - Special treatment: {NeedsSpecialTreatment}", info.Series, volume.Name, info.Chapters, specialTreatment); + continue; + } + if (chapter == null) continue; + AddOrUpdateFileForChapter(chapter, info); + chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty; + chapter.Range = specialTreatment ? 
info.Filename : info.Chapters; + } // Remove chapters that aren't in parsedInfos or have no files linked var existingChapters = volume.Chapters.ToList(); foreach (var existingChapter in existingChapters) { - var hasInfo = parsedInfos.Any(v => v.Chapters == existingChapter.Range); - if (!hasInfo || !existingChapter.Files.Any()) + if (existingChapter.Files.Count == 0 || !parsedInfos.HasInfo(existingChapter)) { + _logger.LogDebug("Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series); volume.Chapters.Remove(existingChapter); } + else + { + // Ensure we remove any files that no longer exist AND order + existingChapter.Files = existingChapter.Files + .Where(f => parsedInfos.Any(p => p.FullFilePath == f.FilePath)) + .OrderBy(f => f.FilePath, _naturalSort).ToList(); + existingChapter.Pages = existingChapter.Files.Sum(f => f.Pages); + } } - - _logger.LogDebug("Updated chapters from {StartingChaptersCount} to {ChapterCount}", - startingChapters, volume.Chapters.Count); } /// - /// Attempts to either add a new instance of a show mapping to the scannedSeries bag or adds to an existing. + /// Attempts to either add a new instance of a show mapping to the _scannedSeries bag or adds to an existing. 
/// /// private void TrackSeries(ParserInfo info) { if (info.Series == string.Empty) return; - _scannedSeries.AddOrUpdate(info.Series, new List() {info}, (_, oldValue) => + // Check if normalized info.Series already exists and if so, update info to use that name instead + info.Series = MergeName(_scannedSeries, info); + + _scannedSeries.AddOrUpdate(Parser.Parser.Normalize(info.Series), new List() {info}, (_, oldValue) => { oldValue ??= new List(); if (!oldValue.Contains(info)) @@ -327,33 +422,86 @@ namespace API.Services.Tasks }); } + public string MergeName(ConcurrentDictionary> collectedSeries, ParserInfo info) + { + var normalizedSeries = Parser.Parser.Normalize(info.Series); + _logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries); + var existingName = collectedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key) == normalizedSeries) + .Key; + // BUG: We are comparing info.Series against a normalized string. They should never match. (This can cause series to not delete or parse correctly after a rename) + if (!string.IsNullOrEmpty(existingName)) // && info.Series != existingName + { + _logger.LogDebug("Found duplicate parsed infos, merged {Original} into {Merged}", info.Series, existingName); + return existingName; + } + + return info.Series; + } + /// /// Processes files found during a library scan. /// Populates a collection of for DB updates later. 
/// /// Path of a file /// - private void ProcessFile(string path, string rootPath) + /// Library type to determine parsing to perform + private void ProcessFile(string path, string rootPath, LibraryType type) { - var info = Parser.Parser.Parse(path, rootPath); + ParserInfo info; + if (type == LibraryType.Book && Parser.Parser.IsEpub(path)) + { + info = _bookService.ParseInfo(path); + } + else + { + info = Parser.Parser.Parse(path, rootPath, type); + } + if (info == null) { - _logger.LogWarning("Could not parse series from {Path}", path); + _logger.LogWarning("[Scanner] Could not parse series from {Path}", path); return; } + if (type == LibraryType.Book && Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != "0") + { + info = Parser.Parser.Parse(path, rootPath, type); + var info2 = _bookService.ParseInfo(path); + info.Merge(info2); + } + TrackSeries(info); } private MangaFile CreateMangaFile(ParserInfo info) { - return new MangaFile() + switch (info.Format) { - FilePath = info.FullFilePath, - Format = info.Format, - Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath) - }; + case MangaFormat.Archive: + { + return new MangaFile() + { + FilePath = info.FullFilePath, + Format = info.Format, + Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath) + }; + } + case MangaFormat.Book: + { + return new MangaFile() + { + FilePath = info.FullFilePath, + Format = info.Format, + Pages = _bookService.GetNumberOfPages(info.FullFilePath) + }; + } + default: + _logger.LogWarning("[Scanner] Ignoring {Filename}. 
Non-archives are not supported", info.Filename); + break; + } + + return null; } private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info) @@ -363,22 +511,21 @@ namespace API.Services.Tasks if (existingFile != null) { existingFile.Format = info.Format; - if (!new FileInfo(existingFile.FilePath).DoesLastWriteMatch(existingFile.LastModified)) + if (!existingFile.HasFileBeenModified() && existingFile.Pages > 0) { - existingFile.Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath); + existingFile.Pages = existingFile.Format == MangaFormat.Book + ? _bookService.GetNumberOfPages(info.FullFilePath) + : _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath); } } else { - if (info.Format == MangaFormat.Archive) + var file = CreateMangaFile(info); + if (file != null) { - chapter.Files.Add(CreateMangaFile(info)); + chapter.Files.Add(file); existingFile = chapter.Files.Last(); } - else - { - _logger.LogDebug("Ignoring {Filename}. Non-archives are not supported", info.Filename); - } } if (existingFile != null) diff --git a/API/Startup.cs b/API/Startup.cs index 65cde44f0..4d26d933e 100644 --- a/API/Startup.cs +++ b/API/Startup.cs @@ -2,7 +2,7 @@ using System; using System.IO.Compression; using System.Linq; using API.Extensions; -using API.Interfaces.Services; +using API.Interfaces; using API.Middleware; using API.Services; using Hangfire; @@ -15,7 +15,6 @@ using Microsoft.AspNetCore.ResponseCompression; using Microsoft.AspNetCore.StaticFiles; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; using Microsoft.Extensions.Hosting; using Microsoft.OpenApi.Models; @@ -24,12 +23,10 @@ namespace API public class Startup { private readonly IConfiguration _config; - private readonly IWebHostEnvironment _env; - public Startup(IConfiguration config, IWebHostEnvironment env) + public Startup(IConfiguration config) { _config = config; - _env = env; } // 
This method gets called by the runtime. Use this method to add services to the container. @@ -71,16 +68,14 @@ namespace API // Add the processing server as IHostedService services.AddHangfireServer(); - - //services.AddStartupTask(services). - services.AddTransient().TryAddSingleton(services); } // This method gets called by the runtime. Use this method to configure the HTTP request pipeline. - public void Configure(IApplicationBuilder app, IBackgroundJobClient backgroundJobs, IWebHostEnvironment env, IHostApplicationLifetime applicationLifetime) + public void Configure(IApplicationBuilder app, IBackgroundJobClient backgroundJobs, IWebHostEnvironment env, + IHostApplicationLifetime applicationLifetime, ITaskScheduler taskScheduler) { app.UseMiddleware(); - + if (env.IsDevelopment()) { app.UseSwagger(); @@ -136,8 +131,11 @@ namespace API applicationLifetime.ApplicationStopping.Register(OnShutdown); applicationLifetime.ApplicationStarted.Register(() => { - Console.WriteLine("Kavita - v0.3.1"); + Console.WriteLine("Kavita - v0.4.0"); }); + + // Any services that should be bootstrapped go here + taskScheduler.ScheduleTasks(); } private void OnShutdown() diff --git a/INSTALL.txt b/INSTALL.txt index 5d052dccd..a8b83f905 100644 --- a/INSTALL.txt +++ b/INSTALL.txt @@ -1,4 +1,5 @@ How to Install -1. Unzip the archive to a directory that is writable. If on windows, do not place in Program Files -2. Open appsettings.json and modify TokenKey to a random string ideally generated from https://passwordsgenerator.net/ -3. Run Kavita executable \ No newline at end of file +1. Unzip the archive to a directory that is writable. If on windows, do not place in Program Files. +2. (Linux only) Chmod and Chown so Kavita can write to the directory you placed in. +3. Open appsettings.json and modify TokenKey to a random string ideally generated from https://passwordsgenerator.net/ +4. 
Run Kavita executable \ No newline at end of file diff --git a/README.md b/README.md index 271787b97..021232415 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,8 @@ Kavita is a fast, feature rich, cross platform OSS manga server. Built with a fo and the goal of being a full solution for all your reading needs. Setup your own server and share your manga collection with your friends and family! +[![Discord](https://img.shields.io/badge/discord-chat-7289DA.svg?maxAge=60)](https://discord.gg/eczRp9eeem) +![Github Downloads](https://img.shields.io/github/downloads/Kareadita/Kavita/total.svg) ## Goals: @@ -15,15 +17,28 @@ your manga collection with your friends and family! * Metadata should allow for collections, want to read integration from 3rd party services, genres. * Ability to manage users, access, and ratings +## How to Build +- Ensure you've cloned Kavita-webui. You should have Projects/Kavita and Projects/Kavita-webui +- In Kavita-webui, run ng serve. This will start the webserver on localhost:4200 +- Run API project in Kavita, this will start the backend on localhost:5000 + ## How to Deploy - Run build.sh and pass the Runtime Identifier for your OS or just build.sh for all supported RIDs. ## How to install -1. Unzip the archive for your target OS -2. Place in a directory that is writable. If on windows, do not place in Program Files -3. Open appsettings.json and modify TokenKey to a random string ideally generated from [https://passwordsgenerator.net/](https://passwordsgenerator.net/) -4. Run Kavita +- Unzip the archive for your target OS +- Place in a directory that is writable. If on windows, do not place in Program Files +- Open appsettings.json and modify TokenKey to a random string ideally generated from [https://passwordsgenerator.net/](https://passwordsgenerator.net/) +- Run Kavita +- If you are updating, do not copy appsettings.json from the new version over. It will override your TokenKey and you will have to reauthenticate on your devices. 
+ +## Docker +- Docker is supported and tested, you can find the image and instructions [here](https://github.com/Kizaing/KavitaDocker). + +## Want to help? +I am looking for developers with a passion for building the next Plex for Manga, Comics, and Ebooks. I need developers with C#/ASP.NET, Angular 11 or CSS experience. +Reach out to me on [Discord]((https://discord.gg/eczRp9eeem)). ## Buy me a beer I've gone through many beers building Kavita and expect to go through many more. If you want to throw me a few bucks you can [here](https://paypal.me/majora2007?locale.x=en_US). Money will go diff --git a/build.sh b/build.sh index 9119b76f4..d10013968 100644 --- a/build.sh +++ b/build.sh @@ -47,10 +47,10 @@ Build() BuildUI() { ProgressStart 'Building UI' - cd ../kavita-webui/ || exit + cd ../Kavita-webui/ || exit npm install npm run prod - cd ../kavita/ || exit + cd ../Kavita/ || exit ProgressEnd 'Building UI' } @@ -62,15 +62,18 @@ Package() ProgressStart "Creating $runtime Package for $framework" - + # TODO: Use no-restore? Because Build should have already done it for us echo "Building" cd API - echo dotnet publish -c release --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework - dotnet publish -c release --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + echo dotnet publish -c Release --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + dotnet publish -c Release --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework echo "Copying Install information" cp ../INSTALL.txt "$lOutputFolder"/README.txt + echo "Copying LICENSE" + cp ../LICENSE "$lOutputFolder"/LICENSE.txt + echo "Renaming API -> Kavita" mv "$lOutputFolder"/API "$lOutputFolder"/Kavita @@ -110,6 +113,3 @@ else Package "net5.0" "$RID" cd "$dir" fi - - - diff --git a/favicon.ico b/favicon.ico new file mode 100644 index 000000000..1ed03f4f7 Binary files /dev/null and b/favicon.ico differ