diff --git a/API.Tests/API.Tests.csproj b/API.Tests/API.Tests.csproj
index c1e0fa046..d486d9877 100644
--- a/API.Tests/API.Tests.csproj
+++ b/API.Tests/API.Tests.csproj
@@ -7,6 +7,7 @@
+
@@ -26,6 +27,7 @@
+
diff --git a/API.Tests/Entities/SeriesTest.cs b/API.Tests/Entities/SeriesTest.cs
new file mode 100644
index 000000000..dd1f77b29
--- /dev/null
+++ b/API.Tests/Entities/SeriesTest.cs
@@ -0,0 +1,29 @@
+using System;
+using API.Data;
+using API.Tests.Helpers;
+using Xunit;
+
+namespace API.Tests.Entities
+{
+    /// <summary>
+    /// Tests for <see cref="Series"/>
+    /// </summary>
+    public class SeriesTest
+    {
+        [Theory]
+        [InlineData("Darker than Black")]
+        public void CreateSeries(string name)
+        {
+            var key = API.Parser.Parser.Normalize(name);
+            var series = DbFactory.Series(name);
+            Assert.Equal(0, series.Id);
+            Assert.Equal(0, series.Pages);
+            Assert.Equal(name, series.Name);
+            Assert.Null(series.CoverImage);
+            Assert.Equal(name, series.LocalizedName);
+            Assert.Equal(name, series.SortName);
+            Assert.Equal(name, series.OriginalName);
+            Assert.Equal(key, series.NormalizedName);
+        }
+    }
+}
\ No newline at end of file
diff --git a/API.Tests/Extensions/ChapterListExtensionsTests.cs b/API.Tests/Extensions/ChapterListExtensionsTests.cs
new file mode 100644
index 000000000..2251c660b
--- /dev/null
+++ b/API.Tests/Extensions/ChapterListExtensionsTests.cs
@@ -0,0 +1,86 @@
+using System.Collections.Generic;
+using API.Entities;
+using API.Entities.Enums;
+using API.Extensions;
+using API.Parser;
+using Xunit;
+
+namespace API.Tests.Extensions
+{
+    public class ChapterListExtensionsTests
+    {
+        private Chapter CreateChapter(string range, string number, MangaFile file, bool isSpecial)
+        {
+            return new Chapter()
+            {
+                Range = range,
+                Number = number,
+                Files = new List<MangaFile>() {file},
+                IsSpecial = isSpecial
+            };
+        }
+
+        private MangaFile CreateFile(string file, MangaFormat format)
+        {
+            return new MangaFile()
+            {
+                FilePath = file,
+                Format = format
+            };
+        }
+
+        [Fact]
+        public void GetAnyChapterByRange_Test_ShouldBeNull()
+        {
+            var info = new ParserInfo()
+            {
+                Chapters = "0",
+                Edition = "",
+                Format = MangaFormat.Archive,
+                FullFilePath = "/manga/darker than black.cbz",
+                Filename = "darker than black.cbz",
+                IsSpecial = false,
+                Series = "darker than black",
+                Title = "darker than black",
+                Volumes = "0"
+            };
+
+            var chapterList = new List<Chapter>()
+            {
+                CreateChapter("darker than black - Some special", "0", CreateFile("/manga/darker than black - special.cbz", MangaFormat.Archive), true)
+            };
+
+            var actualChapter = chapterList.GetChapterByRange(info);
+
+            Assert.NotEqual(chapterList[0], actualChapter);
+
+        }
+
+        [Fact]
+        public void GetAnyChapterByRange_Test_ShouldBeNotNull()
+        {
+            var info = new ParserInfo()
+            {
+                Chapters = "0",
+                Edition = "",
+                Format = MangaFormat.Archive,
+                FullFilePath = "/manga/darker than black.cbz",
+                Filename = "darker than black.cbz",
+                IsSpecial = true,
+                Series = "darker than black",
+                Title = "darker than black",
+                Volumes = "0"
+            };
+
+            var chapterList = new List<Chapter>()
+            {
+                CreateChapter("darker than black", "0", CreateFile("/manga/darker than black.cbz", MangaFormat.Archive), true)
+            };
+
+            var actualChapter = chapterList.GetChapterByRange(info);
+
+            Assert.Equal(chapterList[0], actualChapter);
+
+        }
+    }
+}
\ No newline at end of file
diff --git a/API.Tests/Extensions/FileInfoExtensionsTests.cs b/API.Tests/Extensions/FileInfoExtensionsTests.cs
new file mode 100644
index 000000000..371d8ac76
--- /dev/null
+++ b/API.Tests/Extensions/FileInfoExtensionsTests.cs
@@ -0,0 +1,27 @@
+using System;
+using System.IO;
+using API.Extensions;
+using NSubstitute;
+using Xunit;
+
+namespace API.Tests.Extensions
+{
+    public class FileInfoExtensionsTests
+    {
+        // [Fact]
+        // public void DoesLastWriteMatchTest()
+        // {
+        //     var fi = Substitute.For<FileInfo>();
+        //     fi.LastWriteTime = DateTime.Now;
+        //
+        //     var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(1));
+        //     Assert.False(fi.DoesLastWriteMatch(deltaTime));
+        // }
+        //
+        // [Fact]
+        // public void IsLastWriteLessThanTest()
+        // {
+        //
+        // }
+    }
+}
\ No newline at end of file
diff --git a/API.Tests/Extensions/ParserInfoListExtensionsTests.cs b/API.Tests/Extensions/ParserInfoListExtensionsTests.cs
new file mode 100644
index 000000000..99f1383fc
--- /dev/null
+++ b/API.Tests/Extensions/ParserInfoListExtensionsTests.cs
@@ -0,0 +1,42 @@
+using System.Collections.Generic;
+using System.Linq;
+using API.Entities;
+using API.Entities.Enums;
+using API.Extensions;
+using API.Parser;
+using API.Tests.Helpers;
+using Xunit;
+
+namespace API.Tests.Extensions
+{
+    public class ParserInfoListExtensions
+    {
+        [Theory]
+        [InlineData(new string[] {"1", "1", "3-5", "5", "8", "0", "0"}, new string[] {"1", "3-5", "5", "8", "0"})]
+        public void DistinctVolumesTest(string[] volumeNumbers, string[] expectedNumbers)
+        {
+            var infos = volumeNumbers.Select(n => new ParserInfo() {Volumes = n}).ToList();
+            Assert.Equal(expectedNumbers, infos.DistinctVolumes());
+        }
+
+        [Theory]
+        [InlineData(new string[] {@"Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, new string[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)]
+        [InlineData(new string[] {@"Cynthia The Mission - c000-006 (v06-07) [Desudesu&Brolen].zip"}, new string[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, true)]
+        [InlineData(new string[] {@"Cynthia The Mission v20 c12-20 [Desudesu&Brolen].zip"}, new string[] {@"E:\Manga\Cynthia the Mission\Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip"}, false)]
+        public void HasInfoTest(string[] inputInfos, string[] inputChapters, bool expectedHasInfo)
+        {
+            var infos = new List<ParserInfo>();
+            foreach (var filename in inputInfos)
+            {
+                infos.Add(API.Parser.Parser.Parse(
+                    filename,
+                    string.Empty));
+            }
+
+            var files = inputChapters.Select(s => EntityFactory.CreateMangaFile(s, MangaFormat.Archive, 199)).ToList();
+            var chapter = EntityFactory.CreateChapter("0-6", false, files);
+
+            Assert.Equal(expectedHasInfo, infos.HasInfo(chapter));
+        }
+    }
+}
\ No newline at end of file
diff --git a/API.Tests/Extensions/SeriesExtensionsTests.cs b/API.Tests/Extensions/SeriesExtensionsTests.cs
index 687ca9ca0..59c823fe1 100644
--- a/API.Tests/Extensions/SeriesExtensionsTests.cs
+++ b/API.Tests/Extensions/SeriesExtensionsTests.cs
@@ -1,4 +1,5 @@
-using API.Entities;
+using System;
+using API.Entities;
 using API.Extensions;
 using Xunit;
 
@@ -10,6 +11,11 @@ namespace API.Tests.Extensions
         [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker than Black"}, true)]
         [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker_than_Black"}, true)]
         [InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker then Black!"}, false)]
+        [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"Salem's Lot"}, true)]
+        [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salems lot"}, true)]
"Salem's Lot"}, new [] {"salem's lot"}, true)] + // Different normalizations pass as we check normalization against an on-the-fly calculation so we don't delete series just because we change how normalization works + [InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot", "salems lot"}, new [] {"salem's lot"}, true)] public void NameInListTest(string[] seriesInput, string[] list, bool expected) { var series = new Series() @@ -17,7 +23,7 @@ namespace API.Tests.Extensions Name = seriesInput[0], LocalizedName = seriesInput[1], OriginalName = seriesInput[2], - NormalizedName = Parser.Parser.Normalize(seriesInput[0]) + NormalizedName = seriesInput.Length == 4 ? seriesInput[3] : API.Parser.Parser.Normalize(seriesInput[0]) }; Assert.Equal(expected, series.NameInList(list)); diff --git a/API.Tests/Helpers/EntityFactory.cs b/API.Tests/Helpers/EntityFactory.cs new file mode 100644 index 000000000..b3b09d486 --- /dev/null +++ b/API.Tests/Helpers/EntityFactory.cs @@ -0,0 +1,57 @@ +using System.Collections.Generic; +using API.Entities; +using API.Entities.Enums; + +namespace API.Tests.Helpers +{ + /// + /// Used to help quickly create DB entities for Unit Testing + /// + public static class EntityFactory + { + public static Series CreateSeries(string name) + { + return new Series() + { + Name = name, + SortName = name, + LocalizedName = name, + NormalizedName = API.Parser.Parser.Normalize(name), + Volumes = new List() + }; + } + + public static Volume CreateVolume(string volumeNumber, List chapters = null) + { + return new Volume() + { + Name = volumeNumber, + Pages = 0, + Chapters = chapters ?? new List() + }; + } + + public static Chapter CreateChapter(string range, bool isSpecial, List files = null) + { + return new Chapter() + { + IsSpecial = isSpecial, + Range = range, + Number = API.Parser.Parser.MinimumNumberFromRange(range) + string.Empty, + Files = files ?? new List(), + Pages = 0, + + }; + } + + public static MangaFile CreateMangaFile(string filename, MangaFormat format, int pages) + { + return new MangaFile() + { + FilePath = filename, + Format = format, + Pages = pages + }; + } + } +} \ No newline at end of file diff --git a/API.Tests/Helpers/ParserInfoFactory.cs b/API.Tests/Helpers/ParserInfoFactory.cs new file mode 100644 index 000000000..7dcf564e1 --- /dev/null +++ b/API.Tests/Helpers/ParserInfoFactory.cs @@ -0,0 +1,25 @@ +using System.IO; +using API.Entities.Enums; +using API.Parser; + +namespace API.Tests.Helpers +{ + public static class ParserInfoFactory + { + public static ParserInfo CreateParsedInfo(string series, string volumes, string chapters, string filename, bool isSpecial) + { + return new ParserInfo() + { + Chapters = chapters, + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = Path.Join(@"/manga/", filename), + Filename = filename, + IsSpecial = isSpecial, + Title = Path.GetFileNameWithoutExtension(filename), + Series = series, + Volumes = volumes + }; + } + } +} \ No newline at end of file diff --git a/API.Tests/Helpers/PrivateObjectPrivateType.cs b/API.Tests/Helpers/PrivateObjectPrivateType.cs deleted file mode 100644 index e99016828..000000000 --- a/API.Tests/Helpers/PrivateObjectPrivateType.cs +++ /dev/null @@ -1,1864 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT license. See LICENSE file in the project root for full license information. 
- -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Globalization; -using System.Reflection; - -namespace Microsoft.VisualStudio.TestTools.UnitTesting -{ - /// - /// This class represents the live NON public INTERNAL object in the system - /// - public class PrivateObject - { - // bind everything - private const BindingFlags BindToEveryThing = BindingFlags.Default | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Public; - - private static BindingFlags constructorFlags = BindingFlags.Instance | BindingFlags.Public | BindingFlags.CreateInstance | BindingFlags.NonPublic; - - private object target; // automatically initialized to null - private Type originalType; // automatically initialized to null - - private Dictionary> methodCache; // automatically initialized to null - - /// - /// Initializes a new instance of the class that contains - /// the already existing object of the private class - /// - /// object that serves as starting point to reach the private members - /// the derefrencing string using . that points to the object to be retrived as in m_X.m_Y.m_Z - public PrivateObject(object obj, string memberToAccess) - { - ValidateAccessString(memberToAccess); - - PrivateObject temp = obj as PrivateObject; - if (temp == null) - { - temp = new PrivateObject(obj); - } - - // Split The access string - string[] arr = memberToAccess.Split(new char[] { '.' }); - - for (int i = 0; i < arr.Length; i++) - { - object next = temp.InvokeHelper(arr[i], BindToEveryThing | BindingFlags.Instance | BindingFlags.GetField | BindingFlags.GetProperty, null, CultureInfo.InvariantCulture); - temp = new PrivateObject(next); - } - - this.target = temp.target; - this.originalType = temp.originalType; - } - - /// - /// Initializes a new instance of the class that wraps the - /// specified type. - /// - /// Name of the assembly - /// fully qualified name - /// Argmenets to pass to the constructor - public PrivateObject(string assemblyName, string typeName, params object[] args) - : this(assemblyName, typeName, null, args) - { - } - - /// - /// Initializes a new instance of the class that wraps the - /// specified type. - /// - /// Name of the assembly - /// fully qualified name - /// An array of objects representing the number, order, and type of the parameters for the constructor to get - /// Argmenets to pass to the constructor - public PrivateObject(string assemblyName, string typeName, Type[] parameterTypes, object[] args) - : this(Type.GetType(string.Format(CultureInfo.InvariantCulture, "{0}, {1}", typeName, assemblyName), false), parameterTypes, args) - { - } - - /// - /// Initializes a new instance of the class that wraps the - /// specified type. - /// - /// type of the object to create - /// Argmenets to pass to the constructor - public PrivateObject(Type type, params object[] args) - : this(type, null, args) - { - } - - /// - /// Initializes a new instance of the class that wraps the - /// specified type. - /// - /// type of the object to create - /// An array of objects representing the number, order, and type of the parameters for the constructor to get - /// Argmenets to pass to the constructor - public PrivateObject(Type type, Type[] parameterTypes, object[] args) - { - object o; - if (parameterTypes != null) - { - ConstructorInfo ci = type.GetConstructor(BindToEveryThing, null, parameterTypes, null); - if (ci == null) - { - throw new ArgumentException("The constructor with the specified signature could not be found. 
You might need to regenerate your private accessor, or the member may be private and defined on a base class. If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor."); - } - - try - { - o = ci.Invoke(args); - } - catch (TargetInvocationException e) - { - Debug.Assert(e.InnerException != null, "Inner exception should not be null."); - if (e.InnerException != null) - { - throw e.InnerException; - } - - throw; - } - } - else - { - o = Activator.CreateInstance(type, constructorFlags, null, args, null); - } - - this.ConstructFrom(o); - } - - /// - /// Initializes a new instance of the class that wraps - /// the given object. - /// - /// object to wrap - public PrivateObject(object obj) - { - this.ConstructFrom(obj); - } - - /// - /// Initializes a new instance of the class that wraps - /// the given object. - /// - /// object to wrap - /// PrivateType object - public PrivateObject(object obj, PrivateType type) - { - this.target = obj; - this.originalType = type.ReferencedType; - } - - /// - /// Gets or sets the target - /// - public object Target - { - get - { - return this.target; - } - - set - { - this.target = value; - this.originalType = value.GetType(); - } - } - - /// - /// Gets the type of underlying object - /// - public Type RealType - { - get - { - return this.originalType; - } - } - - private Dictionary> GenericMethodCache - { - get - { - if (this.methodCache == null) - { - this.BuildGenericMethodCacheForType(this.originalType); - } - - Debug.Assert(this.methodCache != null, "Invalid method cache for type."); - - return this.methodCache; - } - } - - /// - /// returns the hash code of the target object - /// - /// int representing hashcode of the target object - public override int GetHashCode() - { - Debug.Assert(this.target != null, "target should not be null."); - return this.target.GetHashCode(); - } - - /// - /// Equals - /// - /// Object with whom to compare - /// returns true if the objects are equal. - public override bool Equals(object obj) - { - if (this != obj) - { - Debug.Assert(this.target != null, "target should not be null."); - if (typeof(PrivateObject) == obj?.GetType()) - { - return this.target.Equals(((PrivateObject)obj).target); - } - else - { - return false; - } - } - - return true; - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// Arguments to pass to the member to invoke. - /// Result of method call - public object Invoke(string name, params object[] args) - { - return this.Invoke(name, null, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// An array of objects representing the number, order, and type of the parameters for the method to get. - /// Arguments to pass to the member to invoke. - /// Result of method call - public object Invoke(string name, Type[] parameterTypes, object[] args) - { - return this.Invoke(name, parameterTypes, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// An array of objects representing the number, order, and type of the parameters for the method to get. - /// Arguments to pass to the member to invoke. - /// An array of types corresponding to the types of the generic arguments. 
- /// Result of method call - public object Invoke(string name, Type[] parameterTypes, object[] args, Type[] typeArguments) - { - return this.Invoke(name, BindToEveryThing, parameterTypes, args, CultureInfo.InvariantCulture, typeArguments); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// Arguments to pass to the member to invoke. - /// Culture info - /// Result of method call - public object Invoke(string name, object[] args, CultureInfo culture) - { - return this.Invoke(name, null, args, culture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// An array of objects representing the number, order, and type of the parameters for the method to get. - /// Arguments to pass to the member to invoke. - /// Culture info - /// Result of method call - public object Invoke(string name, Type[] parameterTypes, object[] args, CultureInfo culture) - { - return this.Invoke(name, BindToEveryThing, parameterTypes, args, culture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// A bitmask comprised of one or more that specify how the search is conducted. - /// Arguments to pass to the member to invoke. - /// Result of method call - public object Invoke(string name, BindingFlags bindingFlags, params object[] args) - { - return this.Invoke(name, bindingFlags, null, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// A bitmask comprised of one or more that specify how the search is conducted. - /// An array of objects representing the number, order, and type of the parameters for the method to get. - /// Arguments to pass to the member to invoke. - /// Result of method call - public object Invoke(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args) - { - return this.Invoke(name, bindingFlags, parameterTypes, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// A bitmask comprised of one or more that specify how the search is conducted. - /// Arguments to pass to the member to invoke. - /// Culture info - /// Result of method call - public object Invoke(string name, BindingFlags bindingFlags, object[] args, CultureInfo culture) - { - return this.Invoke(name, bindingFlags, null, args, culture); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// A bitmask comprised of one or more that specify how the search is conducted. - /// An array of objects representing the number, order, and type of the parameters for the method to get. - /// Arguments to pass to the member to invoke. - /// Culture info - /// Result of method call - public object Invoke(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args, CultureInfo culture) - { - return this.Invoke(name, bindingFlags, parameterTypes, args, culture, null); - } - - /// - /// Invokes the specified method - /// - /// Name of the method - /// A bitmask comprised of one or more that specify how the search is conducted. - /// An array of objects representing the number, order, and type of the parameters for the method to get. - /// Arguments to pass to the member to invoke. - /// Culture info - /// An array of types corresponding to the types of the generic arguments. 
- /// Result of method call - public object Invoke(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args, CultureInfo culture, Type[] typeArguments) - { - if (parameterTypes != null) - { - bindingFlags |= BindToEveryThing | BindingFlags.Instance; - - // Fix up the parameter types - MethodInfo member = this.originalType.GetMethod(name, bindingFlags, null, parameterTypes, null); - - // If the method was not found and type arguments were provided for generic paramaters, - // attempt to look up a generic method. - if ((member == null) && (typeArguments != null)) - { - // This method may contain generic parameters...if so, the previous call to - // GetMethod() will fail because it doesn't fully support generic parameters. - - // Look in the method cache to see if there is a generic method - // on the incoming type that contains the correct signature. - member = this.GetGenericMethodFromCache(name, parameterTypes, typeArguments, bindingFlags, null); - } - - if (member == null) - { - throw new ArgumentException( - string.Format(CultureInfo.CurrentCulture, "The member specified ({0}) could not be found. You might need to regenerate your private accessor, or the member may be private and defined on a base class. If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor.", name)); - } - - try - { - if (member.IsGenericMethodDefinition) - { - MethodInfo constructed = member.MakeGenericMethod(typeArguments); - return constructed.Invoke(this.target, bindingFlags, null, args, culture); - } - else - { - return member.Invoke(this.target, bindingFlags, null, args, culture); - } - } - catch (TargetInvocationException e) - { - Debug.Assert(e.InnerException != null, "Inner exception should not be null."); - if (e.InnerException != null) - { - throw e.InnerException; - } - - throw; - } - } - else - { - return this.InvokeHelper(name, bindingFlags | BindingFlags.InvokeMethod, args, culture); - } - } - - /// - /// Gets the array element using array of subsrcipts for each dimension - /// - /// Name of the member - /// the indices of array - /// An arrya of elements. - public object GetArrayElement(string name, params int[] indices) - { - return this.GetArrayElement(name, BindToEveryThing, indices); - } - - /// - /// Sets the array element using array of subsrcipts for each dimension - /// - /// Name of the member - /// Value to set - /// the indices of array - public void SetArrayElement(string name, object value, params int[] indices) - { - this.SetArrayElement(name, BindToEveryThing, value, indices); - } - - /// - /// Gets the array element using array of subsrcipts for each dimension - /// - /// Name of the member - /// A bitmask comprised of one or more that specify how the search is conducted. - /// the indices of array - /// An arrya of elements. - public object GetArrayElement(string name, BindingFlags bindingFlags, params int[] indices) - { - Array arr = (Array)this.InvokeHelper(name, BindingFlags.GetField | bindingFlags, null, CultureInfo.InvariantCulture); - return arr.GetValue(indices); - } - - /// - /// Sets the array element using array of subsrcipts for each dimension - /// - /// Name of the member - /// A bitmask comprised of one or more that specify how the search is conducted. 
- /// Value to set - /// the indices of array - public void SetArrayElement(string name, BindingFlags bindingFlags, object value, params int[] indices) - { - Array arr = (Array)this.InvokeHelper(name, BindingFlags.GetField | bindingFlags, null, CultureInfo.InvariantCulture); - arr.SetValue(value, indices); - } - - /// - /// Get the field - /// - /// Name of the field - /// The field. - public object GetField(string name) - { - return this.GetField(name, BindToEveryThing); - } - - /// - /// Sets the field - /// - /// Name of the field - /// value to set - public void SetField(string name, object value) - { - this.SetField(name, BindToEveryThing, value); - } - - /// - /// Gets the field - /// - /// Name of the field - /// A bitmask comprised of one or more that specify how the search is conducted. - /// The field. - public object GetField(string name, BindingFlags bindingFlags) - { - return this.InvokeHelper(name, BindingFlags.GetField | bindingFlags, null, CultureInfo.InvariantCulture); - } - - /// - /// Sets the field - /// - /// Name of the field - /// A bitmask comprised of one or more that specify how the search is conducted. - /// value to set - public void SetField(string name, BindingFlags bindingFlags, object value) - { - this.InvokeHelper(name, BindingFlags.SetField | bindingFlags, new object[] { value }, CultureInfo.InvariantCulture); - } - - /// - /// Get the field or property - /// - /// Name of the field or property - /// The field or property. - public object GetFieldOrProperty(string name) - { - return this.GetFieldOrProperty(name, BindToEveryThing); - } - - /// - /// Sets the field or property - /// - /// Name of the field or property - /// value to set - public void SetFieldOrProperty(string name, object value) - { - this.SetFieldOrProperty(name, BindToEveryThing, value); - } - - /// - /// Gets the field or property - /// - /// Name of the field or property - /// A bitmask comprised of one or more that specify how the search is conducted. - /// The field or property. - public object GetFieldOrProperty(string name, BindingFlags bindingFlags) - { - return this.InvokeHelper(name, BindingFlags.GetField | BindingFlags.GetProperty | bindingFlags, null, CultureInfo.InvariantCulture); - } - - /// - /// Sets the field or property - /// - /// Name of the field or property - /// A bitmask comprised of one or more that specify how the search is conducted. - /// value to set - public void SetFieldOrProperty(string name, BindingFlags bindingFlags, object value) - { - this.InvokeHelper(name, BindingFlags.SetField | BindingFlags.SetProperty | bindingFlags, new object[] { value }, CultureInfo.InvariantCulture); - } - - /// - /// Gets the property - /// - /// Name of the property - /// Arguments to pass to the member to invoke. - /// The property. - public object GetProperty(string name, params object[] args) - { - return this.GetProperty(name, null, args); - } - - /// - /// Gets the property - /// - /// Name of the property - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// Arguments to pass to the member to invoke. - /// The property. - public object GetProperty(string name, Type[] parameterTypes, object[] args) - { - return this.GetProperty(name, BindToEveryThing, parameterTypes, args); - } - - /// - /// Set the property - /// - /// Name of the property - /// value to set - /// Arguments to pass to the member to invoke. 
- public void SetProperty(string name, object value, params object[] args) - { - this.SetProperty(name, null, value, args); - } - - /// - /// Set the property - /// - /// Name of the property - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// value to set - /// Arguments to pass to the member to invoke. - public void SetProperty(string name, Type[] parameterTypes, object value, object[] args) - { - this.SetProperty(name, BindToEveryThing, value, parameterTypes, args); - } - - /// - /// Gets the property - /// - /// Name of the property - /// A bitmask comprised of one or more that specify how the search is conducted. - /// Arguments to pass to the member to invoke. - /// The property. - public object GetProperty(string name, BindingFlags bindingFlags, params object[] args) - { - return this.GetProperty(name, bindingFlags, null, args); - } - - /// - /// Gets the property - /// - /// Name of the property - /// A bitmask comprised of one or more that specify how the search is conducted. - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// Arguments to pass to the member to invoke. - /// The property. - public object GetProperty(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args) - { - if (parameterTypes != null) - { - PropertyInfo pi = this.originalType.GetProperty(name, bindingFlags, null, null, parameterTypes, null); - if (pi == null) - { - throw new ArgumentException( - string.Format(CultureInfo.CurrentCulture, "The member specified ({0}) could not be found. You might need to regenerate your private accessor, or the member may be private and defined on a base class. If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor.", name)); - } - - return pi.GetValue(this.target, args); - } - else - { - return this.InvokeHelper(name, bindingFlags | BindingFlags.GetProperty, args, null); - } - } - - /// - /// Sets the property - /// - /// Name of the property - /// A bitmask comprised of one or more that specify how the search is conducted. - /// value to set - /// Arguments to pass to the member to invoke. - public void SetProperty(string name, BindingFlags bindingFlags, object value, params object[] args) - { - this.SetProperty(name, bindingFlags, value, null, args); - } - - /// - /// Sets the property - /// - /// Name of the property - /// A bitmask comprised of one or more that specify how the search is conducted. - /// value to set - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// Arguments to pass to the member to invoke. - public void SetProperty(string name, BindingFlags bindingFlags, object value, Type[] parameterTypes, object[] args) - { - if (parameterTypes != null) - { - PropertyInfo pi = this.originalType.GetProperty(name, bindingFlags, null, null, parameterTypes, null); - if (pi == null) - { - throw new ArgumentException( - string.Format(CultureInfo.CurrentCulture, "The member specified ({0}) could not be found. You might need to regenerate your private accessor, or the member may be private and defined on a base class. If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor.", name)); - } - - pi.SetValue(this.target, value, args); - } - else - { - object[] pass = new object[(args?.Length ?? 
0) + 1]; - pass[0] = value; - args?.CopyTo(pass, 1); - this.InvokeHelper(name, bindingFlags | BindingFlags.SetProperty, pass, null); - } - } - - /// - /// Validate access string - /// - /// access string - private static void ValidateAccessString(string access) - { - if (access.Length == 0) - { - throw new ArgumentException("Access string has invalid syntax."); - } - - string[] arr = access.Split('.'); - foreach (string str in arr) - { - if ((str.Length == 0) || (str.IndexOfAny(new char[] { ' ', '\t', '\n' }) != -1)) - { - throw new ArgumentException("Access string has invalid syntax."); - } - } - } - - /// - /// Invokes the memeber - /// - /// Name of the member - /// Additional attributes - /// Arguments for the invocation - /// Culture - /// Result of the invocation - private object InvokeHelper(string name, BindingFlags bindingFlags, object[] args, CultureInfo culture) - { - Debug.Assert(this.target != null, "Internal Error: Null reference is returned for internal object"); - - // Invoke the actual Method - try - { - return this.originalType.InvokeMember(name, bindingFlags, null, this.target, args, culture); - } - catch (TargetInvocationException e) - { - Debug.Assert(e.InnerException != null, "Inner exception should not be null."); - if (e.InnerException != null) - { - throw e.InnerException; - } - - throw; - } - } - - private void ConstructFrom(object obj) - { - this.target = obj; - this.originalType = obj.GetType(); - } - - private void BuildGenericMethodCacheForType(Type t) - { - Debug.Assert(t != null, "type should not be null."); - this.methodCache = new Dictionary>(); - - MethodInfo[] members = t.GetMethods(BindToEveryThing); - LinkedList listByName; // automatically initialized to null - - foreach (MethodInfo member in members) - { - if (member.IsGenericMethod || member.IsGenericMethodDefinition) - { - if (!this.GenericMethodCache.TryGetValue(member.Name, out listByName)) - { - listByName = new LinkedList(); - this.GenericMethodCache.Add(member.Name, listByName); - } - - Debug.Assert(listByName != null, "list should not be null."); - listByName.AddLast(member); - } - } - } - - /// - /// Extracts the most appropriate generic method signature from the current private type. - /// - /// The name of the method in which to search the signature cache. - /// An array of types corresponding to the types of the parameters in which to search. - /// An array of types corresponding to the types of the generic arguments. - /// to further filter the method signatures. - /// Modifiers for parameters. - /// A methodinfo instance. - private MethodInfo GetGenericMethodFromCache(string methodName, Type[] parameterTypes, Type[] typeArguments, BindingFlags bindingFlags, ParameterModifier[] modifiers) - { - Debug.Assert(!string.IsNullOrEmpty(methodName), "Invalid method name."); - Debug.Assert(parameterTypes != null, "Invalid parameter type array."); - Debug.Assert(typeArguments != null, "Invalid type arguments array."); - - // Build a preliminary list of method candidates that contain roughly the same signature. - var methodCandidates = this.GetMethodCandidates(methodName, parameterTypes, typeArguments, bindingFlags, modifiers); - - // Search of ambiguous methods (methods with the same signature). 
- MethodInfo[] finalCandidates = new MethodInfo[methodCandidates.Count]; - methodCandidates.CopyTo(finalCandidates, 0); - - if ((parameterTypes != null) && (parameterTypes.Length == 0)) - { - for (int i = 0; i < finalCandidates.Length; i++) - { - MethodInfo methodInfo = finalCandidates[i]; - - if (!RuntimeTypeHelper.CompareMethodSigAndName(methodInfo, finalCandidates[0])) - { - throw new AmbiguousMatchException(); - } - } - - // All the methods have the exact same name and sig so return the most derived one. - return RuntimeTypeHelper.FindMostDerivedNewSlotMeth(finalCandidates, finalCandidates.Length) as MethodInfo; - } - - // Now that we have a preliminary list of candidates, select the most appropriate one. - return RuntimeTypeHelper.SelectMethod(bindingFlags, finalCandidates, parameterTypes, modifiers) as MethodInfo; - } - - private LinkedList GetMethodCandidates(string methodName, Type[] parameterTypes, Type[] typeArguments, BindingFlags bindingFlags, ParameterModifier[] modifiers) - { - Debug.Assert(!string.IsNullOrEmpty(methodName), "methodName should not be null."); - Debug.Assert(parameterTypes != null, "parameterTypes should not be null."); - Debug.Assert(typeArguments != null, "typeArguments should not be null."); - - LinkedList methodCandidates = new LinkedList(); - LinkedList methods = null; - - if (!this.GenericMethodCache.TryGetValue(methodName, out methods)) - { - return methodCandidates; - } - - Debug.Assert(methods != null, "methods should not be null."); - - foreach (MethodInfo candidate in methods) - { - bool paramMatch = true; - ParameterInfo[] candidateParams = null; - Type[] genericArgs = candidate.GetGenericArguments(); - Type sourceParameterType = null; - - if (genericArgs.Length != typeArguments.Length) - { - continue; - } - - // Since we can't just get the correct MethodInfo from Reflection, - // we will just match the number of parameters, their order, and their type - var methodCandidate = candidate; - candidateParams = methodCandidate.GetParameters(); - - if (candidateParams.Length != parameterTypes.Length) - { - continue; - } - - // Exact binding - if ((bindingFlags & BindingFlags.ExactBinding) != 0) - { - int i = 0; - - foreach (ParameterInfo candidateParam in candidateParams) - { - sourceParameterType = parameterTypes[i++]; - - if (candidateParam.ParameterType.ContainsGenericParameters) - { - // Since we have a generic parameter here, just make sure the IsArray matches. - if (candidateParam.ParameterType.IsArray != sourceParameterType.IsArray) - { - paramMatch = false; - break; - } - } - else - { - if (candidateParam.ParameterType != sourceParameterType) - { - paramMatch = false; - break; - } - } - } - - if (paramMatch) - { - methodCandidates.AddLast(methodCandidate); - continue; - } - } - else - { - methodCandidates.AddLast(methodCandidate); - } - } - - return methodCandidates; - } - } - - /// - /// This class represents a private class for the Private Accessor functionality. - /// - public class PrivateType - { - /// - /// Binds to everything - /// - private const BindingFlags BindToEveryThing = BindingFlags.Default - | BindingFlags.NonPublic | BindingFlags.Instance - | BindingFlags.Public | BindingFlags.Static | BindingFlags.FlattenHierarchy; - - /// - /// The wrapped type. - /// - private Type type; - - /// - /// Initializes a new instance of the class that contains the private type. 
- /// - /// Assembly name - /// fully qualified name of the - public PrivateType(string assemblyName, string typeName) - { - Assembly asm = Assembly.Load(assemblyName); - - this.type = asm.GetType(typeName, true); - } - - /// - /// Initializes a new instance of the class that contains - /// the private type from the type object - /// - /// The wrapped Type to create. - public PrivateType(Type type) - { - if (type == null) - { - throw new ArgumentNullException("type"); - } - - this.type = type; - } - - /// - /// Gets the referenced type - /// - public Type ReferencedType => this.type; - - /// - /// Invokes static member - /// - /// Name of the member to InvokeHelper - /// Arguements to the invoction - /// Result of invocation - public object InvokeStatic(string name, params object[] args) - { - return this.InvokeStatic(name, null, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes static member - /// - /// Name of the member to InvokeHelper - /// An array of objects representing the number, order, and type of the parameters for the method to invoke - /// Arguements to the invoction - /// Result of invocation - public object InvokeStatic(string name, Type[] parameterTypes, object[] args) - { - return this.InvokeStatic(name, parameterTypes, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes static member - /// - /// Name of the member to InvokeHelper - /// An array of objects representing the number, order, and type of the parameters for the method to invoke - /// Arguements to the invoction - /// An array of types corresponding to the types of the generic arguments. - /// Result of invocation - public object InvokeStatic(string name, Type[] parameterTypes, object[] args, Type[] typeArguments) - { - return this.InvokeStatic(name, BindToEveryThing, parameterTypes, args, CultureInfo.InvariantCulture, typeArguments); - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// Arguements to the invocation - /// Culture - /// Result of invocation - public object InvokeStatic(string name, object[] args, CultureInfo culture) - { - return this.InvokeStatic(name, null, args, culture); - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// An array of objects representing the number, order, and type of the parameters for the method to invoke - /// Arguements to the invocation - /// Culture info - /// Result of invocation - public object InvokeStatic(string name, Type[] parameterTypes, object[] args, CultureInfo culture) - { - return this.InvokeStatic(name, BindingFlags.InvokeMethod, parameterTypes, args, culture); - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// Additional invocation attributes - /// Arguements to the invocation - /// Result of invocation - public object InvokeStatic(string name, BindingFlags bindingFlags, params object[] args) - { - return this.InvokeStatic(name, bindingFlags, null, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// Additional invocation attributes - /// An array of objects representing the number, order, and type of the parameters for the method to invoke - /// Arguements to the invocation - /// Result of invocation - public object InvokeStatic(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args) - { - return this.InvokeStatic(name, bindingFlags, parameterTypes, args, CultureInfo.InvariantCulture); - } - - /// - /// Invokes the static method - /// - /// Name of the member - 
/// Additional invocation attributes - /// Arguements to the invocation - /// Culture - /// Result of invocation - public object InvokeStatic(string name, BindingFlags bindingFlags, object[] args, CultureInfo culture) - { - return this.InvokeStatic(name, bindingFlags, null, args, culture); - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// Additional invocation attributes - /// /// An array of objects representing the number, order, and type of the parameters for the method to invoke - /// Arguements to the invocation - /// Culture - /// Result of invocation - public object InvokeStatic(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args, CultureInfo culture) - { - return this.InvokeStatic(name, bindingFlags, parameterTypes, args, culture, null); - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// Additional invocation attributes - /// /// An array of objects representing the number, order, and type of the parameters for the method to invoke - /// Arguements to the invocation - /// Culture - /// An array of types corresponding to the types of the generic arguments. - /// Result of invocation - public object InvokeStatic(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args, CultureInfo culture, Type[] typeArguments) - { - if (parameterTypes != null) - { - MethodInfo member = this.type.GetMethod(name, bindingFlags | BindToEveryThing | BindingFlags.Static, null, parameterTypes, null); - if (member == null) - { - throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "The member specified ({0}) could not be found. You might need to regenerate your private accessor, or the member may be private and defined on a base class. If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor.", name)); - } - - try - { - if (member.IsGenericMethodDefinition) - { - MethodInfo constructed = member.MakeGenericMethod(typeArguments); - return constructed.Invoke(null, bindingFlags, null, args, culture); - } - else - { - return member.Invoke(null, bindingFlags, null, args, culture); - } - } - catch (TargetInvocationException e) - { - Debug.Assert(e.InnerException != null, "Inner Exception should not be null."); - if (e.InnerException != null) - { - throw e.InnerException; - } - - throw; - } - } - else - { - return this.InvokeHelperStatic(name, bindingFlags | BindingFlags.InvokeMethod, args, culture); - } - } - - /// - /// Gets the element in static array - /// - /// Name of the array - /// - /// A one-dimensional array of 32-bit integers that represent the indexes specifying - /// the position of the element to get. For instance, to access a[10][11] the indices would be {10,11} - /// - /// element at the specified location - public object GetStaticArrayElement(string name, params int[] indices) - { - return this.GetStaticArrayElement(name, BindToEveryThing, indices); - } - - /// - /// Sets the memeber of the static array - /// - /// Name of the array - /// value to set - /// - /// A one-dimensional array of 32-bit integers that represent the indexes specifying - /// the position of the element to set. 
For instance, to access a[10][11] the array would be {10,11} - /// - public void SetStaticArrayElement(string name, object value, params int[] indices) - { - this.SetStaticArrayElement(name, BindToEveryThing, value, indices); - } - - /// - /// Gets the element in satatic array - /// - /// Name of the array - /// Additional InvokeHelper attributes - /// - /// A one-dimensional array of 32-bit integers that represent the indexes specifying - /// the position of the element to get. For instance, to access a[10][11] the array would be {10,11} - /// - /// element at the spcified location - public object GetStaticArrayElement(string name, BindingFlags bindingFlags, params int[] indices) - { - Array arr = (Array)this.InvokeHelperStatic(name, BindingFlags.GetField | BindingFlags.GetProperty | bindingFlags, null, CultureInfo.InvariantCulture); - return arr.GetValue(indices); - } - - /// - /// Sets the memeber of the static array - /// - /// Name of the array - /// Additional InvokeHelper attributes - /// value to set - /// - /// A one-dimensional array of 32-bit integers that represent the indexes specifying - /// the position of the element to set. For instance, to access a[10][11] the array would be {10,11} - /// - public void SetStaticArrayElement(string name, BindingFlags bindingFlags, object value, params int[] indices) - { - Array arr = (Array)this.InvokeHelperStatic(name, BindingFlags.GetField | BindingFlags.GetProperty | BindingFlags.Static | bindingFlags, null, CultureInfo.InvariantCulture); - arr.SetValue(value, indices); - } - - /// - /// Gets the static field - /// - /// Name of the field - /// The static field. - public object GetStaticField(string name) - { - return this.GetStaticField(name, BindToEveryThing); - } - - /// - /// Sets the static field - /// - /// Name of the field - /// Arguement to the invocation - public void SetStaticField(string name, object value) - { - this.SetStaticField(name, BindToEveryThing, value); - } - - /// - /// Gets the static field using specified InvokeHelper attributes - /// - /// Name of the field - /// Additional invocation attributes - /// The static field. - public object GetStaticField(string name, BindingFlags bindingFlags) - { - return this.InvokeHelperStatic(name, BindingFlags.GetField | BindingFlags.Static | bindingFlags, null, CultureInfo.InvariantCulture); - } - - /// - /// Sets the static field using binding attributes - /// - /// Name of the field - /// Additional InvokeHelper attributes - /// Arguement to the invocation - public void SetStaticField(string name, BindingFlags bindingFlags, object value) - { - this.InvokeHelperStatic(name, BindingFlags.SetField | bindingFlags | BindingFlags.Static, new[] { value }, CultureInfo.InvariantCulture); - } - - /// - /// Gets the static field or property - /// - /// Name of the field or property - /// The static field or property. - public object GetStaticFieldOrProperty(string name) - { - return this.GetStaticFieldOrProperty(name, BindToEveryThing); - } - - /// - /// Sets the static field or property - /// - /// Name of the field or property - /// Value to be set to field or property - public void SetStaticFieldOrProperty(string name, object value) - { - this.SetStaticFieldOrProperty(name, BindToEveryThing, value); - } - - /// - /// Gets the static field or property using specified InvokeHelper attributes - /// - /// Name of the field or property - /// Additional invocation attributes - /// The static field or property. 
- public object GetStaticFieldOrProperty(string name, BindingFlags bindingFlags) - { - return this.InvokeHelperStatic(name, BindingFlags.GetField | BindingFlags.GetProperty | BindingFlags.Static | bindingFlags, null, CultureInfo.InvariantCulture); - } - - /// - /// Sets the static field or property using binding attributes - /// - /// Name of the field or property - /// Additional invocation attributes - /// Value to be set to field or property - public void SetStaticFieldOrProperty(string name, BindingFlags bindingFlags, object value) - { - this.InvokeHelperStatic(name, BindingFlags.SetField | BindingFlags.SetProperty | bindingFlags | BindingFlags.Static, new[] { value }, CultureInfo.InvariantCulture); - } - - /// - /// Gets the static property - /// - /// Name of the field or property - /// Arguements to the invocation - /// The static property. - public object GetStaticProperty(string name, params object[] args) - { - return this.GetStaticProperty(name, BindToEveryThing, args); - } - - /// - /// Sets the static property - /// - /// Name of the property - /// Value to be set to field or property - /// Arguments to pass to the member to invoke. - public void SetStaticProperty(string name, object value, params object[] args) - { - this.SetStaticProperty(name, BindToEveryThing, value, null, args); - } - - /// - /// Sets the static property - /// - /// Name of the property - /// Value to be set to field or property - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// Arguments to pass to the member to invoke. - public void SetStaticProperty(string name, object value, Type[] parameterTypes, object[] args) - { - this.SetStaticProperty(name, BindingFlags.SetProperty, value, parameterTypes, args); - } - - /// - /// Gets the static property - /// - /// Name of the property - /// Additional invocation attributes. - /// Arguments to pass to the member to invoke. - /// The static property. - public object GetStaticProperty(string name, BindingFlags bindingFlags, params object[] args) - { - return this.GetStaticProperty(name, BindingFlags.GetProperty | BindingFlags.Static | bindingFlags, null, args); - } - - /// - /// Gets the static property - /// - /// Name of the property - /// Additional invocation attributes. - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// Arguments to pass to the member to invoke. - /// The static property. - public object GetStaticProperty(string name, BindingFlags bindingFlags, Type[] parameterTypes, object[] args) - { - if (parameterTypes != null) - { - PropertyInfo pi = this.type.GetProperty(name, bindingFlags | BindingFlags.Static, null, null, parameterTypes, null); - if (pi == null) - { - throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "The member specified ({0}) could not be found. You might need to regenerate your private accessor, or the member may be private and defined on a base class. If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor.", name)); - } - - return pi.GetValue(null, args); - } - else - { - return this.InvokeHelperStatic(name, bindingFlags | BindingFlags.GetProperty, args, null); - } - } - - /// - /// Sets the static property - /// - /// Name of the property - /// Additional invocation attributes. - /// Value to be set to field or property - /// Optional index values for indexed properties. The indexes of indexed properties are zero-based. 
This value should be null for non-indexed properties. - public void SetStaticProperty(string name, BindingFlags bindingFlags, object value, params object[] args) - { - this.SetStaticProperty(name, bindingFlags, value, null, args); - } - - /// - /// Sets the static property - /// - /// Name of the property - /// Additional invocation attributes. - /// Value to be set to field or property - /// An array of objects representing the number, order, and type of the parameters for the indexed property. - /// Arguments to pass to the member to invoke. - public void SetStaticProperty(string name, BindingFlags bindingFlags, object value, Type[] parameterTypes, object[] args) - { - if (parameterTypes != null) - { - PropertyInfo pi = this.type.GetProperty(name, bindingFlags | BindingFlags.Static, null, null, parameterTypes, null); - if (pi == null) - { - throw new ArgumentException( - string.Format(CultureInfo.CurrentCulture, "The member specified ({0}) could not be found. You might need to regenerate your private accessor, or the member may be private and defined on a base class. If the latter is true, you need to pass the type that defines the member into PrivateObject's constructor.", name)); - } - - pi.SetValue(null, value, args); - } - else - { - object[] pass = new object[(args?.Length ?? 0) + 1]; - pass[0] = value; - args?.CopyTo(pass, 1); - this.InvokeHelperStatic(name, bindingFlags | BindingFlags.SetProperty, pass, null); - } - } - - /// - /// Invokes the static method - /// - /// Name of the member - /// Additional invocation attributes - /// Arguements to the invocation - /// Culture - /// Result of invocation - private object InvokeHelperStatic(string name, BindingFlags bindingFlags, object[] args, CultureInfo culture) - { - try - { - return this.type.InvokeMember(name, bindingFlags | BindToEveryThing | BindingFlags.Static, null, null, args, culture); - } - catch (TargetInvocationException e) - { - Debug.Assert(e.InnerException != null, "Inner Exception should not be null."); - if (e.InnerException != null) - { - throw e.InnerException; - } - - throw; - } - } - } - - /// - /// Provides method signature discovery for generic methods. - /// - internal class RuntimeTypeHelper - { - /// - /// Compares the method signatures of these two methods. - /// - /// Method1 - /// Method2 - /// True if they are similiar. - internal static bool CompareMethodSigAndName(MethodBase m1, MethodBase m2) - { - ParameterInfo[] params1 = m1.GetParameters(); - ParameterInfo[] params2 = m2.GetParameters(); - - if (params1.Length != params2.Length) - { - return false; - } - - int numParams = params1.Length; - for (int i = 0; i < numParams; i++) - { - if (params1[i].ParameterType != params2[i].ParameterType) - { - return false; - } - } - - return true; - } - - /// - /// Gets the hierarchy depth from the base type of the provided type. - /// - /// The type. - /// The depth. - internal static int GetHierarchyDepth(Type t) - { - int depth = 0; - - Type currentType = t; - do - { - depth++; - currentType = currentType.BaseType; - } - while (currentType != null); - - return depth; - } - - /// - /// Finds most dervied type with the provided information. - /// - /// Candidate matches. - /// Number of matches. - /// The most derived method. 
- internal static MethodBase FindMostDerivedNewSlotMeth(MethodBase[] match, int cMatches) - { - int deepestHierarchy = 0; - MethodBase methWithDeepestHierarchy = null; - - for (int i = 0; i < cMatches; i++) - { - // Calculate the depth of the hierarchy of the declaring type of the - // current method. - int currentHierarchyDepth = GetHierarchyDepth(match[i].DeclaringType); - - // Two methods with the same hierarchy depth are not allowed. This would - // mean that there are 2 methods with the same name and sig on a given type - // which is not allowed, unless one of them is vararg... - if (currentHierarchyDepth == deepestHierarchy) - { - if (methWithDeepestHierarchy != null) - { - Debug.Assert( - methWithDeepestHierarchy != null && ((match[i].CallingConvention & CallingConventions.VarArgs) - | (methWithDeepestHierarchy.CallingConvention & CallingConventions.VarArgs)) != 0, - "Calling conventions: " + match[i].CallingConvention + " - " + methWithDeepestHierarchy.CallingConvention); - } - - throw new AmbiguousMatchException(); - } - - // Check to see if this method is on the most derived class. - if (currentHierarchyDepth > deepestHierarchy) - { - deepestHierarchy = currentHierarchyDepth; - methWithDeepestHierarchy = match[i]; - } - } - - return methWithDeepestHierarchy; - } - - /// - /// Given a set of methods that match the base criteria, select a method based - /// upon an array of types. This method should return null if no method matches - /// the criteria. - /// - /// Binding specification. - /// Candidate matches - /// Types - /// Parameter modifiers. - /// Matching method. Null if none matches. - internal static MethodBase SelectMethod(BindingFlags bindingAttr, MethodBase[] match, Type[] types, ParameterModifier[] modifiers) - { - if (match == null) - { - throw new ArgumentNullException("match"); - } - - int i; - int j; - - Type[] realTypes = new Type[types.Length]; - for (i = 0; i < types.Length; i++) - { - realTypes[i] = types[i].UnderlyingSystemType; - } - - types = realTypes; - - // If there are no methods to match to, then return null, indicating that no method - // matches the criteria - if (match.Length == 0) - { - return null; - } - - // Find all the methods that can be described by the types parameter. - // Remove all of them that cannot. 
- int curIdx = 0; - for (i = 0; i < match.Length; i++) - { - ParameterInfo[] par = match[i].GetParameters(); - if (par.Length != types.Length) - { - continue; - } - - for (j = 0; j < types.Length; j++) - { - Type pCls = par[j].ParameterType; - - if (pCls.ContainsGenericParameters) - { - if (pCls.IsArray != types[j].IsArray) - { - break; - } - } - else - { - if (pCls == types[j]) - { - continue; - } - - if (pCls == typeof(object)) - { - continue; - } - else - { - if (!pCls.IsAssignableFrom(types[j])) - { - break; - } - } - } - } - - if (j == types.Length) - { - match[curIdx++] = match[i]; - } - } - - if (curIdx == 0) - { - return null; - } - - if (curIdx == 1) - { - return match[0]; - } - - // Walk all of the methods looking the most specific method to invoke - int currentMin = 0; - bool ambig = false; - int[] paramOrder = new int[types.Length]; - for (i = 0; i < types.Length; i++) - { - paramOrder[i] = i; - } - - for (i = 1; i < curIdx; i++) - { - int newMin = FindMostSpecificMethod(match[currentMin], paramOrder, null, match[i], paramOrder, null, types, null); - if (newMin == 0) - { - ambig = true; - } - else - { - if (newMin == 2) - { - currentMin = i; - ambig = false; - currentMin = i; - } - } - } - - if (ambig) - { - throw new AmbiguousMatchException(); - } - - return match[currentMin]; - } - - /// - /// Finds the most specific method in the two methods provided. - /// - /// Method 1 - /// Parameter order for Method 1 - /// Paramter array type. - /// Method 2 - /// Parameter order for Method 2 - /// >Paramter array type. - /// Types to search in. - /// Args. - /// An int representing the match. - internal static int FindMostSpecificMethod( - MethodBase m1, - int[] paramOrder1, - Type paramArrayType1, - MethodBase m2, - int[] paramOrder2, - Type paramArrayType2, - Type[] types, - object[] args) - { - // Find the most specific method based on the parameters. - int res = FindMostSpecific( - m1.GetParameters(), - paramOrder1, - paramArrayType1, - m2.GetParameters(), - paramOrder2, - paramArrayType2, - types, - args); - - // If the match was not ambiguous then return the result. - if (res != 0) - { - return res; - } - - // Check to see if the methods have the exact same name and signature. - if (CompareMethodSigAndName(m1, m2)) - { - // Determine the depth of the declaring types for both methods. - int hierarchyDepth1 = GetHierarchyDepth(m1.DeclaringType); - int hierarchyDepth2 = GetHierarchyDepth(m2.DeclaringType); - - // The most derived method is the most specific one. - if (hierarchyDepth1 == hierarchyDepth2) - { - return 0; - } - else if (hierarchyDepth1 < hierarchyDepth2) - { - return 2; - } - else - { - return 1; - } - } - - // The match is ambiguous. - return 0; - } - - /// - /// Finds the most specific method in the two methods provided. - /// - /// Method 1 - /// Parameter order for Method 1 - /// Paramter array type. - /// Method 2 - /// Parameter order for Method 2 - /// >Paramter array type. - /// Types to search in. - /// Args. - /// An int representing the match. 
- internal static int FindMostSpecific( - ParameterInfo[] p1, - int[] paramOrder1, - Type paramArrayType1, - ParameterInfo[] p2, - int[] paramOrder2, - Type paramArrayType2, - Type[] types, - object[] args) - { - // A method using params is always less specific than one not using params - if (paramArrayType1 != null && paramArrayType2 == null) - { - return 2; - } - - if (paramArrayType2 != null && paramArrayType1 == null) - { - return 1; - } - - bool p1Less = false; - bool p2Less = false; - - for (int i = 0; i < types.Length; i++) - { - if (args != null && args[i] == Type.Missing) - { - continue; - } - - Type c1, c2; - - // If a param array is present, then either - // the user re-ordered the parameters in which case - // the argument to the param array is either an array - // in which case the params is conceptually ignored and so paramArrayType1 == null - // or the argument to the param array is a single element - // in which case paramOrder[i] == p1.Length - 1 for that element - // or the user did not re-order the parameters in which case - // the paramOrder array could contain indexes larger than p.Length - 1 - //// so any index >= p.Length - 1 is being put in the param array - - if (paramArrayType1 != null && paramOrder1[i] >= p1.Length - 1) - { - c1 = paramArrayType1; - } - else - { - c1 = p1[paramOrder1[i]].ParameterType; - } - - if (paramArrayType2 != null && paramOrder2[i] >= p2.Length - 1) - { - c2 = paramArrayType2; - } - else - { - c2 = p2[paramOrder2[i]].ParameterType; - } - - if (c1 == c2) - { - continue; - } - - if (c1.ContainsGenericParameters || c2.ContainsGenericParameters) - { - continue; - } - - switch (FindMostSpecificType(c1, c2, types[i])) - { - case 0: - return 0; - case 1: - p1Less = true; - break; - case 2: - p2Less = true; - break; - } - } - - // Two way p1Less and p2Less can be equal. All the arguments are the - // same they both equal false, otherwise there were things that both - // were the most specific type on.... - if (p1Less == p2Less) - { - // it's possible that the 2 methods have same sig and default param in which case we match the one - // with the same number of args but only if they were exactly the same (that is p1Less and p2Lees are both false) - if (!p1Less && p1.Length != p2.Length && args != null) - { - if (p1.Length == args.Length) - { - return 1; - } - else if (p2.Length == args.Length) - { - return 2; - } - } - - return 0; - } - else - { - return (p1Less == true) ? 1 : 2; - } - } - - /// - /// Finds the most specific type in the two provided. - /// - /// Type 1 - /// Type 2 - /// The defining type - /// An int representing the match. - internal static int FindMostSpecificType(Type c1, Type c2, Type t) - { - // If the two types are exact move on... 
- if (c1 == c2) - { - return 0; - } - - if (c1 == t) - { - return 1; - } - - if (c2 == t) - { - return 2; - } - - bool c1FromC2; - bool c2FromC1; - - if (c1.IsByRef || c2.IsByRef) - { - if (c1.IsByRef && c2.IsByRef) - { - c1 = c1.GetElementType(); - c2 = c2.GetElementType(); - } - else if (c1.IsByRef) - { - if (c1.GetElementType() == c2) - { - return 2; - } - - c1 = c1.GetElementType(); - } - else - { - if (c2.GetElementType() == c1) - { - return 1; - } - - c2 = c2.GetElementType(); - } - } - - if (c1.IsPrimitive && c2.IsPrimitive) - { - c1FromC2 = true; - c2FromC1 = true; - } - else - { - c1FromC2 = c1.IsAssignableFrom(c2); - c2FromC1 = c2.IsAssignableFrom(c1); - } - - if (c1FromC2 == c2FromC1) - { - return 0; - } - - if (c1FromC2) - { - return 2; - } - else - { - return 1; - } - } - } -} \ No newline at end of file diff --git a/API.Tests/Helpers/TestCaseGenerator.cs b/API.Tests/Helpers/TestCaseGenerator.cs new file mode 100644 index 000000000..57923dfb2 --- /dev/null +++ b/API.Tests/Helpers/TestCaseGenerator.cs @@ -0,0 +1,55 @@ +using System.Collections.Generic; +using System.IO; +using API.Services; + +namespace API.Tests.Helpers +{ + /// + /// Given a -testcase.txt file, will generate a folder with fake archive or book files. These files are just renamed txt files. + /// This currently is broken - you cannot create files from a unit test it seems + /// + public static class TestCaseGenerator + { + public static string GenerateFiles(string directory, string fileToExpand) + { + //var files = Directory.GetFiles(directory, fileToExpand); + var file = new FileInfo(fileToExpand); + if (!file.Exists && file.Name.EndsWith("-testcase.txt")) return string.Empty; + + var baseDirectory = TestCaseGenerator.CreateTestBase(fileToExpand, directory); + var filesToCreate = File.ReadLines(file.FullName); + foreach (var fileToCreate in filesToCreate) + { + // var folders = DirectoryService.GetFoldersTillRoot(directory, fileToCreate); + // foreach (var VARIABLE in COLLECTION) + // { + // + // } + File.Create(fileToCreate); + } + + + + + return baseDirectory; + } + + /// + /// Creates and returns a new base directory for data creation for a given testcase + /// + /// + /// + /// + private static string CreateTestBase(string file, string rootDirectory) + { + var baseDir = file.Split("-testcase.txt")[0]; + var newDirectory = Path.Join(rootDirectory, baseDir); + if (!Directory.Exists(newDirectory)) + { + new DirectoryInfo(newDirectory).Create(); + } + + return newDirectory; + } + } +} \ No newline at end of file diff --git a/API.Tests/Parser/BookParserTests.cs b/API.Tests/Parser/BookParserTests.cs new file mode 100644 index 000000000..0a43b7c73 --- /dev/null +++ b/API.Tests/Parser/BookParserTests.cs @@ -0,0 +1,15 @@ +using API.Services; +using Xunit; + +namespace API.Tests.Parser +{ + public class BookParserTests + { + [Theory] + [InlineData("Gifting The Wonderful World With Blessings! 
- 3 Side Stories [yuNS][Unknown]", "Gifting The Wonderful World With Blessings!")] + public void ParseSeriesTest(string filename, string expected) + { + Assert.Equal(expected, API.Parser.Parser.ParseSeries(filename)); + } + } +} \ No newline at end of file diff --git a/API.Tests/Parser/ComicParserTests.cs b/API.Tests/Parser/ComicParserTests.cs new file mode 100644 index 000000000..9d91a5feb --- /dev/null +++ b/API.Tests/Parser/ComicParserTests.cs @@ -0,0 +1,69 @@ +using Xunit; + +namespace API.Tests.Parser +{ + public class ComicParserTests + { + [Theory] + [InlineData("01 Spider-Man & Wolverine 01.cbr", "Spider-Man & Wolverine")] + [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "Asterix the Gladiator")] + [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "The First Asterix Frieze")] + [InlineData("Batman & Catwoman - Trail of the Gun 01", "Batman & Catwoman - Trail of the Gun")] + [InlineData("Batman & Daredevil - King of New York", "Batman & Daredevil - King of New York")] + [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "Batman & Grendel")] + [InlineData("Batman & Robin the Teen Wonder #0", "Batman & Robin the Teen Wonder")] + [InlineData("Batman & Wildcat (1 of 3)", "Batman & Wildcat")] + [InlineData("Batman And Superman World's Finest #01", "Batman And Superman World's Finest")] + [InlineData("Babe 01", "Babe")] + [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "Scott Pilgrim")] + [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")] + [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "Scott Pilgrim")] + [InlineData("Wolverine - Origins 003 (2006) (digital) (Minutemen-PhD)", "Wolverine - Origins")] + [InlineData("Invincible Vol 01 Family matters (2005) (Digital).cbr", "Invincible")] + public void ParseComicSeriesTest(string filename, string expected) + { + Assert.Equal(expected, API.Parser.Parser.ParseComicSeries(filename)); + } + + [Theory] + [InlineData("01 Spider-Man & Wolverine 01.cbr", "1")] + [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "4")] + [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")] + [InlineData("Batman & Catwoman - Trail of the Gun 01", "1")] + [InlineData("Batman & Daredevil - King of New York", "0")] + [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")] + [InlineData("Batman & Robin the Teen Wonder #0", "0")] + [InlineData("Batman & Wildcat (1 of 3)", "0")] + [InlineData("Batman And Superman World's Finest #01", "1")] + [InlineData("Babe 01", "1")] + [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "1")] + [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] + [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005)", "2")] + [InlineData("Superman v1 024 (09-10 1943)", "1")] + public void ParseComicVolumeTest(string filename, string expected) + { + Assert.Equal(expected, API.Parser.Parser.ParseComicVolume(filename)); + } + + [Theory] + [InlineData("01 Spider-Man & Wolverine 01.cbr", "0")] + [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")] + [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")] + [InlineData("Batman & Catwoman - Trail of the Gun 01", "0")] + [InlineData("Batman & Daredevil - King of New York", "0")] + [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")] + [InlineData("Batman & Robin the Teen Wonder #0", "0")] + [InlineData("Batman & Wildcat (1 of 3)", "1")] + [InlineData("Batman & Wildcat (2 of 3)", "2")] + [InlineData("Batman And Superman World's Finest #01", "0")] + [InlineData("Babe 01", "0")] + [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "1")] + [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] + [InlineData("Superman v1 024 (09-10 1943)", "24")] + [InlineData("Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr", "70.5")] + public void ParseComicChapterTest(string filename, string expected) + { + Assert.Equal(expected, API.Parser.Parser.ParseComicChapter(filename)); + } + } +} \ No newline at end of file diff --git a/API.Tests/ParserTest.cs b/API.Tests/Parser/MangaParserTests.cs similarity index 71% rename from API.Tests/ParserTest.cs rename to API.Tests/Parser/MangaParserTests.cs index 3534e75f8..1b2d4500b 100644 --- a/API.Tests/ParserTest.cs +++ b/API.Tests/Parser/MangaParserTests.cs @@ -1,18 +1,16 @@ -using System.Collections.Generic; +using System.Collections.Generic; using API.Entities.Enums; using API.Parser; using Xunit; using Xunit.Abstractions; -using static API.Parser.Parser; -namespace API.Tests +namespace API.Tests.Parser { - public class ParserTests + public class MangaParserTests { private readonly ITestOutputHelper _testOutputHelper; - - public ParserTests(ITestOutputHelper testOutputHelper) + public MangaParserTests(ITestOutputHelper testOutputHelper) { _testOutputHelper = testOutputHelper; } @@ -61,9 +59,10 @@ namespace API.Tests [InlineData("Gantz.V26.cbz", "26")] [InlineData("NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar", "4")] [InlineData("[Hidoi]_Amaenaideyo_MS_vol01_chp02.rar", "1")] + [InlineData("NEEDLESS_Vol.4_-_Simeon_6_v2_[SugoiSugoi].rar", "4")] public void ParseVolumeTest(string filename, string expected) { - Assert.Equal(expected, ParseVolume(filename)); + Assert.Equal(expected, API.Parser.Parser.ParseVolume(filename)); } [Theory] @@ -132,9 +131,10 @@ namespace API.Tests [InlineData("Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1", "Umineko no Naku Koro ni")] [InlineData("Kimetsu no Yaiba - Digital Colored Comics c162 Three Victorious Stars.cbz", "Kimetsu no Yaiba - Digital Colored Comics")] [InlineData("[Hidoi]_Amaenaideyo_MS_vol01_chp02.rar", "Amaenaideyo MS")] + [InlineData("NEEDLESS_Vol.4_-_Simeon_6_v2_[SugoiSugoi].rar", "NEEDLESS")] public void ParseSeriesTest(string filename, string expected) { - Assert.Equal(expected, ParseSeries(filename)); + Assert.Equal(expected, API.Parser.Parser.ParseSeries(filename)); } [Theory] @@ -193,51 +193,9 @@ namespace API.Tests [InlineData("[Hidoi]_Amaenaideyo_MS_vol01_chp02.rar", "2")] public void ParseChaptersTest(string filename, string expected) { - Assert.Equal(expected, ParseChapter(filename)); - } - - - 
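The renamed MangaParserTests keep the same xUnit Theory/InlineData shape but drop the "using static API.Parser.Parser;" import, so every parser call is now fully qualified. A minimal sketch of that call style, assuming only the xunit package and the repository's static API.Parser.Parser class (the test data is taken from the ParseVolumeTest case in this diff):

using Xunit;

namespace API.Tests.Parser
{
    public class ParserCallStyleSketch
    {
        // Each InlineData row becomes one test case; the parser is invoked through
        // its fully qualified name rather than a using-static import.
        [Theory]
        [InlineData("Gantz.V26.cbz", "26")]
        public void ParseVolume_UsesFullyQualifiedParser(string filename, string expected)
        {
            Assert.Equal(expected, API.Parser.Parser.ParseVolume(filename));
        }
    }
}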
[Theory] - [InlineData("0001", "1")] - [InlineData("1", "1")] - [InlineData("0013", "13")] - public void RemoveLeadingZeroesTest(string input, string expected) - { - Assert.Equal(expected, RemoveLeadingZeroes(input)); + Assert.Equal(expected, API.Parser.Parser.ParseChapter(filename)); } - [Theory] - [InlineData("1", "001")] - [InlineData("10", "010")] - [InlineData("100", "100")] - [InlineData("4-8", "004-008")] - public void PadZerosTest(string input, string expected) - { - Assert.Equal(expected, PadZeros(input)); - } - - [Theory] - [InlineData("Hello_I_am_here", "Hello I am here")] - [InlineData("Hello_I_am_here ", "Hello I am here")] - [InlineData("[ReleaseGroup] The Title", "The Title")] - [InlineData("[ReleaseGroup]_The_Title", "The Title")] - [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", "Kasumi Otoko no Ko v1.1")] - public void CleanTitleTest(string input, string expected) - { - Assert.Equal(expected, CleanTitle(input)); - } - - [Theory] - [InlineData("test.cbz", true)] - [InlineData("test.cbr", true)] - [InlineData("test.zip", true)] - [InlineData("test.rar", true)] - [InlineData("test.rar.!qb", false)] - [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.jpg", false)] - public void IsArchiveTest(string input, bool expected) - { - Assert.Equal(expected, IsArchive(input)); - } [Theory] [InlineData("Tenjou Tenge Omnibus", "Omnibus")] @@ -250,7 +208,7 @@ namespace API.Tests [InlineData("AKIRA - c003 (v01) [Full Color] [Darkhorse].cbz", "Full Color")] public void ParseEditionTest(string input, string expected) { - Assert.Equal(expected, ParseEdition(input)); + Assert.Equal(expected, API.Parser.Parser.ParseEdition(input)); } [Theory] [InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)] @@ -260,151 +218,26 @@ namespace API.Tests [InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", true)] [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", true)] [InlineData("Ani-Hina Art Collection.cbz", true)] + [InlineData("Gifting The Wonderful World With Blessings! 
- 3 Side Stories [yuNS][Unknown]", true)] public void ParseMangaSpecialTest(string input, bool expected) { - Assert.Equal(expected, ParseMangaSpecial(input) != ""); + Assert.Equal(expected, !string.IsNullOrEmpty(API.Parser.Parser.ParseMangaSpecial(input))); } - [Theory] - [InlineData("12-14", 12)] - [InlineData("24", 24)] - [InlineData("18-04", 4)] - [InlineData("18-04.5", 4.5)] - [InlineData("40", 40)] - public void MinimumNumberFromRangeTest(string input, float expected) - { - Assert.Equal(expected, MinimumNumberFromRange(input)); - } - - [Theory] - [InlineData("Darker Than Black", "darkerthanblack")] - [InlineData("Darker Than Black - Something", "darkerthanblacksomething")] - [InlineData("Darker Than_Black", "darkerthanblack")] - [InlineData("", "")] - public void NormalizeTest(string input, string expected) - { - Assert.Equal(expected, Normalize(input)); - } - - [Theory] - [InlineData("01 Spider-Man & Wolverine 01.cbr", "Spider-Man & Wolverine")] - [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "Asterix the Gladiator")] - [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "The First Asterix Frieze")] - [InlineData("Batman & Catwoman - Trail of the Gun 01", "Batman & Catwoman - Trail of the Gun")] - [InlineData("Batman & Daredevil - King of New York", "Batman & Daredevil - King of New York")] - [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "Batman & Grendel")] - [InlineData("Batman & Robin the Teen Wonder #0", "Batman & Robin the Teen Wonder")] - [InlineData("Batman & Wildcat (1 of 3)", "Batman & Wildcat")] - [InlineData("Batman And Superman World's Finest #01", "Batman And Superman World's Finest")] - [InlineData("Babe 01", "Babe")] - [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "Scott Pilgrim")] - [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")] - [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "Scott Pilgrim")] - [InlineData("Wolverine - Origins 003 (2006) (digital) (Minutemen-PhD)", "Wolverine - Origins")] - [InlineData("Invincible Vol 01 Family matters (2005) (Digital).cbr", "Invincible")] - public void ParseComicSeriesTest(string filename, string expected) - { - Assert.Equal(expected, ParseComicSeries(filename)); - } - - [Theory] - [InlineData("01 Spider-Man & Wolverine 01.cbr", "1")] - [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "4")] - [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")] - [InlineData("Batman & Catwoman - Trail of the Gun 01", "1")] - [InlineData("Batman & Daredevil - King of New York", "0")] - [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")] - [InlineData("Batman & Robin the Teen Wonder #0", "0")] - [InlineData("Batman & Wildcat (1 of 3)", "0")] - [InlineData("Batman And Superman World's Finest #01", "1")] - [InlineData("Babe 01", "1")] - [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "1")] - [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] - [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005)", "2")] - [InlineData("Superman v1 024 (09-10 1943)", "1")] - public void ParseComicVolumeTest(string filename, string expected) - { - Assert.Equal(expected, ParseComicVolume(filename)); - } - - [Theory] - [InlineData("01 Spider-Man & Wolverine 01.cbr", "0")] - [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "0")] - [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "0")] - [InlineData("Batman & Catwoman - Trail of the Gun 01", "0")] - [InlineData("Batman & Daredevil - King of New York", "0")] - [InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "0")] - [InlineData("Batman & Robin the Teen Wonder #0", "0")] - [InlineData("Batman & Wildcat (1 of 3)", "1")] - [InlineData("Batman & Wildcat (2 of 3)", "2")] - [InlineData("Batman And Superman World's Finest #01", "0")] - [InlineData("Babe 01", "0")] - [InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", "0")] - [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] - [InlineData("Superman v1 024 (09-10 1943)", "24")] - public void ParseComicChapterTest(string filename, string expected) - { - Assert.Equal(expected, ParseComicChapter(filename)); - } - - [Theory] - [InlineData("test.jpg", true)] - [InlineData("test.jpeg", true)] - [InlineData("test.png", true)] - [InlineData(".test.jpg", false)] - [InlineData("!test.jpg", false)] - public void IsImageTest(string filename, bool expected) - { - Assert.Equal(expected, IsImage(filename)); - } - - [Theory] - [InlineData("C:/", "C:/Love Hina/Love Hina - Special.cbz", "Love Hina")] - [InlineData("C:/", "C:/Love Hina/Specials/Ani-Hina Art Collection.cbz", "Love Hina")] - [InlineData("C:/", "C:/Mujaki no Rakuen Something/Mujaki no Rakuen Vol12 ch76.cbz", "Mujaki no Rakuen")] - public void FallbackTest(string rootDir, string inputPath, string expectedSeries) - { - var actual = Parse(inputPath, rootDir); - if (actual == null) - { - Assert.NotNull(actual); - return; - } - - Assert.Equal(expectedSeries, actual.Series); - } - - [Theory] - [InlineData("Love Hina - Special.jpg", false)] - [InlineData("folder.jpg", true)] - [InlineData("DearS_v01_cover.jpg", true)] - [InlineData("DearS_v01_covers.jpg", false)] - [InlineData("!cover.jpg", true)] - [InlineData("cover.jpg", true)] - [InlineData("cover.png", true)] - [InlineData("ch1/cover.png", true)] - public void IsCoverImageTest(string inputPath, bool expected) - { - Assert.Equal(expected, IsCoverImage(inputPath)); - } - - [Theory] - [InlineData("__MACOSX/Love Hina - Special.jpg", true)] - [InlineData("TEST/Love Hina - Special.jpg", false)] - [InlineData("__macosx/Love Hina/", false)] - [InlineData("MACOSX/Love Hina/", false)] - public void HasBlacklistedFolderInPathTest(string inputPath, bool expected) - { - Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath)); - } - [Theory] [InlineData("image.png", MangaFormat.Image)] [InlineData("image.cbz", MangaFormat.Archive)] [InlineData("image.txt", MangaFormat.Unknown)] public void ParseFormatTest(string inputFile, MangaFormat expected) { - Assert.Equal(expected, ParseFormat(inputFile)); + Assert.Equal(expected, API.Parser.Parser.ParseFormat(inputFile)); + } + + [Theory] + [InlineData("Gifting The Wonderful World With Blessings! 
- 3 Side Stories [yuNS][Unknown].epub", "Side Stories")] + public void ParseSpecialTest(string inputFile, string expected) + { + Assert.Equal(expected, API.Parser.Parser.ParseMangaSpecial(inputFile)); } [Fact] @@ -496,7 +329,7 @@ namespace API.Tests foreach (var file in expected.Keys) { var expectedInfo = expected[file]; - var actual = Parse(file, rootPath); + var actual = API.Parser.Parser.Parse(file, rootPath); if (expectedInfo == null) { Assert.Null(actual); diff --git a/API.Tests/Parser/ParserInfoTests.cs b/API.Tests/Parser/ParserInfoTests.cs new file mode 100644 index 000000000..78b879de7 --- /dev/null +++ b/API.Tests/Parser/ParserInfoTests.cs @@ -0,0 +1,110 @@ +using API.Entities.Enums; +using API.Parser; +using Xunit; + +namespace API.Tests.Parser +{ + public class ParserInfoTests + { + [Fact] + public void MergeFromTest() + { + var p1 = new ParserInfo() + { + Chapters = "0", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "darker than black", + Volumes = "0" + }; + + var p2 = new ParserInfo() + { + Chapters = "1", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "Darker Than Black", + Volumes = "0" + }; + + var expected = new ParserInfo() + { + Chapters = "1", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "darker than black", + Volumes = "0" + }; + p1.Merge(p2); + + AssertSame(expected, p1); + + } + + [Fact] + public void MergeFromTest2() + { + var p1 = new ParserInfo() + { + Chapters = "1", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = true, + Series = "darker than black", + Title = "darker than black", + Volumes = "0" + }; + + var p2 = new ParserInfo() + { + Chapters = "0", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = false, + Series = "darker than black", + Title = "Darker Than Black", + Volumes = "1" + }; + + var expected = new ParserInfo() + { + Chapters = "1", + Edition = "", + Format = MangaFormat.Archive, + FullFilePath = "/manga/darker than black.cbz", + IsSpecial = true, + Series = "darker than black", + Title = "darker than black", + Volumes = "1" + }; + p1.Merge(p2); + + AssertSame(expected, p1); + + } + + + private void AssertSame(ParserInfo expected, ParserInfo actual) + { + Assert.Equal(expected.Chapters, actual.Chapters); + Assert.Equal(expected.Volumes, actual.Volumes); + Assert.Equal(expected.Edition, actual.Edition); + Assert.Equal(expected.Filename, actual.Filename); + Assert.Equal(expected.Format, actual.Format); + Assert.Equal(expected.Series, actual.Series); + Assert.Equal(expected.IsSpecial, actual.IsSpecial); + Assert.Equal(expected.FullFilePath, actual.FullFilePath); + } + } +} \ No newline at end of file diff --git a/API.Tests/Parser/ParserTest.cs b/API.Tests/Parser/ParserTest.cs new file mode 100644 index 000000000..2f46c6bb2 --- /dev/null +++ b/API.Tests/Parser/ParserTest.cs @@ -0,0 +1,192 @@ +using Xunit; +using static API.Parser.Parser; + +namespace API.Tests.Parser +{ + public class ParserTests + { + + [Theory] + [InlineData("0001", "1")] + [InlineData("1", "1")] + [InlineData("0013", "13")] + public void RemoveLeadingZeroesTest(string input, string expected) + { + Assert.Equal(expected, 
RemoveLeadingZeroes(input)); + } + + [Theory] + [InlineData("1", "001")] + [InlineData("10", "010")] + [InlineData("100", "100")] + public void PadZerosTest(string input, string expected) + { + Assert.Equal(expected, PadZeros(input)); + } + + [Theory] + [InlineData("Hello_I_am_here", "Hello I am here")] + [InlineData("Hello_I_am_here ", "Hello I am here")] + [InlineData("[ReleaseGroup] The Title", "The Title")] + [InlineData("[ReleaseGroup]_The_Title", "The Title")] + [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", "Kasumi Otoko no Ko v1.1")] + public void CleanTitleTest(string input, string expected) + { + Assert.Equal(expected, CleanTitle(input)); + } + + + // [Theory] + // //[InlineData("@font-face{font-family:\"PaytoneOne\";src:url(\"..\\/Fonts\\/PaytoneOne.ttf\")}", "@font-face{font-family:\"PaytoneOne\";src:url(\"PaytoneOne.ttf\")}")] + // [InlineData("@font-face{font-family:\"PaytoneOne\";src:url(\"..\\/Fonts\\/PaytoneOne.ttf\")}", "..\\/Fonts\\/PaytoneOne.ttf")] + // //[InlineData("@font-face{font-family:'PaytoneOne';src:url('..\\/Fonts\\/PaytoneOne.ttf')}", "@font-face{font-family:'PaytoneOne';src:url('PaytoneOne.ttf')}")] + // //[InlineData("@font-face{\r\nfont-family:'PaytoneOne';\r\nsrc:url('..\\/Fonts\\/PaytoneOne.ttf')\r\n}", "@font-face{font-family:'PaytoneOne';src:url('PaytoneOne.ttf')}")] + // public void ReplaceStyleUrlTest(string input, string expected) + // { + // var replacementStr = "PaytoneOne.ttf"; + // // TODO: Use Match to validate since replace is weird + // //Assert.Equal(expected, FontSrcUrlRegex.Replace(input, "$1" + replacementStr + "$2" + "$3")); + // var match = FontSrcUrlRegex.Match(input); + // Assert.Equal(!string.IsNullOrEmpty(expected), FontSrcUrlRegex.Match(input).Success); + // } + + + [Theory] + [InlineData("test.cbz", true)] + [InlineData("test.cbr", true)] + [InlineData("test.zip", true)] + [InlineData("test.rar", true)] + [InlineData("test.rar.!qb", false)] + [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.jpg", false)] + public void IsArchiveTest(string input, bool expected) + { + Assert.Equal(expected, IsArchive(input)); + } + + [Theory] + [InlineData("test.epub", true)] + [InlineData("test.pdf", false)] + [InlineData("test.mobi", false)] + [InlineData("test.djvu", false)] + [InlineData("test.zip", false)] + [InlineData("test.rar", false)] + [InlineData("test.epub.!qb", false)] + [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.ebub", false)] + public void IsBookTest(string input, bool expected) + { + Assert.Equal(expected, IsBook(input)); + } + + [Theory] + [InlineData("test.epub", true)] + [InlineData("test.EPUB", true)] + [InlineData("test.mobi", false)] + [InlineData("test.epub.!qb", false)] + [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.ebub", false)] + public void IsEpubTest(string input, bool expected) + { + Assert.Equal(expected, IsEpub(input)); + } + + // [Theory] + // [InlineData("Tenjou Tenge Omnibus", "Omnibus")] + // [InlineData("Tenjou Tenge {Full Contact Edition}", "Full Contact Edition")] + // [InlineData("Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", "Full Contact Edition")] + // [InlineData("Wotakoi - Love is Hard for Otaku Omnibus v01 (2018) (Digital) (danke-Empire)", "Omnibus")] + // [InlineData("To Love Ru v01 Uncensored (Ch.001-007)", "Uncensored")] + // [InlineData("Chobits Omnibus Edition v01 [Dark Horse]", "Omnibus Edition")] + // [InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 
177 - 30 Million vs 81 Million.cbz", "Digital Colored Comics")] + // [InlineData("AKIRA - c003 (v01) [Full Color] [Darkhorse].cbz", "Full Color")] + // public void ParseEditionTest(string input, string expected) + // { + // Assert.Equal(expected, ParseEdition(input)); + // } + + // [Theory] + // [InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)] + // [InlineData("Beelzebub_Omake_June_2012_RHS", true)] + // [InlineData("Beelzebub_Side_Story_02_RHS.zip", false)] + // [InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", true)] + // [InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", true)] + // [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", true)] + // [InlineData("Ani-Hina Art Collection.cbz", true)] + // public void ParseMangaSpecialTest(string input, bool expected) + // { + // Assert.Equal(expected, ParseMangaSpecial(input) != ""); + // } + + [Theory] + [InlineData("12-14", 12)] + [InlineData("24", 24)] + [InlineData("18-04", 4)] + [InlineData("18-04.5", 4.5)] + [InlineData("40", 40)] + public void MinimumNumberFromRangeTest(string input, float expected) + { + Assert.Equal(expected, MinimumNumberFromRange(input)); + } + + [Theory] + [InlineData("Darker Than Black", "darkerthanblack")] + [InlineData("Darker Than Black - Something", "darkerthanblacksomething")] + [InlineData("Darker Than_Black", "darkerthanblack")] + [InlineData("", "")] + public void NormalizeTest(string input, string expected) + { + Assert.Equal(expected, Normalize(input)); + } + + + + [Theory] + [InlineData("test.jpg", true)] + [InlineData("test.jpeg", true)] + [InlineData("test.png", true)] + [InlineData(".test.jpg", false)] + [InlineData("!test.jpg", false)] + public void IsImageTest(string filename, bool expected) + { + Assert.Equal(expected, IsImage(filename)); + } + + [Theory] + [InlineData("C:/", "C:/Love Hina/Love Hina - Special.cbz", "Love Hina")] + [InlineData("C:/", "C:/Love Hina/Specials/Ani-Hina Art Collection.cbz", "Love Hina")] + [InlineData("C:/", "C:/Mujaki no Rakuen Something/Mujaki no Rakuen Vol12 ch76.cbz", "Mujaki no Rakuen")] + public void FallbackTest(string rootDir, string inputPath, string expectedSeries) + { + var actual = Parse(inputPath, rootDir); + if (actual == null) + { + Assert.NotNull(actual); + return; + } + + Assert.Equal(expectedSeries, actual.Series); + } + + [Theory] + [InlineData("Love Hina - Special.jpg", false)] + [InlineData("folder.jpg", true)] + [InlineData("DearS_v01_cover.jpg", true)] + [InlineData("DearS_v01_covers.jpg", false)] + [InlineData("!cover.jpg", true)] + [InlineData("cover.jpg", true)] + [InlineData("cover.png", true)] + [InlineData("ch1/cover.png", true)] + public void IsCoverImageTest(string inputPath, bool expected) + { + Assert.Equal(expected, IsCoverImage(inputPath)); + } + + [Theory] + [InlineData("__MACOSX/Love Hina - Special.jpg", true)] + [InlineData("TEST/Love Hina - Special.jpg", false)] + [InlineData("__macosx/Love Hina/", false)] + [InlineData("MACOSX/Love Hina/", false)] + public void HasBlacklistedFolderInPathTest(string inputPath, bool expected) + { + Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath)); + } + } +} \ No newline at end of file diff --git a/API.Tests/Services/ArchiveServiceTests.cs b/API.Tests/Services/ArchiveServiceTests.cs index 8cfa382f2..d907ab75a 100644 --- a/API.Tests/Services/ArchiveServiceTests.cs +++ b/API.Tests/Services/ArchiveServiceTests.cs @@ -1,9 +1,7 
@@ -using System.Collections.ObjectModel; -using System.Diagnostics; +using System.Diagnostics; using System.IO; using System.IO.Compression; using API.Archive; -using API.Interfaces.Services; using API.Services; using Microsoft.Extensions.Logging; using NSubstitute; diff --git a/API.Tests/Services/BackupServiceTests.cs b/API.Tests/Services/BackupServiceTests.cs deleted file mode 100644 index 878b57c94..000000000 --- a/API.Tests/Services/BackupServiceTests.cs +++ /dev/null @@ -1,47 +0,0 @@ -using API.Interfaces; -using API.Services; -using API.Services.Tasks; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Logging; -using NSubstitute; - -namespace API.Tests.Services -{ - public class BackupServiceTests - { - private readonly DirectoryService _directoryService; - private readonly BackupService _backupService; - private readonly IUnitOfWork _unitOfWork = Substitute.For(); - private readonly ILogger _directoryLogger = Substitute.For>(); - private readonly ILogger _logger = Substitute.For>(); - private readonly IConfiguration _config; - - // public BackupServiceTests() - // { - // var inMemorySettings = new Dictionary { - // {"Logging:File:MaxRollingFiles", "0"}, - // {"Logging:File:Path", "file.log"}, - // }; - // - // _config = new ConfigurationBuilder() - // .AddInMemoryCollection(inMemorySettings) - // .Build(); - // - // //_config.GetMaxRollingFiles().Returns(0); - // //_config.GetLoggingFileName().Returns("file.log"); - // //var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BackupService/"); - // //Directory.GetCurrentDirectory().Returns(testDirectory); - // - // _directoryService = new DirectoryService(_directoryLogger); - // _backupService = new BackupService(_unitOfWork, _logger, _directoryService, _config); - // } - // - // [Fact] - // public void Test() - // { - // _backupService.BackupDatabase(); - // } - - - } -} \ No newline at end of file diff --git a/API.Tests/Services/BookServiceTests.cs b/API.Tests/Services/BookServiceTests.cs new file mode 100644 index 000000000..1a53af8fc --- /dev/null +++ b/API.Tests/Services/BookServiceTests.cs @@ -0,0 +1,32 @@ +using System.IO; +using API.Entities.Interfaces; +using API.Interfaces; +using API.Services; +using Microsoft.Extensions.Logging; +using NSubstitute; +using Xunit; + +namespace API.Tests.Services +{ + public class BookServiceTests + { + private readonly IBookService _bookService; + private readonly ILogger _logger = Substitute.For>(); + + public BookServiceTests() + { + _bookService = new BookService(_logger); + } + + [Theory] + [InlineData("The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub", 16)] + [InlineData("Non-existent file.epub", 0)] + [InlineData("Non an ebub.pdf", 0)] + public void GetNumberOfPagesTest(string filePath, int expectedPages) + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService/EPUB"); + Assert.Equal(expectedPages, _bookService.GetNumberOfPages(Path.Join(testDirectory, filePath))); + } + + } +} \ No newline at end of file diff --git a/API.Tests/Services/CacheServiceTests.cs b/API.Tests/Services/CacheServiceTests.cs index 2072dae1f..410c43ade 100644 --- a/API.Tests/Services/CacheServiceTests.cs +++ b/API.Tests/Services/CacheServiceTests.cs @@ -41,7 +41,7 @@ // //[InlineData("", 0, "")] // public void GetCachedPagePathTest_Should() // { - // // TODO: Figure out how to test this + // // // string archivePath = "flat file.zip"; // // int pageNum = 0; // // string 
expected = "cache/1/pexels-photo-6551949.jpg"; diff --git a/API.Tests/Services/DirectoryServiceTests.cs b/API.Tests/Services/DirectoryServiceTests.cs index 7c21ae927..04108fd25 100644 --- a/API.Tests/Services/DirectoryServiceTests.cs +++ b/API.Tests/Services/DirectoryServiceTests.cs @@ -1,6 +1,8 @@ -using System.IO; +using System.Collections.Generic; +using System.IO; using System.Linq; using API.Services; +using API.Tests.Helpers; using Microsoft.Extensions.Logging; using NSubstitute; using Xunit; @@ -18,6 +20,18 @@ namespace API.Tests.Services _directoryService = new DirectoryService(_logger); } + [Theory] + [InlineData("Manga-testcase.txt", 28)] + public void GetFilesTest(string file, int expectedFileCount) + { + var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Manga"); + var files = new List(); + var fileCount = DirectoryService.TraverseTreeParallelForEach(testDirectory, s => files.Add(s), + API.Parser.Parser.ArchiveFileExtensions, _logger); + + Assert.Equal(expectedFileCount, fileCount); + } + [Fact] public void GetFiles_WithCustomRegex_ShouldPass_Test() { diff --git a/API.Tests/Services/ScannerServiceTests.cs b/API.Tests/Services/ScannerServiceTests.cs index bc1ea6de0..22f54259c 100644 --- a/API.Tests/Services/ScannerServiceTests.cs +++ b/API.Tests/Services/ScannerServiceTests.cs @@ -1,69 +1,102 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; +using System.Data.Common; +using System.IO; using System.Linq; +using System.Threading.Tasks; +using API.Data; using API.Entities; -using API.Extensions; using API.Interfaces; using API.Interfaces.Services; using API.Parser; using API.Services; using API.Services.Tasks; +using API.Tests.Helpers; +using AutoMapper; +using Microsoft.Data.Sqlite; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; using Microsoft.Extensions.Logging; using NSubstitute; -using NSubstitute.Extensions; using Xunit; using Xunit.Abstractions; namespace API.Tests.Services { - public class ScannerServiceTests + public class ScannerServiceTests : IDisposable { private readonly ITestOutputHelper _testOutputHelper; private readonly ScannerService _scannerService; private readonly ILogger _logger = Substitute.For>(); - private readonly IUnitOfWork _unitOfWork = Substitute.For(); + private readonly IUnitOfWork _unitOfWork; private readonly IArchiveService _archiveService = Substitute.For(); + private readonly IBookService _bookService = Substitute.For(); private readonly IMetadataService _metadataService; private readonly ILogger _metadataLogger = Substitute.For>(); - private Library _libraryMock; + + private readonly DbConnection _connection; + private readonly DataContext _context; + public ScannerServiceTests(ITestOutputHelper testOutputHelper) { - _testOutputHelper = testOutputHelper; - _scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService); - _metadataService= Substitute.For(_unitOfWork, _metadataLogger, _archiveService); - // _libraryMock = new Library() - // { - // Id = 1, - // Name = "Manga", - // Folders = new List() - // { - // new FolderPath() - // { - // Id = 1, - // LastScanned = DateTime.Now, - // LibraryId = 1, - // Path = "E:/Manga" - // } - // }, - // LastModified = DateTime.Now, - // Series = new List() - // { - // new Series() - // { - // Id = 0, - // Name = "Darker Than Black" - // } - // } - // }; + var contextOptions = new DbContextOptionsBuilder() + 
.UseSqlite(CreateInMemoryDatabase()) + .Options; + _connection = RelationalOptionsExtension.Extract(contextOptions).Connection; + + _context = new DataContext(contextOptions); + Task.Run(SeedDb).GetAwaiter().GetResult(); + + //BackgroundJob.Enqueue is what I need to mock or something (it's static...) + // ICacheService cacheService, ILogger logger, IScannerService scannerService, + // IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService, + // IBackgroundJobClient jobClient + //var taskScheduler = new TaskScheduler(Substitute.For(), Substitute.For>(), Substitute.For<) + + + // Substitute.For>() - Not needed because only for UserService + _unitOfWork = new UnitOfWork(_context, Substitute.For(), null, + Substitute.For>()); + + + _testOutputHelper = testOutputHelper; + _metadataService= Substitute.For(_unitOfWork, _metadataLogger, _archiveService, _bookService); + _scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService, _bookService); } + private async Task SeedDb() + { + await _context.Database.MigrateAsync(); + await Seed.SeedSettings(_context); + + _context.Library.Add(new Library() + { + Name = "Manga", + Folders = new List() + { + new FolderPath() + { + Path = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Manga") + } + } + }); + return await _context.SaveChangesAsync() > 0; + } + + // [Fact] + // public void Test() + // { + // _scannerService.ScanLibrary(1, false); + // + // var series = _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1).Result.Series; + // } + [Fact] public void FindSeriesNotOnDisk_Should_RemoveNothing_Test() { - var scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService); var infos = new Dictionary>(); AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black"}); @@ -76,38 +109,36 @@ namespace API.Tests.Services Name = "Cage of Eden", LocalizedName = "Cage of Eden", OriginalName = "Cage of Eden", - NormalizedName = Parser.Parser.Normalize("Cage of Eden") + NormalizedName = API.Parser.Parser.Normalize("Cage of Eden") }); existingSeries.Add(new Series() { Name = "Darker Than Black", LocalizedName = "Darker Than Black", OriginalName = "Darker Than Black", - NormalizedName = Parser.Parser.Normalize("Darker Than Black") + NormalizedName = API.Parser.Parser.Normalize("Darker Than Black") }); var expectedSeries = new List(); - Assert.Empty(scannerService.FindSeriesNotOnDisk(existingSeries, infos)); + Assert.Empty(_scannerService.FindSeriesNotOnDisk(existingSeries, infos)); } [Theory] [InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")] [InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")] - [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker Than Black!")] + [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker than Black")] [InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")] public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected) { - var scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService); - var collectedSeries = new ConcurrentDictionary>(); foreach (var seriesName in existingSeriesNames) { AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName}); } - var actualName = scannerService.MergeName(collectedSeries, new ParserInfo() + var actualName = 
_scannerService.MergeName(collectedSeries, new ParserInfo() { Series = parsedInfoName }); @@ -115,6 +146,25 @@ namespace API.Tests.Services Assert.Equal(expected, actualName); } + [Fact] + public void RemoveMissingSeries_Should_RemoveSeries() + { + var existingSeries = new List() + { + EntityFactory.CreateSeries("Darker than Black Vol 1"), + EntityFactory.CreateSeries("Darker than Black"), + EntityFactory.CreateSeries("Beastars"), + }; + var missingSeries = new List() + { + EntityFactory.CreateSeries("Darker than Black Vol 1"), + }; + existingSeries = ScannerService.RemoveMissingSeries(existingSeries, missingSeries, out var removeCount).ToList(); + + Assert.DoesNotContain(missingSeries[0].Name, existingSeries.Select(s => s.Name)); + Assert.Equal(missingSeries.Count, removeCount); + } + private void AddToParsedInfo(IDictionary> collectedSeries, ParserInfo info) { if (collectedSeries.GetType() == typeof(ConcurrentDictionary<,>)) @@ -209,5 +259,16 @@ namespace API.Tests.Services // _testOutputHelper.WriteLine(_libraryMock.ToString()); Assert.True(true); } + + private static DbConnection CreateInMemoryDatabase() + { + var connection = new SqliteConnection("Filename=:memory:"); + + connection.Open(); + + return connection; + } + + public void Dispose() => _connection.Dispose(); } } \ No newline at end of file diff --git a/API.Tests/Services/Test Data/BookService/EPUB/The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub b/API.Tests/Services/Test Data/BookService/EPUB/The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub new file mode 100644 index 000000000..7388bc85e Binary files /dev/null and b/API.Tests/Services/Test Data/BookService/EPUB/The Golden Harpoon; Or, Lost Among the Floes A Story of the Whaling Grounds.epub differ diff --git a/API.Tests/Services/Test Data/DirectoryService/TestCases/Manga-testcase.txt b/API.Tests/Services/Test Data/DirectoryService/TestCases/Manga-testcase.txt new file mode 100644 index 000000000..9aebe3e44 --- /dev/null +++ b/API.Tests/Services/Test Data/DirectoryService/TestCases/Manga-testcase.txt @@ -0,0 +1,153 @@ +\A Town Where You Live\A Town Where You Live Vol. 01.zip +\A Town Where You Live\A Town Where You Live Vol. 02.zip +\A Town Where You Live\A Town Where You Live Vol. 03.zip +\A Town Where You Live\A Town Where You Live Vol. 04.zip +\A Town Where You Live\A Town Where You Live Vol. 05.zip +\A Town Where You Live\A Town Where You Live Vol. 06.zip +\A Town Where You Live\A Town Where You Live Vol. 07.zip +\A Town Where You Live\A Town Where You Live Vol. 08.zip +\A Town Where You Live\A Town Where You Live Vol. 09.zip +\A Town Where You Live\A Town Where You Live Vol. 10.zip +\A Town Where You Live\A Town Where You Live Vol. 11.zip +\A Town Where You Live\A Town Where You Live Vol. 12.zip +\A Town Where You Live\A Town Where You Live Vol. 13.zip +\A Town Where You Live\A Town Where You Live Vol. 14.zip +\A Town Where You Live\A Town Where You Live Vol. 15.zip +\A Town Where You Live\A Town Where You Live Vol. 16.zip +\A Town Where You Live\A Town Where You Live Vol. 17.zip +\A Town Where You Live\A Town Where You Live Vol. 18.zip +\A Town Where You Live\A Town Where You Live Vol. 19.zip +\A Town Where You Live\A Town Where You Live Vol. 20.zip +\A Town Where You Live\A Town Where You Live Vol. 21.zip +\A Town Where You Live\A Town Where You Live Vol. 22.zip +\A Town Where You Live\A Town Where You Live Vol. 23.zip +\A Town Where You Live\A Town Where You Live Vol. 
24.zip +\A Town Where You Live\A Town Where You Live Vol. 25.zip +\A Town Where You Live\A Town Where You Live Vol. 26.zip +\A Town Where You Live\A Town Where You Live Vol. 27.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Bonus Chapter.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Princess Lucia Collaboration.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Special Fantasy.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Special Youth's Acne.zip +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v01 (2018) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v02 (2018) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v03 (2019) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v04 (2019) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v05 (2019) (Digital) (danke-Empire).cbz +\Aiki\Aiki V01.cbz +\Aiki\Aiki V02.cbz +\Aiki\Aiki V03.cbz +\Aiki\Aiki V04.cbz +\Aiki\Aiki V05.cbz +\Aiki\Aiki V06.cbz +\Aiki\Aiki V07.cbz +\Aiki\Aiki V08.cbz +\Aiki\Aiki V09.cbz +\Aiki\Aiki V10.cbz +\Aiki\Aiki V11.cbz +\Aiki\Aiki V12.cbz +\Aiki\Aiki V13.cbz +\Aiki\Aiki V14.cbz +\Ajin - Demi-Human\Ajin - Demi-Human 074 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 074.5 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 075 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 075.5 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 076 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 077 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 078 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 079 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 080 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 081 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 082 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 083 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 083.5 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 084 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 085 (2021) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 086 (2021) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v01 (2014) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v02 (2014) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v03 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v04 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v05 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v06 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v07 (2016) (Digital) (Hexer-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v08 (2016) (Digital) (Hexer-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v09 (2017) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 
v10 (2017) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v11 (2018) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v12 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v13 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v14 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v15 (2020) (Digital) (danke-Empire).cbz +\Akame ga KILL!\Akame ga KILL! v01 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v02 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v03 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v04 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v05 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v06 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v07 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v08 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v09 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v10 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v11 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v12 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v13 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v14 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v15 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v02 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v03 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v04 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v05 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v06 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v07 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v08 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v09 (2019) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! 
ZERO v10 (2019) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v01 (2019) (F) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v02 (2019) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v03 (2019) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v04 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v05 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v06 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v07 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v08 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v09.cbz +\Beastars\BEASTARS v10.cbz +\Beastars\BEASTARS v11.cbz +\Beastars\BEASTARS v12.cbz +\Beastars\BEASTARS v13.cbz +\Beastars\BEASTARS v14.cbz +\Beastars\BEASTARS v15.cbz +\Beastars\BEASTARS v16.cbz +\Beastars\BEASTARS v17.cbz +\Beastars\BEASTARS v18.cbz +\Beastars\BEASTARS v19.cbz +\Beastars\BEASTARS v20.cbz +\Beastars\BEASTARS v21.cbz +\Black Bullet\Black Bullet - v4 c17 [batoto].zip +\Black Bullet\Black Bullet - v4 c17.5 [batoto].zip +\Black Bullet\Black Bullet - v4 c18 [batoto].zip +\Black Bullet\Black Bullet - v4 c18.5 [batoto].zip +\Black Bullet\Black Bullet - v4 c19 [batoto].zip +\Black Bullet\Black Bullet - v4 c19.5 [batoto].zip +\Black Bullet\Black Bullet - v4 c20 [batoto].zip +\Black Bullet\Black Bullet - v4 c20.5 [batoto].zip +\Black Bullet\Black Bullet v01 c01.rar +\Black Bullet\Black Bullet v01 c02.rar +\Black Bullet\Black Bullet v01 c03.rar +\Black Bullet\Black Bullet v01 c04.rar +\Black Bullet\Black Bullet v01 c05.rar +\Black Bullet\Black Bullet v01 c06.rar +\Black Bullet\Black Bullet v01 c07.rar +\Black Bullet\Black Bullet v01 c08.rar +\Black Bullet\Black Bullet v01 c09.5.rar +\Black Bullet\Black Bullet v01 c09.rar +\Black Bullet\Black Bullet v01 c10.rar +\Black Bullet\Black Bullet v01 c11.zip +\Black Bullet\Black Bullet v01 c12.5.rar +\Black Bullet\Black Bullet v01 c12.rar +\Black Bullet\Black Bullet v01 c13.rar +\Black Bullet\Black Bullet v01 c14.rar +\Black Bullet\Black Bullet v01 c15.rar +\Black Bullet\Black Bullet v01 c16.rar diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_omake.zip b/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_omake.zip new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v01.zip b/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v01.zip new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v02.zip b/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v02.zip new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v03.zip b/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v03.zip new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v04.zip b/API.Tests/Services/Test Data/ScannerService/Manga/A Town Where You Live/A_Town_Where_You_Live_v04.zip new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v01 (digital).cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v01 (digital).cbz new file mode 100644 index 000000000..e69de29bb diff --git 
a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v02.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v02.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v03.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v03.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v04.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v04.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v05.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v05.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v06.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v06.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v07.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v07.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v08.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v08.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v09.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v09.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v10.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v10.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v11.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v11.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v12.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v12.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v13.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v13.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v14.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v14.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v15.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BEASTARS/BEASTARS v15.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v01.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v01.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v02.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v02.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! 
v03.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v03.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v04.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v04.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v05.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v05.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v06.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v06.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v07.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v07.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v10.cbz b/API.Tests/Services/Test Data/ScannerService/Manga/BTOOOM!/Btooom! v10.cbz new file mode 100644 index 000000000..e69de29bb diff --git a/API.Tests/Services/Test Data/ScannerService/TestCases/Manga-testcase.txt b/API.Tests/Services/Test Data/ScannerService/TestCases/Manga-testcase.txt new file mode 100644 index 000000000..9aebe3e44 --- /dev/null +++ b/API.Tests/Services/Test Data/ScannerService/TestCases/Manga-testcase.txt @@ -0,0 +1,153 @@ +\A Town Where You Live\A Town Where You Live Vol. 01.zip +\A Town Where You Live\A Town Where You Live Vol. 02.zip +\A Town Where You Live\A Town Where You Live Vol. 03.zip +\A Town Where You Live\A Town Where You Live Vol. 04.zip +\A Town Where You Live\A Town Where You Live Vol. 05.zip +\A Town Where You Live\A Town Where You Live Vol. 06.zip +\A Town Where You Live\A Town Where You Live Vol. 07.zip +\A Town Where You Live\A Town Where You Live Vol. 08.zip +\A Town Where You Live\A Town Where You Live Vol. 09.zip +\A Town Where You Live\A Town Where You Live Vol. 10.zip +\A Town Where You Live\A Town Where You Live Vol. 11.zip +\A Town Where You Live\A Town Where You Live Vol. 12.zip +\A Town Where You Live\A Town Where You Live Vol. 13.zip +\A Town Where You Live\A Town Where You Live Vol. 14.zip +\A Town Where You Live\A Town Where You Live Vol. 15.zip +\A Town Where You Live\A Town Where You Live Vol. 16.zip +\A Town Where You Live\A Town Where You Live Vol. 17.zip +\A Town Where You Live\A Town Where You Live Vol. 18.zip +\A Town Where You Live\A Town Where You Live Vol. 19.zip +\A Town Where You Live\A Town Where You Live Vol. 20.zip +\A Town Where You Live\A Town Where You Live Vol. 21.zip +\A Town Where You Live\A Town Where You Live Vol. 22.zip +\A Town Where You Live\A Town Where You Live Vol. 23.zip +\A Town Where You Live\A Town Where You Live Vol. 24.zip +\A Town Where You Live\A Town Where You Live Vol. 25.zip +\A Town Where You Live\A Town Where You Live Vol. 26.zip +\A Town Where You Live\A Town Where You Live Vol. 
27.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Bonus Chapter.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Princess Lucia Collaboration.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Special Fantasy.zip +\A Town Where You Live\A Town Where You Live - Post Volume 27\A Town Where You Live - Special Youth's Acne.zip +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v01 (2018) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v02 (2018) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v03 (2019) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v04 (2019) (Digital) (danke-Empire).cbz +\Accomplishments of the Duke's Daughter\Accomplishments of the Duke's Daughter v05 (2019) (Digital) (danke-Empire).cbz +\Aiki\Aiki V01.cbz +\Aiki\Aiki V02.cbz +\Aiki\Aiki V03.cbz +\Aiki\Aiki V04.cbz +\Aiki\Aiki V05.cbz +\Aiki\Aiki V06.cbz +\Aiki\Aiki V07.cbz +\Aiki\Aiki V08.cbz +\Aiki\Aiki V09.cbz +\Aiki\Aiki V10.cbz +\Aiki\Aiki V11.cbz +\Aiki\Aiki V12.cbz +\Aiki\Aiki V13.cbz +\Aiki\Aiki V14.cbz +\Ajin - Demi-Human\Ajin - Demi-Human 074 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 074.5 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 075 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 075.5 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 076 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 077 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 078 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 079 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 080 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 081 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 082 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 083 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 083.5 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 084 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 085 (2021) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human 086 (2021) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v01 (2014) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v02 (2014) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v03 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v04 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v05 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v06 (2015) (Digital) (LostNerevarine-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v07 (2016) (Digital) (Hexer-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v08 (2016) (Digital) (Hexer-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v09 (2017) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v10 (2017) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v11 (2018) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v12 (2019) 
(Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v13 (2019) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v14 (2020) (Digital) (danke-Empire).cbz +\Ajin - Demi-Human\Ajin - Demi-Human v15 (2020) (Digital) (danke-Empire).cbz +\Akame ga KILL!\Akame ga KILL! v01 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v02 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v03 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v04 (2015) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v05 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v06 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v07 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v08 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v09 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v10 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v11 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v12 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v13 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v14 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL!\Akame ga KILL! v15 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v02 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v03 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v04 (2016) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v05 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v06 (2017) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v07 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v08 (2018) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v09 (2019) (Digital) (LuCaZ).cbz +\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! 
ZERO v10 (2019) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v01 (2019) (F) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v02 (2019) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v03 (2019) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v04 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v05 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v06 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v07 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v08 (2020) (Digital) (LuCaZ).cbz +\Beastars\BEASTARS v09.cbz +\Beastars\BEASTARS v10.cbz +\Beastars\BEASTARS v11.cbz +\Beastars\BEASTARS v12.cbz +\Beastars\BEASTARS v13.cbz +\Beastars\BEASTARS v14.cbz +\Beastars\BEASTARS v15.cbz +\Beastars\BEASTARS v16.cbz +\Beastars\BEASTARS v17.cbz +\Beastars\BEASTARS v18.cbz +\Beastars\BEASTARS v19.cbz +\Beastars\BEASTARS v20.cbz +\Beastars\BEASTARS v21.cbz +\Black Bullet\Black Bullet - v4 c17 [batoto].zip +\Black Bullet\Black Bullet - v4 c17.5 [batoto].zip +\Black Bullet\Black Bullet - v4 c18 [batoto].zip +\Black Bullet\Black Bullet - v4 c18.5 [batoto].zip +\Black Bullet\Black Bullet - v4 c19 [batoto].zip +\Black Bullet\Black Bullet - v4 c19.5 [batoto].zip +\Black Bullet\Black Bullet - v4 c20 [batoto].zip +\Black Bullet\Black Bullet - v4 c20.5 [batoto].zip +\Black Bullet\Black Bullet v01 c01.rar +\Black Bullet\Black Bullet v01 c02.rar +\Black Bullet\Black Bullet v01 c03.rar +\Black Bullet\Black Bullet v01 c04.rar +\Black Bullet\Black Bullet v01 c05.rar +\Black Bullet\Black Bullet v01 c06.rar +\Black Bullet\Black Bullet v01 c07.rar +\Black Bullet\Black Bullet v01 c08.rar +\Black Bullet\Black Bullet v01 c09.5.rar +\Black Bullet\Black Bullet v01 c09.rar +\Black Bullet\Black Bullet v01 c10.rar +\Black Bullet\Black Bullet v01 c11.zip +\Black Bullet\Black Bullet v01 c12.5.rar +\Black Bullet\Black Bullet v01 c12.rar +\Black Bullet\Black Bullet v01 c13.rar +\Black Bullet\Black Bullet v01 c14.rar +\Black Bullet\Black Bullet v01 c15.rar +\Black Bullet\Black Bullet v01 c16.rar diff --git a/API.Tests/generate_test_data.py b/API.Tests/generate_test_data.py new file mode 100644 index 000000000..69652969a --- /dev/null +++ b/API.Tests/generate_test_data.py @@ -0,0 +1,80 @@ +""" This script should be run on a directory which will generate a test case file + that can be loaded into the renametest.py""" +import os +from pathlib import Path +import shutil + +verbose = False + +def print_log(val): + if verbose: + print(val) + + +def create_test_base(file, root_dir): + """ Creates and returns a new base directory for data creation for a given testcase.""" + base_dir = os.path.split(file.split('-testcase.txt')[0])[-1] + print_log('base_dir: {0}'.format(base_dir)) + new_dir = os.path.join(root_dir, base_dir) + print_log('new dir: {0}'.format(new_dir)) + p = Path(new_dir) + if not p.exists(): + os.mkdir(new_dir) + + return new_dir + + + +def generate_data(file, root_dir): + ''' Generates directories and fake files for testing against ''' + + base_dir = '' + if file.endswith('-testcase.txt'): + base_dir = create_test_base(file, root_dir) + + files_to_create = [] + with open(file, 'r') as in_file: + files_to_create = in_file.read().splitlines() + + for filepath in files_to_create: + for part in os.path.split(filepath): + part_path = os.path.join(base_dir, part) + print_log('Checking if {0} exists '.format(part_path)) + p = Path(part_path) + + if not p.exists(): + print_log('Creating: {0}'.format(part)) + + if p.suffix != '': + with open(os.path.join(root_dir, base_dir + '/' + filepath), 'w+') as f: + f.write('') + else: + os.mkdir(part_path) + +def 
clean_up_generated_data(root_dir): + for root, dirs, files in os.walk(root_dir): + for dir in dirs: + shutil.rmtree(os.path.join(root, dir)) + for file in files: + if not file.endswith('-testcase.txt'): + print_log('Removing {0}'.format(os.path.join(root, file))) + os.remove(os.path.join(root, file)) + + +def generate_test_file(): + root_dir = os.path.abspath('.') + current_folder = os.path.split(root_dir)[-1] + out_files = [] + for root, _, files in os.walk(root_dir): + for file in files: + if not file.endswith('-testcase.txt'): + filename = os.path.join(root.replace(root_dir, ''), file) # root_dir or root_dir + '//'? + out_files.append(filename) + + with open(os.path.join(root_dir, current_folder + '-testcase.txt'), 'w+') as f: + for filename in out_files: + f.write(filename + '\n') + +if __name__ == '__main__': + verbose = True + generate_test_file() \ No newline at end of file diff --git a/API/.dockerignore b/API/.dockerignore new file mode 100644 index 000000000..cd967fc3a --- /dev/null +++ b/API/.dockerignore @@ -0,0 +1,25 @@ +**/.dockerignore +**/.env +**/.git +**/.gitignore +**/.project +**/.settings +**/.toolstarget +**/.vs +**/.vscode +**/.idea +**/*.*proj.user +**/*.dbmdl +**/*.jfm +**/azds.yaml +**/bin +**/charts +**/docker-compose* +**/Dockerfile* +**/node_modules +**/npm-debug.log +**/obj +**/secrets.dev.yaml +**/values.dev.yaml +LICENSE +README.md \ No newline at end of file diff --git a/API/API.csproj b/API/API.csproj index 465284979..87fa2f0d0 100644 --- a/API/API.csproj +++ b/API/API.csproj @@ -4,6 +4,7 @@ Default net5.0 true + Linux @@ -12,10 +13,12 @@ + + @@ -36,6 +39,7 @@ + diff --git a/API/Controllers/BookController.cs b/API/Controllers/BookController.cs new file mode 100644 index 000000000..b7589e6dd --- /dev/null +++ b/API/Controllers/BookController.cs @@ -0,0 +1,220 @@ +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using API.DTOs; +using API.Entities.Interfaces; +using API.Extensions; +using API.Interfaces; +using API.Services; +using HtmlAgilityPack; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; +using VersOne.Epub; + +namespace API.Controllers +{ + public class BookController : BaseApiController + { + private readonly ILogger _logger; + private readonly IBookService _bookService; + private readonly IUnitOfWork _unitOfWork; + private static readonly string BookApiUrl = "book-resources?file="; + + + public BookController(ILogger logger, IBookService bookService, IUnitOfWork unitOfWork) + { + _logger = logger; + _bookService = bookService; + _unitOfWork = unitOfWork; + } + + [HttpGet("{chapterId}/book-info")] + public async Task> GetBookInfo(int chapterId) + { + var chapter = await _unitOfWork.VolumeRepository.GetChapterAsync(chapterId); + var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath); + + return book.Title; + } + + [HttpGet("{chapterId}/book-resources")] + public async Task GetBookPageResources(int chapterId, [FromQuery] string file) + { + var chapter = await _unitOfWork.VolumeRepository.GetChapterAsync(chapterId); + var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath); + + var key = BookService.CleanContentKeys(file); + if (!book.Content.AllFiles.ContainsKey(key)) return BadRequest("File was not found in book"); + + var bookFile = book.Content.AllFiles[key]; + var content = await bookFile.ReadContentAsBytesAsync(); + Response.AddCacheHeader(content); + var contentType = BookService.GetContentType(bookFile.ContentType); + return File(content, 
contentType, $"{chapterId}-{file}"); + } + + [HttpGet("{chapterId}/chapters")] + public async Task>> GetBookChapters(int chapterId) + { + // This will return a list of mappings from ID -> pagenum. ID will be the xhtml key and pagenum will be the reading order + // this is used to rewrite anchors in the book text so that we always load properly in FE + var chapter = await _unitOfWork.VolumeRepository.GetChapterAsync(chapterId); + var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath); + var mappings = await _bookService.CreateKeyToPageMappingAsync(book); + + var navItems = await book.GetNavigationAsync(); + var chaptersList = new List(); + + foreach (var navigationItem in navItems) + { + if (navigationItem.NestedItems.Count > 0) + { + _logger.LogDebug("Header: {Header}", navigationItem.Title); + var nestedChapters = new List(); + + foreach (var nestedChapter in navigationItem.NestedItems) + { + if (nestedChapter.Link == null) continue; + var key = BookService.CleanContentKeys(nestedChapter.Link.ContentFileName); + if (mappings.ContainsKey(key)) + { + nestedChapters.Add(new BookChapterItem() + { + Title = nestedChapter.Title, + Page = mappings[key], + Part = nestedChapter.Link.Anchor ?? string.Empty, + Children = new List() + }); + } + } + + if (navigationItem.Link == null) + { + var item = new BookChapterItem() + { + Title = navigationItem.Title, + Children = nestedChapters + }; + if (nestedChapters.Count > 0) + { + item.Page = nestedChapters[0].Page; + } + chaptersList.Add(item); + } + else + { + var groupKey = BookService.CleanContentKeys(navigationItem.Link.ContentFileName); + if (mappings.ContainsKey(groupKey)) + { + chaptersList.Add(new BookChapterItem() + { + Title = navigationItem.Title, + Page = mappings[groupKey], + Children = nestedChapters + }); + } + } + } + } + return Ok(chaptersList); + } + + [HttpGet("{chapterId}/book-page")] + public async Task> GetBookPage(int chapterId, [FromQuery] int page) + { + var chapter = await _unitOfWork.VolumeRepository.GetChapterAsync(chapterId); + + var book = await EpubReader.OpenBookAsync(chapter.Files.ElementAt(0).FilePath); + var mappings = await _bookService.CreateKeyToPageMappingAsync(book); + + var counter = 0; + var doc = new HtmlDocument(); + var baseUrl = Request.Scheme + "://" + Request.Host + Request.PathBase + "/api/"; + var apiBase = baseUrl + "book/" + chapterId + "/" + BookApiUrl; + var bookPages = await book.GetReadingOrderAsync(); + foreach (var contentFileRef in bookPages) + { + if (page == counter) + { + var content = await contentFileRef.ReadContentAsync(); + if (contentFileRef.ContentType != EpubContentType.XHTML_1_1) return Ok(content); + + doc.LoadHtml(content); + var body = doc.DocumentNode.SelectSingleNode("/html/body"); + + var inlineStyles = doc.DocumentNode.SelectNodes("//style"); + if (inlineStyles != null) + { + foreach (var inlineStyle in inlineStyles) + { + var styleContent = await _bookService.ScopeStyles(inlineStyle.InnerHtml, apiBase); + body.PrependChild(HtmlNode.CreateNode($"")); + } + } + + var styleNodes = doc.DocumentNode.SelectNodes("/html/head/link"); + if (styleNodes != null) + { + foreach (var styleLinks in styleNodes) + { + var key = BookService.CleanContentKeys(styleLinks.Attributes["href"].Value); + var styleContent = await _bookService.ScopeStyles(await book.Content.Css[key].ReadContentAsync(), apiBase); + body.PrependChild(HtmlNode.CreateNode($"")); + } + } + + var anchors = doc.DocumentNode.SelectNodes("//a"); + if (anchors != null) + { + foreach (var anchor in anchors) + 
{ + BookService.UpdateLinks(anchor, mappings, page); + } + } + + var images = doc.DocumentNode.SelectNodes("//img"); + if (images != null) + { + foreach (var image in images) + { + if (image.Name != "img") continue; + + // Need to do for xlink:href + if (image.Attributes["src"] != null) + { + var imageFile = image.Attributes["src"].Value; + image.Attributes.Remove("src"); + image.Attributes.Add("src", $"{apiBase}" + imageFile); + } + } + } + + images = doc.DocumentNode.SelectNodes("//image"); + if (images != null) + { + foreach (var image in images) + { + if (image.Name != "image") continue; + + if (image.Attributes["xlink:href"] != null) + { + var imageFile = image.Attributes["xlink:href"].Value; + image.Attributes.Remove("xlink:href"); + image.Attributes.Add("xlink:href", $"{apiBase}" + imageFile); + } + } + } + + + + + return Ok(body.InnerHtml); + } + + counter++; + } + + return BadRequest("Could not find the appropriate html for that page"); + } + } +} \ No newline at end of file diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs index c1b6df2b8..4867be3d8 100644 --- a/API/Controllers/LibraryController.cs +++ b/API/Controllers/LibraryController.cs @@ -5,8 +5,8 @@ using System.Linq; using System.Threading.Tasks; using API.DTOs; using API.Entities; +using API.Entities.Enums; using API.Extensions; -using API.Helpers; using API.Interfaces; using API.Interfaces.Services; using AutoMapper; @@ -223,5 +223,11 @@ namespace API.Controllers return Ok(series); } + + [HttpGet("type")] + public async Task> GetLibraryType(int libraryId) + { + return Ok(await _unitOfWork.LibraryRepository.GetLibraryTypeAsync(libraryId)); + } } } \ No newline at end of file diff --git a/API/Controllers/ReaderController.cs b/API/Controllers/ReaderController.cs index 491b08a17..e3927146b 100644 --- a/API/Controllers/ReaderController.cs +++ b/API/Controllers/ReaderController.cs @@ -46,7 +46,7 @@ namespace API.Controllers return File(content, "image/" + format); } - + [HttpGet("chapter-path")] public async Task> GetImagePath(int chapterId) { diff --git a/API/Controllers/SettingsController.cs b/API/Controllers/SettingsController.cs index d149aa0d4..33565af56 100644 --- a/API/Controllers/SettingsController.cs +++ b/API/Controllers/SettingsController.cs @@ -105,6 +105,13 @@ namespace API.Controllers return Ok(CronConverter.Options); } + [Authorize(Policy = "RequireAdminRole")] + [HttpGet("library-types")] + public ActionResult> GetLibraryTypes() + { + return Ok(Enum.GetNames(typeof(LibraryType))); + } + [Authorize(Policy = "RequireAdminRole")] [HttpGet("log-levels")] public ActionResult> GetLogLevels() diff --git a/API/Controllers/UsersController.cs b/API/Controllers/UsersController.cs index 607f508e9..b51706217 100644 --- a/API/Controllers/UsersController.cs +++ b/API/Controllers/UsersController.cs @@ -38,6 +38,14 @@ namespace API.Controllers return Ok(await _unitOfWork.UserRepository.GetMembersAsync()); } + [HttpGet("has-reading-progress")] + public async Task> HasReadingProgress(int libraryId) + { + var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId); + var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); + return Ok(await _unitOfWork.AppUserProgressRepository.UserHasProgress(library.Type, user.Id)); + } + [HttpGet("has-library-access")] public async Task> HasLibraryAccess(int libraryId) { @@ -53,7 +61,11 @@ namespace API.Controllers existingPreferences.ReadingDirection = preferencesDto.ReadingDirection; 
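// Book reader preferences (margin, line spacing, font family, dark mode, font size) are copied alongside the existing reader options below; they replace the removed HideReadOnDetails flag, matching the updated UserPreferencesDto and the BookReaderPreferences migrations in this diff.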
existingPreferences.ScalingOption = preferencesDto.ScalingOption; existingPreferences.PageSplitOption = preferencesDto.PageSplitOption; - existingPreferences.HideReadOnDetails = preferencesDto.HideReadOnDetails; + existingPreferences.BookReaderMargin = preferencesDto.BookReaderMargin; + existingPreferences.BookReaderLineSpacing = preferencesDto.BookReaderLineSpacing; + existingPreferences.BookReaderFontFamily = preferencesDto.BookReaderFontFamily; + existingPreferences.BookReaderDarkMode = preferencesDto.BookReaderDarkMode; + existingPreferences.BookReaderFontSize = preferencesDto.BookReaderFontSize; _unitOfWork.UserRepository.Update(existingPreferences); diff --git a/API/DTOs/BookChapterItem.cs b/API/DTOs/BookChapterItem.cs new file mode 100644 index 000000000..68d1fce40 --- /dev/null +++ b/API/DTOs/BookChapterItem.cs @@ -0,0 +1,21 @@ +using System.Collections.Generic; + +namespace API.DTOs +{ + public class BookChapterItem + { + /// + /// Name of the Chapter + /// + public string Title { get; set; } + /// + /// A part represents the id of the anchor so we can scroll to it. 01_values.xhtml#h_sVZPaxUSy/ + /// + public string Part { get; set; } + /// + /// Page Number to load for the chapter + /// + public int Page { get; set; } + public ICollection Children { get; set; } + } +} \ No newline at end of file diff --git a/API/DTOs/ChapterDto.cs b/API/DTOs/ChapterDto.cs index 66934d040..4dcabee33 100644 --- a/API/DTOs/ChapterDto.cs +++ b/API/DTOs/ChapterDto.cs @@ -22,6 +22,10 @@ namespace API.DTOs /// public bool IsSpecial { get; init; } /// + /// Used for books/specials to display custom title. For non-specials/books, will be set to + /// + public string Title { get; init; } + /// /// The files that represent this Chapter /// public ICollection Files { get; init; } diff --git a/API/DTOs/UserPreferencesDto.cs b/API/DTOs/UserPreferencesDto.cs index bec209a5b..ea2a563e2 100644 --- a/API/DTOs/UserPreferencesDto.cs +++ b/API/DTOs/UserPreferencesDto.cs @@ -7,9 +7,10 @@ namespace API.DTOs public ReadingDirection ReadingDirection { get; set; } public ScalingOption ScalingOption { get; set; } public PageSplitOption PageSplitOption { get; set; } - /// - /// Whether UI hides read Volumes on Details page - /// - public bool HideReadOnDetails { get; set; } + public bool BookReaderDarkMode { get; set; } = false; + public int BookReaderMargin { get; set; } + public int BookReaderLineSpacing { get; set; } + public int BookReaderFontSize { get; set; } + public string BookReaderFontFamily { get; set; } } } \ No newline at end of file diff --git a/API/Data/AppUserProgressRepository.cs b/API/Data/AppUserProgressRepository.cs index a65ab2c48..38912b589 100644 --- a/API/Data/AppUserProgressRepository.cs +++ b/API/Data/AppUserProgressRepository.cs @@ -1,5 +1,6 @@ using System.Linq; using System.Threading.Tasks; +using API.Entities.Enums; using API.Interfaces; using Microsoft.EntityFrameworkCore; @@ -28,5 +29,28 @@ namespace API.Data _context.RemoveRange(rowsToRemove); return await _context.SaveChangesAsync() > 0 ? 
rowsToRemove.Count : 0; } + + /// + /// Checks if user has any progress against a library of passed type + /// + /// + /// + /// + public async Task UserHasProgress(LibraryType libraryType, int userId) + { + var seriesIds = await _context.AppUserProgresses + .Where(aup => aup.PagesRead > 0 && aup.AppUserId == userId) + .AsNoTracking() + .Select(aup => aup.SeriesId) + .ToListAsync(); + + if (seriesIds.Count == 0) return false; + + return await _context.Series + .Include(s => s.Library) + .Where(s => seriesIds.Contains(s.Id) && s.Library.Type == libraryType) + .AsNoTracking() + .AnyAsync(); + } } } \ No newline at end of file diff --git a/API/Data/DbFactory.cs b/API/Data/DbFactory.cs new file mode 100644 index 000000000..3589fc30e --- /dev/null +++ b/API/Data/DbFactory.cs @@ -0,0 +1,54 @@ +using System.Collections.Generic; +using API.Entities; +using API.Entities.Enums; +using API.Parser; +using API.Services.Tasks; + +namespace API.Data +{ + /// + /// Responsible for creating Series, Volume, Chapter, MangaFiles for use in + /// + public static class DbFactory + { + public static Series Series(string name) + { + return new () + { + Name = name, + OriginalName = name, + LocalizedName = name, + NormalizedName = Parser.Parser.Normalize(name), + SortName = name, + Summary = string.Empty, + Volumes = new List() + }; + } + + public static Volume Volume(string volumeNumber) + { + return new Volume() + { + Name = volumeNumber, + Number = (int) Parser.Parser.MinimumNumberFromRange(volumeNumber), + Chapters = new List() + }; + } + + public static Chapter Chapter(ParserInfo info) + { + var specialTreatment = info.IsSpecialInfo(); + var specialTitle = specialTreatment ? info.Filename : info.Chapters; + return new Chapter() + { + Number = specialTreatment ? "0" : Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty, + Range = specialTreatment ? info.Filename : info.Chapters, + Title = (specialTreatment && info.Format == MangaFormat.Book) + ? 
info.Title + : specialTitle, + Files = new List(), + IsSpecial = specialTreatment, + }; + } + } +} \ No newline at end of file diff --git a/API/Data/LibraryRepository.cs b/API/Data/LibraryRepository.cs index 707e7c62c..c065bface 100644 --- a/API/Data/LibraryRepository.cs +++ b/API/Data/LibraryRepository.cs @@ -1,10 +1,9 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; +using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using API.DTOs; using API.Entities; +using API.Entities.Enums; using API.Interfaces; using AutoMapper; using AutoMapper.QueryableExtensions; @@ -68,6 +67,15 @@ namespace API.Data .ToListAsync(); } + public async Task GetLibraryTypeAsync(int libraryId) + { + return await _context.Library + .Where(l => l.Id == libraryId) + .AsNoTracking() + .Select(l => l.Type) + .SingleAsync(); + } + public async Task> GetLibraryDtosAsync() { return await _context.Library diff --git a/API/Data/Migrations/20210419222000_BookReaderPreferences.Designer.cs b/API/Data/Migrations/20210419222000_BookReaderPreferences.Designer.cs new file mode 100644 index 000000000..eb4dd459a --- /dev/null +++ b/API/Data/Migrations/20210419222000_BookReaderPreferences.Designer.cs @@ -0,0 +1,748 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20210419222000_BookReaderPreferences")] + partial class BookReaderPreferences + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "5.0.4"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + 
.HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookReaderDarkMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + 
b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId") + .IsUnique(); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + 
b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + 
.IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20210419222000_BookReaderPreferences.cs b/API/Data/Migrations/20210419222000_BookReaderPreferences.cs new file mode 100644 index 000000000..0dd1089eb --- /dev/null +++ b/API/Data/Migrations/20210419222000_BookReaderPreferences.cs @@ -0,0 +1,56 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +namespace API.Data.Migrations +{ + public partial class BookReaderPreferences : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.RenameColumn( + name: "HideReadOnDetails", + table: "AppUserPreferences", + newName: "BookReaderMargin"); + + migrationBuilder.AddColumn( + name: "BookReaderDarkMode", + table: "AppUserPreferences", + type: "INTEGER", + nullable: false, + defaultValue: false); + + migrationBuilder.AddColumn( + name: "BookReaderFontFamily", + table: "AppUserPreferences", + type: "TEXT", + nullable: true, + defaultValue: "default"); + + migrationBuilder.AddColumn( + name: "BookReaderLineSpacing", + table: "AppUserPreferences", + type: "INTEGER", + nullable: false, + defaultValue: 100); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "BookReaderDarkMode", + table: "AppUserPreferences"); + + migrationBuilder.DropColumn( + name: "BookReaderFontFamily", + table: "AppUserPreferences"); + + migrationBuilder.DropColumn( + name: "BookReaderLineSpacing", + table: "AppUserPreferences"); + + migrationBuilder.RenameColumn( + name: "BookReaderMargin", + table: "AppUserPreferences", + newName: "HideReadOnDetails"); + } + } +} diff --git a/API/Data/Migrations/20210419234652_BookReaderPreferencesFontSize.Designer.cs b/API/Data/Migrations/20210419234652_BookReaderPreferencesFontSize.Designer.cs new file mode 100644 
index 000000000..95005cf47 --- /dev/null +++ b/API/Data/Migrations/20210419234652_BookReaderPreferencesFontSize.Designer.cs @@ -0,0 +1,751 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20210419234652_BookReaderPreferencesFontSize")] + partial class BookReaderPreferencesFontSize + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "5.0.4"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookReaderDarkMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.ToTable("AppUserPreferences"); + }); + + 
modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + 
.HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId") + .IsUnique(); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + 
b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + 
.OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20210419234652_BookReaderPreferencesFontSize.cs b/API/Data/Migrations/20210419234652_BookReaderPreferencesFontSize.cs new file mode 100644 index 000000000..1745e4f73 --- /dev/null +++ b/API/Data/Migrations/20210419234652_BookReaderPreferencesFontSize.cs @@ -0,0 +1,24 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +namespace API.Data.Migrations +{ + public partial class BookReaderPreferencesFontSize : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.AddColumn( + name: "BookReaderFontSize", + table: "AppUserPreferences", + type: "INTEGER", + nullable: false, + defaultValue: 100); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "BookReaderFontSize", + table: "AppUserPreferences"); + } + } +} diff --git a/API/Data/Migrations/20210423132900_CustomChapterTitle.Designer.cs b/API/Data/Migrations/20210423132900_CustomChapterTitle.Designer.cs new file mode 100644 index 000000000..693480dd3 --- /dev/null +++ b/API/Data/Migrations/20210423132900_CustomChapterTitle.Designer.cs @@ -0,0 +1,751 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20210423132900_CustomChapterTitle")] + partial class CustomChapterTitle + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "5.0.4"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + 
.HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookReaderDarkMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + 
.HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId") + .IsUnique(); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + 
b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + 
.OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20210423132900_CustomChapterTitle.cs b/API/Data/Migrations/20210423132900_CustomChapterTitle.cs new file mode 100644 index 000000000..b3958127c --- /dev/null +++ b/API/Data/Migrations/20210423132900_CustomChapterTitle.cs @@ -0,0 +1,34 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +namespace API.Data.Migrations +{ + public partial class CustomChapterTitle : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "IsSpecial", + table: "Volume"); + + migrationBuilder.AddColumn( + name: "Title", + table: "Chapter", + type: "TEXT", + nullable: true); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "Title", + table: "Chapter"); + + migrationBuilder.AddColumn( + name: "IsSpecial", + table: "Volume", + type: "INTEGER", + nullable: false, + defaultValue: false); + } + } +} diff --git a/API/Data/Migrations/DataContextModelSnapshot.cs 
b/API/Data/Migrations/DataContextModelSnapshot.cs index 09fe5689f..abcb68281 100644 --- a/API/Data/Migrations/DataContextModelSnapshot.cs +++ b/API/Data/Migrations/DataContextModelSnapshot.cs @@ -14,7 +14,7 @@ namespace API.Data.Migrations { #pragma warning disable 612, 618 modelBuilder - .HasAnnotation("ProductVersion", "5.0.1"); + .HasAnnotation("ProductVersion", "5.0.4"); modelBuilder.Entity("API.Entities.AppRole", b => { @@ -127,7 +127,19 @@ namespace API.Data.Migrations b.Property("AppUserId") .HasColumnType("INTEGER"); - b.Property("HideReadOnDetails") + b.Property("BookReaderDarkMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") .HasColumnType("INTEGER"); b.Property("PageSplitOption") @@ -248,6 +260,9 @@ namespace API.Data.Migrations b.Property("Range") .HasColumnType("TEXT"); + b.Property("Title") + .HasColumnType("TEXT"); + b.Property("VolumeId") .HasColumnType("INTEGER"); @@ -412,9 +427,6 @@ namespace API.Data.Migrations b.Property("Created") .HasColumnType("TEXT"); - b.Property("IsSpecial") - .HasColumnType("INTEGER"); - b.Property("LastModified") .HasColumnType("TEXT"); diff --git a/API/Dockerfile b/API/Dockerfile new file mode 100644 index 000000000..d813139f8 --- /dev/null +++ b/API/Dockerfile @@ -0,0 +1,20 @@ +FROM mcr.microsoft.com/dotnet/aspnet:5.0 AS base +WORKDIR /app +EXPOSE 80 +EXPOSE 443 + +FROM mcr.microsoft.com/dotnet/sdk:5.0 AS build +WORKDIR /src +COPY ["API/API.csproj", "API/"] +RUN dotnet restore "API/API.csproj" +COPY . . +WORKDIR "/src/API" +RUN dotnet build "API.csproj" -c Release -o /app/build + +FROM build AS publish +RUN dotnet publish "API.csproj" -c Release -o /app/publish + +FROM base AS final +WORKDIR /app +COPY --from=publish /app/publish . 
+ENTRYPOINT ["dotnet", "API.dll"] diff --git a/API/Entities/AppUserPreferences.cs b/API/Entities/AppUserPreferences.cs index 1a2e6b41b..a4a773a38 100644 --- a/API/Entities/AppUserPreferences.cs +++ b/API/Entities/AppUserPreferences.cs @@ -5,13 +5,39 @@ namespace API.Entities public class AppUserPreferences { public int Id { get; set; } - public ReadingDirection ReadingDirection { get; set; } = ReadingDirection.LeftToRight; - public ScalingOption ScalingOption { get; set; } = ScalingOption.FitToHeight; - public PageSplitOption PageSplitOption { get; set; } = PageSplitOption.SplitRightToLeft; /// - /// Whether UI hides read Volumes on Details page + /// Manga Reader Option: What direction should the next/prev page buttons go /// - public bool HideReadOnDetails { get; set; } = false; + public ReadingDirection ReadingDirection { get; set; } = ReadingDirection.LeftToRight; + /// + /// Manga Reader Option: How should the image be scaled to screen + /// + public ScalingOption ScalingOption { get; set; } = ScalingOption.FitToHeight; + /// + /// Manga Reader Option: Which side of a split image should we show first + /// + public PageSplitOption PageSplitOption { get; set; } = PageSplitOption.SplitRightToLeft; + + /// + /// Book Reader Option: Should the background color be dark + /// + public bool BookReaderDarkMode { get; set; } = false; + /// + /// Book Reader Option: Override extra Margin + /// + public int BookReaderMargin { get; set; } = 15; + /// + /// Book Reader Option: Override line-height + /// + public int BookReaderLineSpacing { get; set; } = 100; + /// + /// Book Reader Option: Override font size + /// + public int BookReaderFontSize { get; set; } = 100; + /// + /// Book Reader Option: Maps to the default Kavita font-family (inherit) or an override + /// + public string BookReaderFontFamily { get; set; } = "default"; diff --git a/API/Entities/Chapter.cs b/API/Entities/Chapter.cs index 015c4e4d8..31f4dc513 100644 --- a/API/Entities/Chapter.cs +++ b/API/Entities/Chapter.cs @@ -1,6 +1,8 @@ using System; using System.Collections.Generic; +using API.Entities.Enums; using API.Entities.Interfaces; +using API.Parser; namespace API.Entities { @@ -30,10 +32,27 @@ namespace API.Entities /// If this Chapter contains files that could only be identified as Series or has Special Identifier from filename /// public bool IsSpecial { get; set; } + /// + /// Used for books/specials to display custom title. For non-specials/books, will be set to + /// + public string Title { get; set; } // Relationships public Volume Volume { get; set; } public int VolumeId { get; set; } + public void UpdateFrom(ParserInfo info) + { + Files ??= new List(); + IsSpecial = info.IsSpecialInfo(); + if (IsSpecial) + { + Number = "0"; + } + Title = (IsSpecial && info.Format == MangaFormat.Book) + ? 
info.Title + : Range; + + } } } \ No newline at end of file diff --git a/API/Entities/Enums/MangaFormat.cs b/API/Entities/Enums/MangaFormat.cs index 31ebd5bb3..121aa3e1c 100644 --- a/API/Entities/Enums/MangaFormat.cs +++ b/API/Entities/Enums/MangaFormat.cs @@ -9,6 +9,8 @@ namespace API.Entities.Enums [Description("Archive")] Archive = 1, [Description("Unknown")] - Unknown = 2 + Unknown = 2, + [Description("Book")] + Book = 3 } } \ No newline at end of file diff --git a/API/Entities/MangaFile.cs b/API/Entities/MangaFile.cs index ddf2ea3fc..2efb76bfb 100644 --- a/API/Entities/MangaFile.cs +++ b/API/Entities/MangaFile.cs @@ -1,6 +1,8 @@  using System; +using System.IO; using API.Entities.Enums; +using API.Extensions; namespace API.Entities { @@ -24,5 +26,11 @@ namespace API.Entities // Relationship Mapping public Chapter Chapter { get; set; } public int ChapterId { get; set; } + + // Methods + public bool HasFileBeenModified() + { + return new FileInfo(FilePath).DoesLastWriteMatch(LastModified); + } } } \ No newline at end of file diff --git a/API/Entities/Series.cs b/API/Entities/Series.cs index 6406e118f..0ad7c8c16 100644 --- a/API/Entities/Series.cs +++ b/API/Entities/Series.cs @@ -45,5 +45,6 @@ namespace API.Entities public List Volumes { get; set; } public Library Library { get; set; } public int LibraryId { get; set; } + } } \ No newline at end of file diff --git a/API/Entities/Volume.cs b/API/Entities/Volume.cs index 999b9a801..dab9f2e1b 100644 --- a/API/Entities/Volume.cs +++ b/API/Entities/Volume.cs @@ -15,12 +15,7 @@ namespace API.Entities public byte[] CoverImage { get; set; } public int Pages { get; set; } - /// - /// Represents a Side story that is linked to the original Series. Omake, One Shot, etc. - /// - public bool IsSpecial { get; set; } = false; - - + // Relationships public Series Series { get; set; } diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs index 89b338e5c..b0e09e18f 100644 --- a/API/Extensions/ApplicationServiceExtensions.cs +++ b/API/Extensions/ApplicationServiceExtensions.cs @@ -1,4 +1,5 @@ using API.Data; +using API.Entities.Interfaces; using API.Helpers; using API.Interfaces; using API.Interfaces.Services; @@ -26,8 +27,8 @@ namespace API.Extensions services.AddScoped(); services.AddScoped(); services.AddScoped(); - - + services.AddScoped(); + services.AddDbContext(options => { diff --git a/API/Extensions/ChapterListExtensions.cs b/API/Extensions/ChapterListExtensions.cs new file mode 100644 index 000000000..6362c0571 --- /dev/null +++ b/API/Extensions/ChapterListExtensions.cs @@ -0,0 +1,35 @@ +using System.Collections.Generic; +using System.Linq; +using API.Entities; +using API.Parser; + +namespace API.Extensions +{ + public static class ChapterListExtensions + { + /// + /// Returns first chapter in the list with at least one file + /// + /// + /// + public static Chapter GetFirstChapterWithFiles(this IList chapters) + { + return chapters.FirstOrDefault(c => c.Files.Any()); + } + + /// + /// Gets a single chapter (or null if doesn't exist) where Range matches the info.Chapters property. If the info + /// is then, the filename is used to search against Range or if filename exists within Files of said Chapter. + /// + /// + /// + /// + public static Chapter GetChapterByRange(this IList chapters, ParserInfo info) + { + var specialTreatment = info.IsSpecialInfo(); + return specialTreatment + ? 
chapters.SingleOrDefault(c => c.Range == info.Filename || (c.Files.Select(f => f.FilePath).Contains(info.FullFilePath))) + : chapters.SingleOrDefault(c => c.Range == info.Chapters); + } + } +} \ No newline at end of file diff --git a/API/Extensions/ParserInfoListExtensions.cs b/API/Extensions/ParserInfoListExtensions.cs new file mode 100644 index 000000000..adbf32c3c --- /dev/null +++ b/API/Extensions/ParserInfoListExtensions.cs @@ -0,0 +1,34 @@ +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using API.Entities; +using API.Parser; + +namespace API.Extensions +{ + public static class ParserInfoListExtensions + { + /// + /// Selects distinct volume numbers by the "Volumes" key on the ParserInfo + /// + /// + /// + public static IList DistinctVolumes(this IList infos) + { + return infos.Select(p => p.Volumes).Distinct().ToList(); + } + + /// + /// Checks if a list of ParserInfos has a given chapter or not. Lookup occurs on Range property. If a chapter is + /// special, then the is matched, else the field is checked. + /// + /// + /// + /// + public static bool HasInfo(this IList infos, Chapter chapter) + { + return chapter.IsSpecial ? infos.Any(v => v.Filename == chapter.Range) + : infos.Any(v => v.Chapters == chapter.Range); + } + } +} \ No newline at end of file diff --git a/API/Extensions/SeriesExtensions.cs b/API/Extensions/SeriesExtensions.cs index 5680c52d2..29f495d76 100644 --- a/API/Extensions/SeriesExtensions.cs +++ b/API/Extensions/SeriesExtensions.cs @@ -1,4 +1,5 @@ using System.Collections.Generic; +using System.Linq; using API.Entities; namespace API.Extensions @@ -13,15 +14,7 @@ namespace API.Extensions /// public static bool NameInList(this Series series, IEnumerable list) { - foreach (var name in list) - { - if (Parser.Parser.Normalize(name) == series.NormalizedName || name == series.Name || name == series.LocalizedName || name == series.OriginalName) - { - return true; - } - } - - return false; + return list.Any(name => Parser.Parser.Normalize(name) == series.NormalizedName || Parser.Parser.Normalize(name) == Parser.Parser.Normalize(series.Name) || name == series.Name || name == series.LocalizedName || name == series.OriginalName); } } } \ No newline at end of file diff --git a/API/Extensions/VolumeListExtensions.cs b/API/Extensions/VolumeListExtensions.cs new file mode 100644 index 000000000..5b50d382f --- /dev/null +++ b/API/Extensions/VolumeListExtensions.cs @@ -0,0 +1,38 @@ +using System.Collections.Generic; +using System.Linq; +using API.Entities; +using API.Entities.Enums; + +namespace API.Extensions +{ + public static class VolumeListExtensions + { + public static Volume FirstWithChapters(this IList volumes, bool inBookSeries) + { + return inBookSeries + ? volumes.FirstOrDefault(v => v.Chapters.Any()) + : volumes.FirstOrDefault(v => v.Chapters.Any() && (v.Number == 1)); + } + + /// + /// Selects the first Volume to get the cover image from. For a book with only a special, the special will be returned. + /// If there are both specials and non-specials, then the first non-special will be returned. 
+ /// + /// + /// + /// + public static Volume GetCoverImage(this IList volumes, LibraryType libraryType) + { + if (libraryType == LibraryType.Book) + { + return volumes.OrderBy(x => x.Number).FirstOrDefault(); + } + + if (volumes.Any(x => x.Number != 0)) + { + return volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0); + } + return volumes.OrderBy(x => x.Number).FirstOrDefault(); + } + } +} \ No newline at end of file diff --git a/API/Helpers/Converters/CronConverter.cs b/API/Helpers/Converters/CronConverter.cs index 6fece1bdb..cacf018b1 100644 --- a/API/Helpers/Converters/CronConverter.cs +++ b/API/Helpers/Converters/CronConverter.cs @@ -13,7 +13,7 @@ namespace API.Helpers.Converters }; public static string ConvertToCronNotation(string source) { - string destination = ""; + var destination = string.Empty; destination = source.ToLower() switch { "daily" => Cron.Daily(), @@ -28,7 +28,7 @@ namespace API.Helpers.Converters public static string ConvertFromCronNotation(string cronNotation) { - string destination = ""; + var destination = string.Empty; destination = cronNotation.ToLower() switch { "0 0 31 2 *" => "disabled", diff --git a/API/Interfaces/IAppUserProgressRepository.cs b/API/Interfaces/IAppUserProgressRepository.cs index a268ac5f5..96ada0c50 100644 --- a/API/Interfaces/IAppUserProgressRepository.cs +++ b/API/Interfaces/IAppUserProgressRepository.cs @@ -1,9 +1,11 @@ using System.Threading.Tasks; +using API.Entities.Enums; namespace API.Interfaces { public interface IAppUserProgressRepository { Task CleanupAbandonedChapters(); + Task UserHasProgress(LibraryType libraryType, int userId); } } \ No newline at end of file diff --git a/API/Interfaces/IBookService.cs b/API/Interfaces/IBookService.cs new file mode 100644 index 000000000..43c3cd479 --- /dev/null +++ b/API/Interfaces/IBookService.cs @@ -0,0 +1,21 @@ +using System.Collections.Generic; +using System.Threading.Tasks; +using VersOne.Epub; + +namespace API.Interfaces +{ + public interface IBookService + { + int GetNumberOfPages(string filePath); + byte[] GetCoverImage(string fileFilePath, bool createThumbnail = true); + Task> CreateKeyToPageMappingAsync(EpubBookRef book); + /// + /// Scopes styles to .reading-section and replaces img src to the passed apiBase + /// + /// + /// + /// + Task ScopeStyles(string stylesheetHtml, string apiBase); + string GetSummaryInfo(string filePath); + } +} \ No newline at end of file diff --git a/API/Interfaces/ILibraryRepository.cs b/API/Interfaces/ILibraryRepository.cs index 43e0db6e6..f8cedce90 100644 --- a/API/Interfaces/ILibraryRepository.cs +++ b/API/Interfaces/ILibraryRepository.cs @@ -2,6 +2,7 @@ using System.Threading.Tasks; using API.DTOs; using API.Entities; +using API.Entities.Enums; namespace API.Interfaces { @@ -17,5 +18,6 @@ namespace API.Interfaces Task> GetLibrariesAsync(); Task DeleteLibrary(int libraryId); Task> GetLibrariesForUserIdAsync(int userId); + Task GetLibraryTypeAsync(int libraryId); } } \ No newline at end of file diff --git a/API/Middleware/BookRedirectMiddleware.cs b/API/Middleware/BookRedirectMiddleware.cs new file mode 100644 index 000000000..f2e805466 --- /dev/null +++ b/API/Middleware/BookRedirectMiddleware.cs @@ -0,0 +1,22 @@ +using System.Threading.Tasks; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging; + +namespace API.Middleware +{ + public class BookRedirectMiddleware + { + private readonly ILogger _logger; + + public BookRedirectMiddleware(ILogger logger) + { + _logger = logger; + } + + public async Task 
InvokeAsync(HttpContext context, RequestDelegate next) + { + _logger.LogDebug("BookRedirect Path: {Path}", context.Request.Path.ToString()); + await next.Invoke(context); + } + } +} \ No newline at end of file diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index dc2e701e2..fbf5717e0 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -9,14 +9,19 @@ namespace API.Parser { public static class Parser { - public static readonly string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|.tar.gz|.7zip"; + public static readonly string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip"; + public static readonly string BookFileExtensions = @"\.epub"; public static readonly string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg)"; + public static readonly Regex FontSrcUrlRegex = new Regex("(src:url\\(\"?'?)([a-z0-9/\\._]+)(\"?'?\\))", RegexOptions.IgnoreCase | RegexOptions.Compiled); + private static readonly string XmlRegexExtensions = @"\.xml"; private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled); private static readonly Regex ArchiveFileRegex = new Regex(ArchiveFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled); private static readonly Regex XmlRegex = new Regex(XmlRegexExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled); + private static readonly Regex BookFileRegex = new Regex(BookFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled); private static readonly Regex CoverImageRegex = new Regex(@"(?.*)( |_)Vol\.?\d+", + RegexOptions.IgnoreCase | RegexOptions.Compiled), // Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip new Regex( @"(?.*)(\b|_)v(?\d+-?\d*)( |_)", @@ -126,10 +135,7 @@ namespace API.Parser new Regex( @"^(?!Vol)(?.*)( |_)Chapter( |_)(\d+)", RegexOptions.IgnoreCase | RegexOptions.Compiled), - // [SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar - new Regex( - @"^(?.*)( |_)Vol\.?\d+", - RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Fullmetal Alchemist chapters 101-108.cbz new Regex( @"^(?!vol)(?.*)( |_)(chapters( |_)?)\d+-?\d*", @@ -238,21 +244,21 @@ namespace API.Parser private static readonly Regex[] ComicChapterRegex = new[] { - // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS) - new Regex( - @"^(?\d+) (- |_)?(?.*(\d{4})?)( |_)(\(|\d+)", - RegexOptions.IgnoreCase | RegexOptions.Compiled), - // 01 Spider-Man & Wolverine 01.cbr - new Regex( - @"^(?\d+) (?:- )?(?.*) (\d+)?", - RegexOptions.IgnoreCase | RegexOptions.Compiled), + // // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS) + // new Regex( + // @"^(?\d+) (- |_)?(?.*(\d{4})?)( |_)(\(|\d+)", + // RegexOptions.IgnoreCase | RegexOptions.Compiled), + // // 01 Spider-Man & Wolverine 01.cbr + // new Regex( + // @"^(?\d+) (?:- )?(?.*) (\d+)?", // NOTE: WHy is this here without a capture group + // RegexOptions.IgnoreCase | RegexOptions.Compiled), // Batman & Wildcat (1 of 3) new Regex( @"(?.*(\d{4})?)( |_)(?:\((?\d+) of \d+)", RegexOptions.IgnoreCase | RegexOptions.Compiled), // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) new Regex( - @"^(?.*)(?: |_)v(?\d+)(?: |_)(c? ?)(?\d+)", + @"^(?.*)(?: |_)v(?\d+)(?: |_)(c? ?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? 
?)", RegexOptions.IgnoreCase | RegexOptions.Compiled), // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) new Regex( @@ -262,6 +268,10 @@ namespace API.Parser new Regex( @"^(?.*)(?: |_)#(?\d+)", RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr + new Regex( + @"^(?.*)(?: |_)(c? ?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)-", + RegexOptions.IgnoreCase | RegexOptions.Compiled), }; private static readonly Regex[] ReleaseGroupRegex = new[] @@ -350,7 +360,7 @@ namespace API.Parser { // All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle. new Regex( - @"(?Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection)", + @"(?Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories)", RegexOptions.IgnoreCase | RegexOptions.Compiled), }; @@ -366,17 +376,34 @@ namespace API.Parser public static ParserInfo Parse(string filePath, string rootPath, LibraryType type = LibraryType.Manga) { var fileName = Path.GetFileName(filePath); + ParserInfo ret; - var ret = new ParserInfo() + if (type == LibraryType.Book) { - Chapters = type == LibraryType.Manga ? ParseChapter(fileName) : ParseComicChapter(fileName), - Series = type == LibraryType.Manga ? ParseSeries(fileName) : ParseComicSeries(fileName), - Volumes = type == LibraryType.Manga ? ParseVolume(fileName) : ParseComicVolume(fileName), - Filename = fileName, - Format = ParseFormat(filePath), - FullFilePath = filePath - }; - + ret = new ParserInfo() + { + Chapters = ParseChapter(fileName) ?? ParseComicChapter(fileName), + Series = ParseSeries(fileName) ?? ParseComicSeries(fileName), + Volumes = ParseVolume(fileName) ?? ParseComicVolume(fileName), + Filename = fileName, + Format = ParseFormat(filePath), + FullFilePath = filePath + }; + } + else + { + ret = new ParserInfo() + { + Chapters = type == LibraryType.Manga ? ParseChapter(fileName) : ParseComicChapter(fileName), + Series = type == LibraryType.Manga ? ParseSeries(fileName) : ParseComicSeries(fileName), + Volumes = type == LibraryType.Manga ? ParseVolume(fileName) : ParseComicVolume(fileName), + Filename = fileName, + Format = ParseFormat(filePath), + Title = Path.GetFileNameWithoutExtension(fileName), + FullFilePath = filePath + }; + } + if (ret.Series == string.Empty) { // Try to parse information out of each folder all the way to rootPath @@ -412,6 +439,8 @@ namespace API.Parser } var isSpecial = ParseMangaSpecial(fileName); + // We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that + // could cause a problem as Omake is a special term, but there is valid volume/chapter information. 
if (ret.Chapters == "0" && ret.Volumes == "0" && !string.IsNullOrEmpty(isSpecial)) { ret.IsSpecial = true; @@ -426,6 +455,7 @@ namespace API.Parser { if (IsArchive(filePath)) return MangaFormat.Archive; if (IsImage(filePath)) return MangaFormat.Image; + if (IsBook(filePath)) return MangaFormat.Book; return MangaFormat.Unknown; } @@ -520,7 +550,7 @@ namespace API.Parser return "0"; } - + public static string ParseComicVolume(string filename) { foreach (var regex in ComicVolumeRegex) @@ -735,6 +765,10 @@ namespace API.Parser { return ArchiveFileRegex.IsMatch(Path.GetExtension(filePath)); } + public static bool IsBook(string filePath) + { + return BookFileRegex.IsMatch(Path.GetExtension(filePath)); + } public static bool IsImage(string filePath, bool suppressExtraChecks = false) { @@ -749,13 +783,13 @@ namespace API.Parser public static float MinimumNumberFromRange(string range) { - var tokens = range.Split("-"); + var tokens = range.Replace("_", string.Empty).Split("-"); return tokens.Min(float.Parse); } public static string Normalize(string name) { - return name.ToLower().Replace("-", "").Replace(" ", "").Replace(":", "").Replace("_", ""); + return Regex.Replace(name.ToLower(), "[^a-zA-Z0-9]", string.Empty); } /// @@ -773,6 +807,10 @@ namespace API.Parser return path.Contains("__MACOSX"); } - + + public static bool IsEpub(string filePath) + { + return Path.GetExtension(filePath).ToLower() == ".epub"; + } } } \ No newline at end of file diff --git a/API/Parser/ParserInfo.cs b/API/Parser/ParserInfo.cs index 4b7d5985e..c5499d797 100644 --- a/API/Parser/ParserInfo.cs +++ b/API/Parser/ParserInfo.cs @@ -7,16 +7,36 @@ namespace API.Parser /// public class ParserInfo { - // This can be multiple + /// + /// Represents the parsed chapters from a file. By default, will be 0 which means nothing could be parsed. + /// The chapters can only be a single float or a range of float ie) 1-2. Mainly floats should be multiples of 0.5 representing specials + /// public string Chapters { get; set; } = ""; + /// + /// Represents the parsed series from the file or folder + /// public string Series { get; set; } = ""; - // This can be multiple + /// + /// Represents the parsed volumes from a file. By default, will be 0 which means that nothing could be parsed. + /// If Volumes is 0 and Chapters is 0, the file is a special. If Chapters is non-zero, then no volume could be parsed. + /// Beastars Vol 3-4 will map to "3-4" + /// The volumes can only be a single int or a range of ints ie) 1-2. Float based volumes are not supported. + /// public string Volumes { get; set; } = ""; + /// + /// Filename of the underlying file + /// Beastars v01 (digital).cbz + /// public string Filename { get; init; } = ""; + /// + /// Full filepath of the underlying file + /// C:/Manga/Beastars v01 (digital).cbz + /// public string FullFilePath { get; set; } = ""; /// - /// that represents the type of the file (so caching service knows how to cache for reading) + /// that represents the type of the file + /// Mainly used to show in the UI and so caching service knows how to cache for reading. /// public MangaFormat Format { get; set; } = MangaFormat.Unknown; @@ -26,8 +46,38 @@ namespace API.Parser public string Edition { get; set; } = ""; /// - /// If the file contains no volume/chapter information and contains Special Keywords + /// If the file contains no volume/chapter information or contains Special Keywords /// public bool IsSpecial { get; set; } = false; + + /// + /// Used for specials or books, stores what the UI should show. 
+ /// Manga does not use this field + /// + public string Title { get; set; } = string.Empty; + + /// + /// If the ParserInfo has the IsSpecial tag or both volumes and chapters are default aka 0 + /// + /// + public bool IsSpecialInfo() + { + return (IsSpecial || (Volumes == "0" && Chapters == "0")); + } + + /// + /// Merges non empty/null properties from info2 into this entity. + /// + /// + public void Merge(ParserInfo info2) + { + if (info2 == null) return; + Chapters = string.IsNullOrEmpty(Chapters) || Chapters == "0" ? info2.Chapters: Chapters; + Volumes = string.IsNullOrEmpty(Volumes) || Volumes == "0" ? info2.Volumes : Volumes; + Edition = string.IsNullOrEmpty(Edition) ? info2.Edition : Edition; + Title = string.IsNullOrEmpty(Title) ? info2.Title : Title; + Series = string.IsNullOrEmpty(Series) ? info2.Series : Series; + IsSpecial = IsSpecial || info2.IsSpecial; + } } } \ No newline at end of file diff --git a/API/Program.cs b/API/Program.cs index ca814beb9..f65bba4ff 100644 --- a/API/Program.cs +++ b/API/Program.cs @@ -2,7 +2,6 @@ using System; using System.Threading.Tasks; using API.Data; using API.Entities; -using API.Services; using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Identity; using Microsoft.EntityFrameworkCore; @@ -40,13 +39,6 @@ namespace API var logger = services.GetRequiredService < ILogger>(); logger.LogError(ex, "An error occurred during migration"); } - - // Load all tasks from DI and initialize them (TODO: This is not working - WarmupServicesStartupTask is Null) - var startupTasks = host.Services.GetServices(); - foreach (var startupTask in startupTasks) - { - await startupTask.ExecuteAsync(); - } await host.RunAsync(); } diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs index 2e9f717f1..fc6b8d444 100644 --- a/API/Services/ArchiveService.cs +++ b/API/Services/ArchiveService.cs @@ -21,11 +21,12 @@ namespace API.Services /// /// Responsible for manipulating Archive files. Used by and /// + // ReSharper disable once ClassWithVirtualMembersNeverInherited.Global public class ArchiveService : IArchiveService { private readonly ILogger _logger; private const int ThumbnailWidth = 320; // 153w x 230h - private static readonly RecyclableMemoryStreamManager _streamManager = new(); + private static readonly RecyclableMemoryStreamManager StreamManager = new(); private readonly NaturalSortComparer _comparer; public ArchiveService(ILogger logger) @@ -41,7 +42,7 @@ namespace API.Services /// public virtual ArchiveLibrary CanOpen(string archivePath) { - if (!File.Exists(archivePath) || !Parser.Parser.IsArchive(archivePath)) return ArchiveLibrary.NotSupported; + if (!(File.Exists(archivePath) && Parser.Parser.IsArchive(archivePath) || Parser.Parser.IsEpub(archivePath))) return ArchiveLibrary.NotSupported; try { @@ -172,7 +173,7 @@ namespace API.Services var entryName = FindFolderEntry(entryNames) ?? 
FirstFileEntry(entryNames); var entry = archive.Entries.Single(e => e.Key == entryName); - using var ms = _streamManager.GetStream(); + using var ms = StreamManager.GetStream(); entry.WriteTo(ms); ms.Position = 0; @@ -197,7 +198,7 @@ namespace API.Services private static byte[] ConvertEntryToByteArray(ZipArchiveEntry entry) { using var stream = entry.Open(); - using var ms = _streamManager.GetStream(); + using var ms = StreamManager.GetStream(); stream.CopyTo(ms); return ms.ToArray(); } @@ -248,7 +249,7 @@ namespace API.Services return false; } - if (Parser.Parser.IsArchive(archivePath)) return true; + if (Parser.Parser.IsArchive(archivePath) || Parser.Parser.IsEpub(archivePath)) return true; _logger.LogError("Archive {ArchivePath} is not a valid archive", archivePath); return false; @@ -261,7 +262,7 @@ namespace API.Services { if (Path.GetFileNameWithoutExtension(entry.Key).ToLower().EndsWith("comicinfo") && !Parser.Parser.HasBlacklistedFolderInPath(entry.Key) && Parser.Parser.IsXml(entry.Key)) { - using var ms = _streamManager.GetStream(); + using var ms = StreamManager.GetStream(); entry.WriteTo(ms); ms.Position = 0; @@ -398,10 +399,10 @@ namespace API.Services break; } case ArchiveLibrary.NotSupported: - _logger.LogError("[GetNumberOfPagesFromArchive] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath); + _logger.LogError("[ExtractArchive] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath); return; default: - _logger.LogError("[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath); + _logger.LogError("[ExtractArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath); return; } diff --git a/API/Services/BookService.cs b/API/Services/BookService.cs new file mode 100644 index 000000000..046dbcb40 --- /dev/null +++ b/API/Services/BookService.cs @@ -0,0 +1,257 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text.RegularExpressions; +using System.Threading.Tasks; +using API.Entities.Enums; +using API.Entities.Interfaces; +using API.Interfaces; +using API.Parser; +using ExCSS; +using HtmlAgilityPack; +using Microsoft.Extensions.Logging; +using NetVips; +using VersOne.Epub; +using VersOne.Epub.Schema; + +namespace API.Services +{ + public class BookService : IBookService + { + private readonly ILogger _logger; + + private const int ThumbnailWidth = 320; // 153w x 230h + private readonly StylesheetParser _cssParser = new (); + + public BookService(ILogger logger) + { + _logger = logger; + } + + private static bool HasClickableHrefPart(HtmlNode anchor) + { + return anchor.GetAttributeValue("href", string.Empty).Contains("#") + && anchor.GetAttributeValue("tabindex", string.Empty) != "-1" + && anchor.GetAttributeValue("role", string.Empty) != "presentation"; + } + + public static string GetContentType(EpubContentType type) + { + string contentType; + switch (type) + { + case EpubContentType.IMAGE_GIF: + contentType = "image/gif"; + break; + case EpubContentType.IMAGE_PNG: + contentType = "image/png"; + break; + case EpubContentType.IMAGE_JPEG: + contentType = "image/jpeg"; + break; + case EpubContentType.FONT_OPENTYPE: + contentType = "font/otf"; + break; + case EpubContentType.FONT_TRUETYPE: + contentType = "font/ttf"; + break; + case EpubContentType.IMAGE_SVG: + contentType = "image/svg+xml"; + break; + default: + contentType = 
"application/octet-stream"; + break; + } + + return contentType; + } + + public static void UpdateLinks(HtmlNode anchor, Dictionary mappings, int currentPage) + { + if (anchor.Name != "a") return; + var hrefParts = BookService.CleanContentKeys(anchor.GetAttributeValue("href", string.Empty)) + .Split("#"); + var mappingKey = hrefParts[0]; + if (!mappings.ContainsKey(mappingKey)) + { + if (HasClickableHrefPart(anchor)) + { + var part = hrefParts.Length > 1 + ? hrefParts[1] + : anchor.GetAttributeValue("href", string.Empty); + anchor.Attributes.Add("kavita-page", $"{currentPage}"); + anchor.Attributes.Add("kavita-part", part); + anchor.Attributes.Remove("href"); + anchor.Attributes.Add("href", "javascript:void(0)"); + } + else + { + anchor.Attributes.Add("target", "_blank"); + } + + return; + } + + var mappedPage = mappings[mappingKey]; + anchor.Attributes.Add("kavita-page", $"{mappedPage}"); + if (hrefParts.Length > 1) + { + anchor.Attributes.Add("kavita-part", + hrefParts[1]); + } + + anchor.Attributes.Remove("href"); + anchor.Attributes.Add("href", "javascript:void(0)"); + } + + public async Task ScopeStyles(string stylesheetHtml, string apiBase) + { + var styleContent = RemoveWhiteSpaceFromStylesheets(stylesheetHtml); + styleContent = + Parser.Parser.FontSrcUrlRegex.Replace(styleContent, "$1" + apiBase + "$2" + "$3"); + + styleContent = styleContent.Replace("body", ".reading-section"); + + var stylesheet = await _cssParser.ParseAsync(styleContent); + foreach (var styleRule in stylesheet.StyleRules) + { + if (styleRule.Selector.Text == ".reading-section") continue; + if (styleRule.Selector.Text.Contains(",")) + { + styleRule.Text = styleRule.Text.Replace(styleRule.SelectorText, + string.Join(", ", + styleRule.Selector.Text.Split(",").Select(s => ".reading-section " + s))); + continue; + } + styleRule.Text = ".reading-section " + styleRule.Text; + } + return RemoveWhiteSpaceFromStylesheets(stylesheet.ToCss()); + } + + public string GetSummaryInfo(string filePath) + { + if (!IsValidFile(filePath)) return string.Empty; + + var epubBook = EpubReader.OpenBook(filePath); + return epubBook.Schema.Package.Metadata.Description; + } + + private bool IsValidFile(string filePath) + { + if (!File.Exists(filePath)) + { + _logger.LogError("Book {EpubFile} could not be found", filePath); + return false; + } + + if (Parser.Parser.IsBook(filePath)) return true; + + _logger.LogError("Book {EpubFile} is not a valid EPUB", filePath); + return false; + } + + public int GetNumberOfPages(string filePath) + { + if (!IsValidFile(filePath) || !Parser.Parser.IsEpub(filePath)) return 0; + + try + { + var epubBook = EpubReader.OpenBook(filePath); + return epubBook.Content.Html.Count; + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an exception getting number of pages, defaulting to 0"); + } + + return 0; + } + + public static string CleanContentKeys(string key) + { + return key.Replace("../", string.Empty); + } + + public async Task> CreateKeyToPageMappingAsync(EpubBookRef book) + { + var dict = new Dictionary(); + var pageCount = 0; + foreach (var contentFileRef in await book.GetReadingOrderAsync()) + { + if (contentFileRef.ContentType != EpubContentType.XHTML_1_1) continue; + dict.Add(contentFileRef.FileName, pageCount); + pageCount += 1; + } + + return dict; + } + + public static ParserInfo ParseInfo(string filePath) + { + var epubBook = EpubReader.OpenBook(filePath); + + return new ParserInfo() + { + Chapters = "0", + Edition = "", + Format = MangaFormat.Book, + Filename = 
Path.GetFileName(filePath), + Title = epubBook.Title, + FullFilePath = filePath, + IsSpecial = false, + Series = epubBook.Title, + Volumes = "0" + }; + } + + public byte[] GetCoverImage(string fileFilePath, bool createThumbnail = true) + { + if (!IsValidFile(fileFilePath)) return Array.Empty(); + + var epubBook = EpubReader.OpenBook(fileFilePath); + + + try + { + // Try to get the cover image from OPF file, if not set, try to parse it from all the files, then result to the first one. + var coverImageContent = epubBook.Content.Cover + ?? epubBook.Content.Images.Values.FirstOrDefault(file => Parser.Parser.IsCoverImage(file.FileName)) + ?? epubBook.Content.Images.Values.First(); + + if (coverImageContent == null) return Array.Empty(); + + if (createThumbnail) + { + using var stream = new MemoryStream(coverImageContent.ReadContent()); + + using var thumbnail = Image.ThumbnailStream(stream, ThumbnailWidth); + return thumbnail.WriteToBuffer(".jpg"); + } + + return coverImageContent.ReadContent(); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was a critical error and prevented thumbnail generation on {BookFile}. Defaulting to no cover image", fileFilePath); + } + + return Array.Empty(); + } + + private static string RemoveWhiteSpaceFromStylesheets(string body) + { + body = Regex.Replace(body, @"[a-zA-Z]+#", "#"); + body = Regex.Replace(body, @"[\n\r]+\s*", string.Empty); + body = Regex.Replace(body, @"\s+", " "); + body = Regex.Replace(body, @"\s?([:,;{}])\s?", "$1"); + body = body.Replace(";}", "}"); + body = Regex.Replace(body, @"([\s:]0)(px|pt|%|em)", "$1"); + + // Remove comments from CSS + body = Regex.Replace(body, @"/\*[\d\D]*?\*/", string.Empty); + + return body; + } + } +} \ No newline at end of file diff --git a/API/Services/CacheService.cs b/API/Services/CacheService.cs index 2a2b8e482..4dcad4dc5 100644 --- a/API/Services/CacheService.cs +++ b/API/Services/CacheService.cs @@ -4,6 +4,7 @@ using System.Linq; using System.Threading.Tasks; using API.Comparators; using API.Entities; +using API.Entities.Enums; using API.Extensions; using API.Interfaces; using API.Interfaces.Services; @@ -20,7 +21,8 @@ namespace API.Services private readonly NumericComparer _numericComparer; public static readonly string CacheDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "cache/")); - public CacheService(ILogger logger, IUnitOfWork unitOfWork, IArchiveService archiveService, IDirectoryService directoryService) + public CacheService(ILogger logger, IUnitOfWork unitOfWork, IArchiveService archiveService, + IDirectoryService directoryService) { _logger = logger; _unitOfWork = unitOfWork; @@ -31,7 +33,6 @@ namespace API.Services public void EnsureCacheDirectory() { - _logger.LogDebug("Checking if valid Cache directory: {CacheDirectory}", CacheDirectory); if (!DirectoryService.ExistOrCreate(CacheDirectory)) { _logger.LogError("Cache directory {CacheDirectory} is not accessible or does not exist. 
Creating...", CacheDirectory); @@ -53,7 +54,12 @@ namespace API.Services { extraPath = file.Id + ""; } - _archiveService.ExtractArchive(file.FilePath, Path.Join(extractPath, extraPath)); + + if (file.Format == MangaFormat.Archive) + { + _archiveService.ExtractArchive(file.FilePath, Path.Join(extractPath, extraPath)); + } + } if (fileCount > 1) @@ -123,6 +129,11 @@ namespace API.Services var path = GetCachePath(chapter.Id); var files = _directoryService.GetFilesWithExtension(path, Parser.Parser.ImageFileExtensions); Array.Sort(files, _numericComparer); + + if (files.Length == 0) + { + return (files.ElementAt(0), mangaFile); + } // Since array is 0 based, we need to keep that in account (only affects last image) if (page == files.Length) diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs index 1049e3aae..95a48a29e 100644 --- a/API/Services/DirectoryService.cs +++ b/API/Services/DirectoryService.cs @@ -58,8 +58,7 @@ namespace API.Services { rootPath = rootPath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); } - // NOTE: I Could use Path.GetRelativePath and split on separator character instead. - + var path = fullPath.EndsWith(separator) ? fullPath.Substring(0, fullPath.Length - 1) : fullPath; var root = rootPath.EndsWith(separator) ? rootPath.Substring(0, rootPath.Length - 1) : rootPath; var paths = new List(); @@ -215,9 +214,9 @@ namespace API.Services /// Action to apply on file path /// Regex pattern to search against /// - public static int TraverseTreeParallelForEach(string root, Action action, string searchPattern) - { - //Count of files traversed and timer for diagnostic output + public static int TraverseTreeParallelForEach(string root, Action action, string searchPattern, ILogger logger) + { + //Count of files traversed and timer for diagnostic output var fileCount = 0; // Determine whether to parallelize file processing on each folder based on processor count. @@ -242,11 +241,13 @@ namespace API.Services // Thrown if we do not have discovery permission on the directory. catch (UnauthorizedAccessException e) { Console.WriteLine(e.Message); + logger.LogError(e, "Unauthorized access on {Directory}", currentDir); continue; } // Thrown if another process has deleted the directory after we retrieved its name. catch (DirectoryNotFoundException e) { Console.WriteLine(e.Message); + logger.LogError(e, "Directory not found on {Directory}", currentDir); continue; } @@ -268,24 +269,27 @@ namespace API.Services } // Execute in parallel if there are enough files in the directory. - // Otherwise, execute sequentially.Files are opened and processed + // Otherwise, execute sequentially. Files are opened and processed // synchronously but this could be modified to perform async I/O. 
try { - if (files.Length < procCount) { - foreach (var file in files) { - action(file); - fileCount++; - } - } - else { - Parallel.ForEach(files, () => 0, (file, _, localCount) => - { action(file); - return ++localCount; - }, - (c) => { - // ReSharper disable once AccessToModifiedClosure - Interlocked.Add(ref fileCount, c); - }); + // if (files.Length < procCount) { + // foreach (var file in files) { + // action(file); + // fileCount++; + // } + // } + // else { + // Parallel.ForEach(files, () => 0, (file, _, localCount) => + // { action(file); + // return ++localCount; + // }, + // (c) => { + // Interlocked.Add(ref fileCount, c); + // }); + // } + foreach (var file in files) { + action(file); + fileCount++; } } catch (AggregateException ae) { diff --git a/API/Services/MetadataService.cs b/API/Services/MetadataService.cs index f86cb3595..5e1f125bb 100644 --- a/API/Services/MetadataService.cs +++ b/API/Services/MetadataService.cs @@ -5,6 +5,8 @@ using System.IO; using System.Linq; using System.Threading.Tasks; using API.Entities; +using API.Entities.Enums; +using API.Entities.Interfaces; using API.Extensions; using API.Interfaces; using API.Interfaces.Services; @@ -17,12 +19,14 @@ namespace API.Services private readonly IUnitOfWork _unitOfWork; private readonly ILogger _logger; private readonly IArchiveService _archiveService; + private readonly IBookService _bookService; - public MetadataService(IUnitOfWork unitOfWork, ILogger logger, IArchiveService archiveService) + public MetadataService(IUnitOfWork unitOfWork, ILogger logger, IArchiveService archiveService, IBookService bookService) { _unitOfWork = unitOfWork; _logger = logger; _archiveService = archiveService; + _bookService = bookService; } private static bool ShouldFindCoverImage(byte[] coverImage, bool forceUpdate = false) @@ -30,13 +34,25 @@ namespace API.Services return forceUpdate || coverImage == null || !coverImage.Any(); } + private byte[] GetCoverImage(MangaFile file, bool createThumbnail = true) + { + if (file.Format == MangaFormat.Book) + { + return _bookService.GetCoverImage(file.FilePath, createThumbnail); + } + else + { + return _archiveService.GetCoverImage(file.FilePath, createThumbnail); + } + } + public void UpdateMetadata(Chapter chapter, bool forceUpdate) { var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault(); if (ShouldFindCoverImage(chapter.CoverImage, forceUpdate) && firstFile != null && !new FileInfo(firstFile.FilePath).IsLastWriteLessThan(firstFile.LastModified)) { chapter.Files ??= new List(); - chapter.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true); + chapter.CoverImage = GetCoverImage(firstFile); } } @@ -55,7 +71,7 @@ namespace API.Services var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault(); if (firstFile != null && !new FileInfo(firstFile.FilePath).IsLastWriteLessThan(firstFile.LastModified)) { - volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true); + volume.CoverImage = GetCoverImage(firstFile); } } else @@ -72,7 +88,7 @@ namespace API.Services if (ShouldFindCoverImage(series.CoverImage, forceUpdate)) { series.Volumes ??= new List(); - var firstCover = series.Volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0); + var firstCover = series.Volumes.GetCoverImage(series.Library.Type); byte[] coverImage = null; if (firstCover == null && series.Volumes.Any()) { @@ -92,24 +108,33 @@ namespace API.Services series.CoverImage = firstCover?.CoverImage ?? 
coverImage; } + UpdateSeriesSummary(series, forceUpdate); + } + + private void UpdateSeriesSummary(Series series, bool forceUpdate) + { if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return; - var firstVolume = series.Volumes.FirstOrDefault(v => v.Chapters.Any() && v.Number == 1); - var firstChapter = firstVolume?.Chapters.FirstOrDefault(c => c.Files.Any()); - + var isBook = series.Library.Type == LibraryType.Book; + var firstVolume = series.Volumes.FirstWithChapters(isBook); + var firstChapter = firstVolume?.Chapters.GetFirstChapterWithFiles(); + + // NOTE: This suffers from code changes not taking effect due to stale data var firstFile = firstChapter?.Files.FirstOrDefault(); - if (firstFile != null && !new FileInfo(firstFile.FilePath).DoesLastWriteMatch(firstFile.LastModified)) + if (firstFile != null && + (forceUpdate || !firstFile.HasFileBeenModified())) { - series.Summary = _archiveService.GetSummaryInfo(firstFile.FilePath); + series.Summary = isBook ? _bookService.GetSummaryInfo(firstFile.FilePath) : _archiveService.GetSummaryInfo(firstFile.FilePath); + firstFile.LastModified = DateTime.Now; } } - - + + public void RefreshMetadata(int libraryId, bool forceUpdate = false) { var sw = Stopwatch.StartNew(); - var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).Result; + var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult(); // TODO: See if we can break this up into multiple threads that process 20 series at a time then save so we can reduce amount of memory used _logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name); diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs index 23936db8a..8857865c0 100644 --- a/API/Services/TaskScheduler.cs +++ b/API/Services/TaskScheduler.cs @@ -20,7 +20,7 @@ namespace API.Services private readonly ICleanupService _cleanupService; public static BackgroundJobServer Client => new BackgroundJobServer(); - + public TaskScheduler(ICacheService cacheService, ILogger logger, IScannerService scannerService, IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService) @@ -32,20 +32,19 @@ namespace API.Services _metadataService = metadataService; _backupService = backupService; _cleanupService = cleanupService; - - ScheduleTasks(); } public void ScheduleTasks() { _logger.LogInformation("Scheduling reoccurring tasks"); - string setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).GetAwaiter().GetResult().Value; + var setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).GetAwaiter().GetResult().Value; if (setting != null) { - _logger.LogDebug("Scheduling Scan Library Task for {Setting}", setting); + var scanLibrarySetting = setting; + _logger.LogDebug("Scheduling Scan Library Task for {Setting}", scanLibrarySetting); RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), - () => CronConverter.ConvertToCronNotation(setting)); + () => CronConverter.ConvertToCronNotation(scanLibrarySetting)); } else { @@ -69,7 +68,7 @@ namespace API.Services public void ScanLibrary(int libraryId, bool forceUpdate = false) { _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId); - BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate)); + BackgroundJob.Enqueue(() => 
_scannerService.ScanLibrary(libraryId, forceUpdate)); // When we do a scan, force cache to re-unpack in case page numbers change BackgroundJob.Enqueue(() => _cleanupService.Cleanup()); } diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index bc812bf00..b6ca2c2be 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -6,8 +6,10 @@ using System.IO; using System.Linq; using System.Threading.Tasks; using API.Comparators; +using API.Data; using API.Entities; using API.Entities.Enums; +using API.Entities.Interfaces; using API.Extensions; using API.Interfaces; using API.Interfaces.Services; @@ -23,17 +25,19 @@ namespace API.Services.Tasks private readonly ILogger _logger; private readonly IArchiveService _archiveService; private readonly IMetadataService _metadataService; + private readonly IBookService _bookService; private ConcurrentDictionary> _scannedSeries; private readonly NaturalSortComparer _naturalSort; public ScannerService(IUnitOfWork unitOfWork, ILogger logger, IArchiveService archiveService, - IMetadataService metadataService) + IMetadataService metadataService, IBookService bookService) { _unitOfWork = unitOfWork; _logger = logger; _archiveService = archiveService; _metadataService = metadataService; - _naturalSort = new NaturalSortComparer(true); + _bookService = bookService; + _naturalSort = new NaturalSortComparer(); } @@ -43,13 +47,14 @@ namespace API.Services.Tasks var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList(); foreach (var lib in libraries) { + // BUG?: I think we need to keep _scannedSeries within the ScanLibrary instance since this is multithreaded. ScanLibrary(lib.Id, false); } } private bool ShouldSkipFolderScan(FolderPath folder, ref int skippedFolders) { - // NOTE: This solution isn't the best, but it has potential. We need to handle a few other cases so it works great. + // NOTE: The only way to skip folders is if Directory hasn't been modified, we aren't doing a forcedUpdate and version hasn't changed between scans. return false; // if (!_forceUpdate && Directory.GetLastWriteTime(folder.Path) < folder.LastScanned) @@ -66,6 +71,7 @@ namespace API.Services.Tasks public void ScanLibrary(int libraryId, bool forceUpdate) { var sw = Stopwatch.StartNew(); + _scannedSeries = new ConcurrentDictionary>(); Library library; try { @@ -79,260 +85,281 @@ namespace API.Services.Tasks } - _logger.LogInformation("Beginning scan on {LibraryName}. 
Forcing metadata update: {ForceUpdate}", library.Name, forceUpdate); - - _scannedSeries = new ConcurrentDictionary>(); - - var totalFiles = 0; - var skippedFolders = 0; - foreach (var folderPath in library.Folders) - { - if (ShouldSkipFolderScan(folderPath, ref skippedFolders)) continue; - - try { - totalFiles += DirectoryService.TraverseTreeParallelForEach(folderPath.Path, (f) => - { - try - { - ProcessFile(f, folderPath.Path, library.Type); - } - catch (FileNotFoundException exception) - { - _logger.LogError(exception, "The file {Filename} could not be found", f); - } - }, Parser.Parser.ArchiveFileExtensions); - } - catch (ArgumentException ex) { - _logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath.Path); - } - - folderPath.LastScanned = DateTime.Now; - } - - var scanElapsedTime = sw.ElapsedMilliseconds; - _logger.LogInformation("Folders Scanned {TotalFiles} files in {ElapsedScanTime} milliseconds", totalFiles, scanElapsedTime); - sw.Restart(); - if (skippedFolders == library.Folders.Count) - { - _logger.LogInformation("All Folders were skipped due to no modifications to the directories"); - _unitOfWork.LibraryRepository.Update(library); - _scannedSeries = null; - _logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, sw.ElapsedMilliseconds, library.Name); - return; - } - - // Remove any series where there were no parsed infos - var filtered = _scannedSeries.Where(kvp => kvp.Value.Count != 0); - var series = filtered.ToDictionary(v => v.Key, v => v.Value); - + var series = ScanLibrariesForSeries(forceUpdate, library, sw, out var totalFiles, out var scanElapsedTime); UpdateLibrary(library, series); + _unitOfWork.LibraryRepository.Update(library); - if (Task.Run(() => _unitOfWork.Complete()).Result) { - _logger.LogInformation("Scan completed on {LibraryName}. Parsed {ParsedSeriesCount} series in {ElapsedScanTime} ms", library.Name, series.Keys.Count, sw.ElapsedMilliseconds); + _logger.LogInformation("Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, series.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, library.Name); } else { - _logger.LogError("There was a critical error that resulted in a failed scan. Please check logs and rescan"); + _logger.LogCritical("There was a critical error that resulted in a failed scan. 
Please check logs and rescan"); } - _scannedSeries = null; - - _logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, sw.ElapsedMilliseconds + scanElapsedTime, library.Name); - - // Cleanup any user progress that doesn't exist - var cleanedUp = Task.Run(() => _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters()).Result; - _logger.LogInformation("Removed {Count} abandoned progress rows", cleanedUp); + + CleanupUserProgress(); BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate)); } - + /// + /// Remove any user progress rows that no longer exist since scan library ran and deleted series/volumes/chapters + /// + private void CleanupUserProgress() + { + var cleanedUp = Task.Run(() => _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters()).Result; + _logger.LogInformation("Removed {Count} abandoned progress rows", cleanedUp); + } + + private Dictionary> ScanLibrariesForSeries(bool forceUpdate, Library library, Stopwatch sw, out int totalFiles, + out long scanElapsedTime) + { + _logger.LogInformation("Beginning scan on {LibraryName}. Forcing metadata update: {ForceUpdate}", library.Name, + forceUpdate); + totalFiles = 0; + var skippedFolders = 0; + foreach (var folderPath in library.Folders) + { + if (ShouldSkipFolderScan(folderPath, ref skippedFolders)) continue; + + // NOTE: we can refactor this to allow all filetypes and handle everything in the ProcessFile to allow mixed library types. + var searchPattern = Parser.Parser.ArchiveFileExtensions; + if (library.Type == LibraryType.Book) + { + searchPattern = Parser.Parser.BookFileExtensions; + } + + try + { + totalFiles += DirectoryService.TraverseTreeParallelForEach(folderPath.Path, (f) => + { + try + { + ProcessFile(f, folderPath.Path, library.Type); + } + catch (FileNotFoundException exception) + { + _logger.LogError(exception, "The file {Filename} could not be found", f); + } + }, searchPattern, _logger); + } + catch (ArgumentException ex) + { + _logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath.Path); + } + + folderPath.LastScanned = DateTime.Now; + } + + scanElapsedTime = sw.ElapsedMilliseconds; + _logger.LogInformation("Folders Scanned {TotalFiles} files in {ElapsedScanTime} milliseconds", totalFiles, + scanElapsedTime); + sw.Restart(); + if (skippedFolders == library.Folders.Count) + { + _logger.LogInformation("All Folders were skipped due to no modifications to the directories"); + _unitOfWork.LibraryRepository.Update(library); + _scannedSeries = null; + _logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", + totalFiles, sw.ElapsedMilliseconds, library.Name); + return new Dictionary>(); + } + + return SeriesWithInfos(_scannedSeries); + } + + /// + /// Returns any series where there were parsed infos + /// + /// + /// + private static Dictionary> SeriesWithInfos(IDictionary> scannedSeries) + { + var filtered = scannedSeries.Where(kvp => kvp.Value.Count > 0); + var series = filtered.ToDictionary(v => v.Key, v => v.Value); + return series; + } + + private void UpdateLibrary(Library library, Dictionary> parsedSeries) { if (parsedSeries == null) throw new ArgumentNullException(nameof(parsedSeries)); - + // First, remove any series that are not in parsedSeries list - var missingSeries = FindSeriesNotOnDisk(library.Series, parsedSeries); - var removeCount = RemoveMissingSeries(library.Series, missingSeries); - _logger.LogInformation("Removed 
{RemoveMissingSeries} series that are no longer on disk", removeCount); + var missingSeries = FindSeriesNotOnDisk(library.Series, parsedSeries).ToList(); + library.Series = RemoveMissingSeries(library.Series, missingSeries, out var removeCount); + if (removeCount > 0) + { + _logger.LogInformation("Removed {RemoveMissingSeries} series that are no longer on disk:", removeCount); + foreach (var s in missingSeries) + { + _logger.LogDebug("Removed {SeriesName}", s.Name); + } + } + // Add new series that have parsedInfos foreach (var (key, infos) in parsedSeries) { - var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(key)); + // Key is normalized already + var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == key || Parser.Parser.Normalize(s.OriginalName) == key); if (existingSeries == null) { - var name = infos.Count > 0 ? infos[0].Series : key; - existingSeries = new Series() - { - Name = name, - OriginalName = name, - LocalizedName = name, - NormalizedName = Parser.Parser.Normalize(key), - SortName = key, - Summary = "", - Volumes = new List() - }; + existingSeries = DbFactory.Series(infos[0].Series); library.Series.Add(existingSeries); - } - existingSeries.NormalizedName = Parser.Parser.Normalize(key); - existingSeries.LocalizedName ??= key; + } + + existingSeries.NormalizedName = Parser.Parser.Normalize(existingSeries.Name); + existingSeries.OriginalName ??= infos[0].Series; } // Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series var librarySeries = library.Series.ToList(); Parallel.ForEach(librarySeries, (series) => { - _logger.LogInformation("Processing series {SeriesName}", series.Name); - UpdateVolumes(series, parsedSeries[Parser.Parser.Normalize(series.OriginalName)].ToArray()); - series.Pages = series.Volumes.Sum(v => v.Pages); + try + { + _logger.LogInformation("Processing series {SeriesName}", series.OriginalName); + UpdateVolumes(series, parsedSeries[Parser.Parser.Normalize(series.OriginalName)].ToArray()); + series.Pages = series.Volumes.Sum(v => v.Pages); + } + catch (Exception ex) + { + _logger.LogError(ex, "There was an exception updating volumes for {SeriesName}", series.Name); + } }); } public IEnumerable FindSeriesNotOnDisk(ICollection existingSeries, Dictionary> parsedSeries) { var foundSeries = parsedSeries.Select(s => s.Key).ToList(); - var missingSeries = existingSeries.Where(es => !es.NameInList(foundSeries) - || !es.NameInList(parsedSeries.Keys)); - return missingSeries; + return existingSeries.Where(es => !es.NameInList(foundSeries)); } - public int RemoveMissingSeries(ICollection existingSeries, IEnumerable missingSeries) + /// + /// Removes all instances of missingSeries' Series from existingSeries Collection. Existing series is updated by + /// reference and the removed element count is returned. 
+ /// + /// Existing Series in DB + /// Series not found on disk or can't be parsed + /// + /// the updated existingSeries + public static ICollection RemoveMissingSeries(ICollection existingSeries, IEnumerable missingSeries, out int removeCount) { - - var removeCount = existingSeries.Count; + var existingCount = existingSeries.Count; var missingList = missingSeries.ToList(); - existingSeries = existingSeries.Except(missingList).ToList(); - // if (existingSeries == null || existingSeries.Count == 0) return 0; - // foreach (var existing in missingSeries) - // { - // existingSeries.Remove(existing); - // removeCount += 1; - // } - removeCount -= existingSeries.Count; + + existingSeries = existingSeries.Where( + s => !missingList.Exists( + m => m.NormalizedName.Equals(s.NormalizedName))).ToList(); - return removeCount; + removeCount = existingCount - existingSeries.Count; + + return existingSeries; } private void UpdateVolumes(Series series, ParserInfo[] parsedInfos) { var startingVolumeCount = series.Volumes.Count; // Add new volumes and update chapters per volume - var distinctVolumes = parsedInfos.Select(p => p.Volumes).Distinct().ToList(); - _logger.LogDebug("Updating {DistinctVolumes} volumes", distinctVolumes.Count); + var distinctVolumes = parsedInfos.DistinctVolumes(); + _logger.LogDebug("Updating {DistinctVolumes} volumes on {SeriesName}", distinctVolumes.Count, series.Name); foreach (var volumeNumber in distinctVolumes) { - var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray(); - var volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber); if (volume == null) { - volume = new Volume() - { - Name = volumeNumber, - Number = (int) Parser.Parser.MinimumNumberFromRange(volumeNumber), - IsSpecial = false, - Chapters = new List() - }; + volume = DbFactory.Volume(volumeNumber); series.Volumes.Add(volume); } - // NOTE: I don't think we need this as chapters now handle specials - //volume.IsSpecial = volume.Number == 0 && infos.All(p => p.Chapters == "0" || p.IsSpecial); + // NOTE: Instead of creating and adding? Why Not Merge a new volume into an existing, so no matter what, new properties,etc get propagated? 
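The RemoveMissingSeries rewrite above keeps only the series whose NormalizedName has no match in the missing list and reports how many entries were dropped through an out parameter. A minimal sketch of that pattern, using a simplified stand-in record rather than Kavita's Series entity:

using System;
using System.Collections.Generic;
using System.Linq;

// Simplified stand-in for the Series entity; only the field the filter needs.
public record SeriesStub(string NormalizedName);

public static class MissingSeriesSketch
{
    // Returns the surviving collection and reports the number of removed entries.
    public static ICollection<SeriesStub> RemoveMissing(
        ICollection<SeriesStub> existing, IEnumerable<SeriesStub> missing, out int removeCount)
    {
        var existingCount = existing.Count;
        var missingList = missing.ToList();

        var kept = existing
            .Where(s => !missingList.Exists(m => m.NormalizedName.Equals(s.NormalizedName)))
            .ToList();

        removeCount = existingCount - kept.Count;
        return kept;
    }

    public static void Main()
    {
        var existing = new List<SeriesStub> { new("darkerthanblack"), new("salemslot") };
        var missing = new List<SeriesStub> { new("salemslot") };

        var kept = RemoveMissing(existing, missing, out var removed);
        Console.WriteLine($"Kept {kept.Count}, removed {removed}"); // Kept 1, removed 1
    }
}
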
+ _logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name); - + var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray(); UpdateChapters(volume, infos); volume.Pages = volume.Chapters.Sum(c => c.Pages); } - - // BUG: This is causing volumes to be removed when they shouldn't - // Remove existing volumes that aren't in parsedInfos and volumes that have no chapters - var existingVolumeLength = series.Volumes.Count; - // var existingVols = series.Volumes; - // foreach (var v in existingVols) - // { - // // NOTE: I think checking if Chapter count is 0 is enough, we don't need parsedInfos - // if (parsedInfos.All(p => p.Volumes != v.Name)) // || v.Chapters.Count == 0 (this wont work yet because we don't take care of chapters correctly vs parsedInfos) - // { - // _logger.LogDebug("Removed {Series} - {Volume} as there were no chapters", series.Name, v.Name); - // series.Volumes.Remove(v); - // } - // } - series.Volumes = series.Volumes.Where(v => parsedInfos.Any(p => p.Volumes == v.Name)).ToList(); - if (existingVolumeLength != series.Volumes.Count) + + // Remove existing volumes that aren't in parsedInfos + var nonDeletedVolumes = series.Volumes.Where(v => parsedInfos.Select(p => p.Volumes).Contains(v.Name)).ToList(); + if (series.Volumes.Count != nonDeletedVolumes.Count) { - _logger.LogDebug("Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name", (existingVolumeLength - series.Volumes.Count), series.Name); + _logger.LogDebug("Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name", + (series.Volumes.Count - nonDeletedVolumes.Count), series.Name); + var deletedVolumes = series.Volumes.Except(nonDeletedVolumes); + foreach (var volume in deletedVolumes) + { + var file = volume.Chapters.FirstOrDefault()?.Files.FirstOrDefault()?.FilePath ?? "no files"; + if (!new FileInfo(file).Exists) + { + _logger.LogError("Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}", file); + } + _logger.LogDebug("Removed {SeriesName} - Volume {Volume}: {File}", series.Name, volume.Name, file); + } + + series.Volumes = nonDeletedVolumes; } _logger.LogDebug("Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}", series.Name, startingVolumeCount, series.Volumes.Count); } - + + /// + /// + /// + /// + /// private void UpdateChapters(Volume volume, ParserInfo[] parsedInfos) { - var startingChapters = volume.Chapters.Count; - // Add new chapters foreach (var info in parsedInfos) { - var specialTreatment = (info.IsSpecial || (info.Volumes == "0" && info.Chapters == "0")); // Specials go into their own chapters with Range being their filename and IsSpecial = True. Non-Specials with Vol and Chap as 0 // also are treated like specials for UI grouping. - // NOTE: If there are duplicate files that parse out to be the same but a different series name (but parses to same normalized name ie History's strongest - // vs Historys strongest), this code will break and the duplicate will be skipped. - Chapter chapter = null; + Chapter chapter; try { - // TODO: Extract to FindExistingChapter() - chapter = specialTreatment - ? 
volume.Chapters.SingleOrDefault(c => c.Range == info.Filename - || (c.Files.Select(f => f.FilePath) - .Contains(info.FullFilePath))) - : volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters); + chapter = volume.Chapters.GetChapterByRange(info); } catch (Exception ex) { _logger.LogError(ex, "{FileName} mapped as '{Series} - Vol {Volume} Ch {Chapter}' is a duplicate, skipping", info.FullFilePath, info.Series, info.Volumes, info.Chapters); - return; + continue; } - - + if (chapter == null) { - _logger.LogDebug("Adding new chapter, {Series} - Vol {Volume} Ch {Chapter} - Needs Special Treatment? {NeedsSpecialTreatment}", info.Series, info.Volumes, info.Chapters, specialTreatment); - chapter = new Chapter() - { - Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty, - Range = specialTreatment ? info.Filename : info.Chapters, - Files = new List(), - IsSpecial = specialTreatment - }; - volume.Chapters.Add(chapter); + _logger.LogDebug( + "Adding new chapter, {Series} - Vol {Volume} Ch {Chapter}", info.Series, info.Volumes, info.Chapters); + volume.Chapters.Add(DbFactory.Chapter(info)); } - - chapter.Files ??= new List(); - chapter.IsSpecial = specialTreatment; + else + { + chapter.UpdateFrom(info); + } + } // Add files foreach (var info in parsedInfos) { - var specialTreatment = (info.IsSpecial || (info.Volumes == "0" && info.Chapters == "0")); + var specialTreatment = info.IsSpecialInfo(); Chapter chapter = null; try { - chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters || (specialTreatment && c.Range == info.Filename)); + chapter = volume.Chapters.GetChapterByRange(info); } catch (Exception ex) { _logger.LogError(ex, "There was an exception parsing chapter. Skipping {SeriesName} Vol {VolumeNumber} Chapter {ChapterNumber} - Special treatment: {NeedsSpecialTreatment}", info.Series, volume.Name, info.Chapters, specialTreatment); + continue; } if (chapter == null) continue; AddOrUpdateFileForChapter(chapter, info); chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty; chapter.Range = specialTreatment ? info.Filename : info.Chapters; - chapter.Pages = chapter.Files.Sum(f => f.Pages); } @@ -340,11 +367,7 @@ namespace API.Services.Tasks var existingChapters = volume.Chapters.ToList(); foreach (var existingChapter in existingChapters) { - var specialTreatment = (existingChapter.IsSpecial || (existingChapter.Number == "0" && !int.TryParse(existingChapter.Range, out int i))); - var hasInfo = specialTreatment ? 
parsedInfos.Any(v => v.Filename == existingChapter.Range) - : parsedInfos.Any(v => v.Chapters == existingChapter.Range); - - if (!hasInfo || existingChapter.Files.Count == 0) + if (existingChapter.Files.Count == 0 || !parsedInfos.HasInfo(existingChapter)) { _logger.LogDebug("Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series); volume.Chapters.Remove(existingChapter); @@ -355,13 +378,9 @@ namespace API.Services.Tasks existingChapter.Files = existingChapter.Files .Where(f => parsedInfos.Any(p => p.FullFilePath == f.FilePath)) .OrderBy(f => f.FilePath, _naturalSort).ToList(); + existingChapter.Pages = existingChapter.Files.Sum(f => f.Pages); } } - - - - _logger.LogDebug("Updated chapters from {StartingChaptersCount} to {ChapterCount}", - startingChapters, volume.Chapters.Count); } /// @@ -393,7 +412,8 @@ namespace API.Services.Tasks _logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries); var existingName = collectedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key) == normalizedSeries) .Key; - if (!string.IsNullOrEmpty(existingName) && info.Series != existingName) + // BUG: We are comparing info.Series against a normalized string. They should never match. (This can cause series to not delete or parse correctly after a rename) + if (!string.IsNullOrEmpty(existingName)) // && info.Series != existingName { _logger.LogDebug("Found duplicate parsed infos, merged {Original} into {Merged}", info.Series, existingName); return existingName; @@ -411,25 +431,61 @@ namespace API.Services.Tasks /// Library type to determine parsing to perform private void ProcessFile(string path, string rootPath, LibraryType type) { - var info = Parser.Parser.Parse(path, rootPath, type); + ParserInfo info; + if (type == LibraryType.Book && Parser.Parser.IsEpub(path)) + { + info = BookService.ParseInfo(path); + } + else + { + info = Parser.Parser.Parse(path, rootPath, type); + } + if (info == null) { _logger.LogWarning("[Scanner] Could not parse series from {Path}", path); return; } + if (type == LibraryType.Book && Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != "0") + { + info = Parser.Parser.Parse(path, rootPath, type); + var info2 = BookService.ParseInfo(path); + info.Merge(info2); + } + TrackSeries(info); } private MangaFile CreateMangaFile(ParserInfo info) { - return new MangaFile() + switch (info.Format) { - FilePath = info.FullFilePath, - Format = info.Format, - Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath) - }; + case MangaFormat.Archive: + { + return new MangaFile() + { + FilePath = info.FullFilePath, + Format = info.Format, + Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath) + }; + } + case MangaFormat.Book: + { + return new MangaFile() + { + FilePath = info.FullFilePath, + Format = info.Format, + Pages = _bookService.GetNumberOfPages(info.FullFilePath) + }; + } + default: + _logger.LogWarning("[Scanner] Ignoring {Filename}. 
Non-archives are not supported", info.Filename); + break; + } + + return null; } private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info) @@ -439,22 +495,21 @@ namespace API.Services.Tasks if (existingFile != null) { existingFile.Format = info.Format; - if (!new FileInfo(existingFile.FilePath).DoesLastWriteMatch(existingFile.LastModified)) + if (!existingFile.HasFileBeenModified() && existingFile.Pages > 0) { - existingFile.Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath); + existingFile.Pages = existingFile.Format == MangaFormat.Book + ? _bookService.GetNumberOfPages(info.FullFilePath) + : _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath); } } else { - if (info.Format == MangaFormat.Archive) + var file = CreateMangaFile(info); + if (file != null) { - chapter.Files.Add(CreateMangaFile(info)); + chapter.Files.Add(file); existingFile = chapter.Files.Last(); } - else - { - _logger.LogDebug("Ignoring {Filename}. Non-archives are not supported", info.Filename); - } } if (existingFile != null) diff --git a/API/Startup.cs b/API/Startup.cs index b2a06c866..4d26d933e 100644 --- a/API/Startup.cs +++ b/API/Startup.cs @@ -2,7 +2,7 @@ using System; using System.IO.Compression; using System.Linq; using API.Extensions; -using API.Interfaces.Services; +using API.Interfaces; using API.Middleware; using API.Services; using Hangfire; @@ -15,7 +15,6 @@ using Microsoft.AspNetCore.ResponseCompression; using Microsoft.AspNetCore.StaticFiles; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; using Microsoft.Extensions.Hosting; using Microsoft.OpenApi.Models; @@ -24,12 +23,10 @@ namespace API public class Startup { private readonly IConfiguration _config; - private readonly IWebHostEnvironment _env; - public Startup(IConfiguration config, IWebHostEnvironment env) + public Startup(IConfiguration config) { _config = config; - _env = env; } // This method gets called by the runtime. Use this method to add services to the container. @@ -71,16 +68,14 @@ namespace API // Add the processing server as IHostedService services.AddHangfireServer(); - - //services.AddStartupTask(services). - services.AddTransient().TryAddSingleton(services); } // This method gets called by the runtime. Use this method to configure the HTTP request pipeline. - public void Configure(IApplicationBuilder app, IBackgroundJobClient backgroundJobs, IWebHostEnvironment env, IHostApplicationLifetime applicationLifetime) + public void Configure(IApplicationBuilder app, IBackgroundJobClient backgroundJobs, IWebHostEnvironment env, + IHostApplicationLifetime applicationLifetime, ITaskScheduler taskScheduler) { app.UseMiddleware(); - + if (env.IsDevelopment()) { app.UseSwagger(); @@ -136,8 +131,11 @@ namespace API applicationLifetime.ApplicationStopping.Register(OnShutdown); applicationLifetime.ApplicationStarted.Register(() => { - Console.WriteLine("Kavita - v0.3.7"); + Console.WriteLine("Kavita - v0.4.0"); }); + + // Any services that should be bootstrapped go here + taskScheduler.ScheduleTasks(); } private void OnShutdown() diff --git a/INSTALL.txt b/INSTALL.txt index 5d052dccd..a8b83f905 100644 --- a/INSTALL.txt +++ b/INSTALL.txt @@ -1,4 +1,5 @@ How to Install -1. Unzip the archive to a directory that is writable. If on windows, do not place in Program Files -2. Open appsettings.json and modify TokenKey to a random string ideally generated from https://passwordsgenerator.net/ -3. 
Run Kavita executable \ No newline at end of file +1. Unzip the archive to a directory that is writable. If on Windows, do not place it in Program Files. +2. (Linux only) chmod and chown the directory so Kavita can write to it. +3. Open appsettings.json and modify TokenKey to a random string ideally generated from https://passwordsgenerator.net/ +4. Run the Kavita executable \ No newline at end of file diff --git a/build.sh b/build.sh index 51b68e571..d10013968 100644 --- a/build.sh +++ b/build.sh @@ -47,10 +47,10 @@ Build() BuildUI() { ProgressStart 'Building UI' - cd ../kavita-webui/ || exit + cd ../Kavita-webui/ || exit npm install npm run prod - cd ../kavita/ || exit + cd ../Kavita/ || exit ProgressEnd 'Building UI' } @@ -113,6 +113,3 @@ else Package "net5.0" "$RID" cd "$dir" fi - - -
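To close, a minimal sketch of the per-format dispatch the scanner changes above introduce: CreateMangaFile and AddOrUpdateFileForChapter now choose a page-count provider based on MangaFormat and skip unsupported formats. The enum and counting helpers below are simplified stand-ins, not Kavita's IArchiveService or IBookService.

using System;

// Simplified stand-in for the real MangaFormat enum.
public enum MangaFormat { Archive, Book, Unknown }

public static class PageCountSketch
{
    // Hypothetical stand-ins for the archive and epub page-count services.
    private static int CountArchivePages(string path) => 10;
    private static int CountEpubPages(string path) => 4;

    // Mirrors the switch pattern: pick a provider per format, return null for anything unsupported.
    public static int? GetPages(string filePath, MangaFormat format)
    {
        switch (format)
        {
            case MangaFormat.Archive:
                return CountArchivePages(filePath);
            case MangaFormat.Book:
                return CountEpubPages(filePath);
            default:
                Console.WriteLine($"Ignoring {filePath}: format {format} is not supported");
                return null;
        }
    }

    public static void Main()
    {
        Console.WriteLine(GetPages("vol1.cbz", MangaFormat.Archive));  // 10
        Console.WriteLine(GetPages("book.epub", MangaFormat.Book));    // 4
        Console.WriteLine(GetPages("notes.txt", MangaFormat.Unknown)); // warning, then blank (null)
    }
}

Returning null for unsupported formats lets the caller decide whether to skip the file, which matches how the scanner discards the MangaFile when CreateMangaFile yields nothing.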