.NET 6 Coding Patterns + Unit Tests (#823)

* Refactored all files to have their interfaces within the same file as the implementation. Started moving over to file-scoped namespaces.

* Refactored the common methods for getting an underlying file's cover, getting its page count, and extracting it into a single interface.
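The consolidated interface ends up looking roughly like the sketch below. The name IReadingItemService and the exact member signatures are assumptions on my part (the diffs further down only show the concrete ReadingItemService being constructed), but the shape conveys the idea: one service answers cover, page-count, and extraction questions regardless of the underlying file type.

using API.Entities.Enums;

// Hedged sketch of the consolidated interface; member signatures are illustrative, not copied from the source.
public interface IReadingItemService
{
    // Extracts/locates the cover image for the given file and returns its path.
    string GetCoverImage(string filePath, string fileName, MangaFormat format);

    // Number of readable pages in the underlying archive, epub, or image.
    int GetNumberOfPages(string filePath, MangaFormat format);

    // Extracts the file's pages into targetDirectory; the format decides the strategy.
    void Extract(string filePath, string targetDirectory, MangaFormat format, int imageCount = 1);
}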

* More refactoring to remove the dependence on explicit file-type checks when getting file information.

* Code is buildable, but tests are broken. Huge refactor (not yet completed) that makes most of DirectoryService testable with a mock filesystem (and thus the services that utilize it).

* Finished porting DirectoryService to use mocked filesystem implementation.
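The pattern the ported tests follow is sketched below, assuming xUnit, NSubstitute, and System.IO.Abstractions.TestingHelpers (the same packages used throughout the tests in this commit). DirectoryService is handed an in-memory MockFileSystem, so nothing touches the real disk; the test name and paths here are illustrative.

using System.IO.Abstractions.TestingHelpers;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;

public class DirectoryServiceMockFileSystemExample
{
    [Fact]
    public void ExistOrCreate_CreatesDirectoryOnMockFileSystem()
    {
        // In-memory filesystem seeded with the config root
        var fileSystem = new MockFileSystem();
        fileSystem.AddDirectory("C:/kavita/config/");

        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);

        // ExistOrCreate is one of the instance methods exercised by the tests below
        ds.ExistOrCreate("C:/kavita/config/cache/");

        Assert.True(fileSystem.Directory.Exists("C:/kavita/config/cache/"));
    }
}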

* Added a null check

* Added a null check

* Finished all unit tests for DirectoryService.

* Some misc cleanup on the code

* Fixed up some bugs from refactoring the scan loop.

* Implemented CleanupService testing and refactored more of DirectoryService to be non-static.

Fixed a bug where cover file cleanup wasn't finding files due to a faulty regex.

* Fixed an issue in CleanupBackup() where we weren't properly selecting database files older than 30 days. Finished CleanupService Tests.
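The selection itself is just a last-write-time comparison against a 30-day cutoff; a hedged sketch of that check is below. The helper name, the *.zip pattern, and the strict "older than" comparison are illustrative rather than lifted from BackupService.

using System;
using System.IO.Abstractions;
using System.Linq;

public static class BackupPruneExample
{
    // Hypothetical helper: returns backup files whose last write time is older than thresholdDays.
    public static string[] FindExpiredBackups(IFileSystem fs, string backupDirectory, int thresholdDays = 30)
    {
        var cutoff = DateTime.Now - TimeSpan.FromDays(thresholdDays);
        return fs.Directory.GetFiles(backupDirectory, "*.zip")
            .Where(f => fs.File.GetLastWriteTime(f) < cutoff) // strictly older than the cutoff
            .ToArray();
    }
}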

* Refactored Flatten and RemoveNonImages into DirectoryService so that CacheService can be made testable.
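A small sketch of what that buys: Flatten can now be exercised against a mock filesystem from a test, which is what the CacheService tests below rely on. The directory layout and the count assertion are illustrative; Flatten's exact rename scheme is intentionally not asserted here.

using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;

public class FlattenExample
{
    [Fact]
    public void Flatten_MovesNestedPagesIntoChapterRoot()
    {
        var fileSystem = new MockFileSystem();
        fileSystem.AddDirectory("C:/kavita/config/cache/1/");
        fileSystem.AddFile("C:/kavita/config/cache/1/0/001.jpg", new MockFileData(""));
        fileSystem.AddFile("C:/kavita/config/cache/1/0/002.jpg", new MockFileData(""));

        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);

        ds.Flatten("C:/kavita/config/cache/1/");

        // Both page images should now sit directly under the chapter's cache root
        Assert.Equal(2, ds.GetFiles("C:/kavita/config/cache/1/", searchOption: SearchOption.TopDirectoryOnly).Count());
    }
}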

* Finally have CacheService tested. Rewrote GetCachedPagePath() to be much more straightforward & performant.
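Reconstructed from the new CacheService tests later in this diff, the rewritten lookup boils down to: clamp the requested page, walk the chapter's files in order, and map the global page index onto the flattened "{fileIndex}_{page}.jpg" name. The sketch below is an approximation of that logic under those assumptions, not the actual method body.

using System;
using System.Linq;

public static class CachedPagePathExample
{
    // Approximation of the page-to-file mapping implied by the CacheServiceTests below.
    // chapterCacheDir and the zero-padded "{file}_{page}.jpg" naming come from those tests.
    public static string GetCachedPagePath(string chapterCacheDir, int[] pagesPerFile, int page)
    {
        var totalPages = pagesPerFile.Sum();
        if (totalPages == 0) return string.Empty;       // nothing extracted for this chapter

        page = Math.Min(page, totalPages - 1);          // clamp past-the-end requests to the last page

        var pagesSoFar = 0;
        for (var fileIndex = 0; fileIndex < pagesPerFile.Length; fileIndex++)
        {
            if (page < pagesSoFar + pagesPerFile[fileIndex])
            {
                var pageInFile = page - pagesSoFar + 1; // pages are 1-based within a file
                return $"{chapterCacheDir}{fileIndex:000}_{pageInFile:000}.jpg";
            }
            pagesSoFar += pagesPerFile[fileIndex];
        }
        return string.Empty;
    }
}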

* Updated DefaultParserTests.cs to contain all existing tests and follow new test layout format.

* All tests fixed up
Joseph Milazzo 2021-12-05 10:58:53 -06:00 committed by GitHub
parent bf1876ff44
commit bbe8f800f6
115 changed files with 6734 additions and 5370 deletions

.gitignore vendored
View File

@@ -519,5 +519,5 @@ API/config/stats/*
API/config/stats/app_stats.json
API/config/pre-metadata/
API/config/post-metadata/
+API.Tests/TestResults/
UI/Web/.vscode/settings.json

View File

@@ -1,7 +1,6 @@
using System.IO;
using System.IO.Abstractions;
using API.Entities.Enums;
-using API.Interfaces.Services;
using API.Parser;
using API.Services;
using API.Services.Tasks.Scanner;
@@ -25,9 +24,11 @@ namespace API.Benchmark
public ParseScannedFilesBenchmarks()
{
-IBookService bookService = new BookService(_bookLogger);
-_parseScannedFiles = new ParseScannedFiles(bookService, _logger, _archiveService,
-new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem()));
+var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
+_parseScannedFiles = new ParseScannedFiles(
+Substitute.For<ILogger>(),
+directoryService,
+new ReadingItemService(_archiveService, new BookService(_bookLogger, directoryService, new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService)), Substitute.For<ImageService>(), directoryService));
}
// [Benchmark]

View File

@@ -1,15 +1,27 @@
using System.Collections.Generic;
+using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using API.Entities.Enums;
using API.Extensions;
using API.Parser;
+using API.Services;
using API.Tests.Helpers;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
using Xunit;
namespace API.Tests.Extensions
{
public class ParserInfoListExtensions
{
+private readonly DefaultParser _defaultParser;
+public ParserInfoListExtensions()
+{
+_defaultParser =
+new DefaultParser(new DirectoryService(Substitute.For<ILogger<DirectoryService>>(),
+new MockFileSystem()));
+}
[Theory]
[InlineData(new[] {"1", "1", "3-5", "5", "8", "0", "0"}, new[] {"1", "3-5", "5", "8", "0"})]
public void DistinctVolumesTest(string[] volumeNumbers, string[] expectedNumbers)
@@ -27,7 +39,7 @@ namespace API.Tests.Extensions
var infos = new List<ParserInfo>();
foreach (var filename in inputInfos)
{
-infos.Add(API.Parser.Parser.Parse(
+infos.Add(_defaultParser.Parse(
filename,
string.Empty));
}

View File

@@ -1,6 +1,10 @@
using System.Collections.Generic;
+using System.IO.Abstractions.TestingHelpers;
using API.Entities.Enums;
using API.Parser;
+using API.Services;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
using Xunit;
using Xunit.Abstractions;
@@ -9,10 +13,14 @@ namespace API.Tests.Parser
public class ComicParserTests
{
private readonly ITestOutputHelper _testOutputHelper;
+private readonly DefaultParser _defaultParser;
public ComicParserTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
+_defaultParser =
+new DefaultParser(new DirectoryService(Substitute.For<ILogger<DirectoryService>>(),
+new MockFileSystem()));
}
[Theory]
@@ -158,72 +166,5 @@ namespace API.Tests.Parser
{
Assert.Equal(expected, !string.IsNullOrEmpty(API.Parser.Parser.ParseComicSpecial(input)));
}
[Fact]
public void ParseInfoTest()
{
const string rootPath = @"E:/Comics/";
var expected = new Dictionary<string, ParserInfo>();
var filepath = @"E:/Comics/Teen Titans/Teen Titans v1 Annual 01 (1967) SP01.cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Teen Titans", Volumes = "0",
Chapters = "0", Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath
});
// Fallback test with bad naming
filepath = @"E:\Comics\Comics\Babe\Babe Vol.1 #1-4\Babe 01.cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Babe", Volumes = "0", Edition = "",
Chapters = "1", Filename = "Babe 01.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
filepath = @"E:\Comics\Comics\Publisher\Batman the Detective (2021)\Batman the Detective - v6 - 11 - (2021).cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Batman the Detective", Volumes = "6", Edition = "",
Chapters = "11", Filename = "Batman the Detective - v6 - 11 - (2021).cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
filepath = @"E:\Comics\Comics\Batman - The Man Who Laughs #1 (2005)\Batman - The Man Who Laughs #1 (2005).cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Batman - The Man Who Laughs", Volumes = "0", Edition = "",
Chapters = "1", Filename = "Batman - The Man Who Laughs #1 (2005).cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
foreach (var file in expected.Keys)
{
var expectedInfo = expected[file];
var actual = API.Parser.Parser.Parse(file, rootPath, LibraryType.Comic);
if (expectedInfo == null)
{
Assert.Null(actual);
return;
}
Assert.NotNull(actual);
_testOutputHelper.WriteLine($"Validating {file}");
Assert.Equal(expectedInfo.Format, actual.Format);
_testOutputHelper.WriteLine("Format ✓");
Assert.Equal(expectedInfo.Series, actual.Series);
_testOutputHelper.WriteLine("Series ✓");
Assert.Equal(expectedInfo.Chapters, actual.Chapters);
_testOutputHelper.WriteLine("Chapters ✓");
Assert.Equal(expectedInfo.Volumes, actual.Volumes);
_testOutputHelper.WriteLine("Volumes ✓");
Assert.Equal(expectedInfo.Edition, actual.Edition);
_testOutputHelper.WriteLine("Edition ✓");
Assert.Equal(expectedInfo.Filename, actual.Filename);
_testOutputHelper.WriteLine("Filename ✓");
Assert.Equal(expectedInfo.FullFilePath, actual.FullFilePath);
_testOutputHelper.WriteLine("FullFilePath ✓");
}
}
}
}

View File

@ -0,0 +1,303 @@
using System.Collections.Generic;
using System.IO.Abstractions.TestingHelpers;
using API.Entities.Enums;
using API.Parser;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Parser;
public class DefaultParserTests
{
private readonly ITestOutputHelper _testOutputHelper;
private readonly DefaultParser _defaultParser;
public DefaultParserTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
_defaultParser = new DefaultParser(directoryService);
}
#region ParseFromFallbackFolders
[Theory]
[InlineData("C:/", "C:/Love Hina/Love Hina - Special.cbz", "Love Hina")]
[InlineData("C:/", "C:/Love Hina/Specials/Ani-Hina Art Collection.cbz", "Love Hina")]
[InlineData("C:/", "C:/Mujaki no Rakuen Something/Mujaki no Rakuen Vol12 ch76.cbz", "Mujaki no Rakuen")]
public void ParseFromFallbackFolders_FallbackShouldParseSeries(string rootDir, string inputPath, string expectedSeries)
{
var actual = _defaultParser.Parse(inputPath, rootDir);
if (actual == null)
{
Assert.NotNull(actual);
return;
}
Assert.Equal(expectedSeries, actual.Series);
}
[Theory]
[InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", "Btooom!~1~1")]
[InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", "Btooom!~1~2")]
[InlineData("/manga/Monster #8/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", "Monster #8~0~1")]
public void ParseFromFallbackFolders_ShouldParseSeriesVolumeAndChapter(string inputFile, string expectedParseInfo)
{
const string rootDirectory = "/manga/";
var tokens = expectedParseInfo.Split("~");
var actual = new ParserInfo {Chapters = "0", Volumes = "0"};
_defaultParser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
Assert.Equal(tokens[0], actual.Series);
Assert.Equal(tokens[1], actual.Volumes);
Assert.Equal(tokens[2], actual.Chapters);
}
#endregion
#region Parse
[Fact]
public void Parse_ParseInfo_Manga()
{
const string rootPath = @"E:/Manga/";
var expected = new Dictionary<string, ParserInfo>();
var filepath = @"E:/Manga/Mujaki no Rakuen/Mujaki no Rakuen Vol12 ch76.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Mujaki no Rakuen", Volumes = "12",
Chapters = "76", Filename = "Mujaki no Rakuen Vol12 ch76.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:/Manga/Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/Vol 1.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", Volumes = "1",
Chapters = "0", Filename = "Vol 1.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Beelzebub\Beelzebub_01_[Noodles].zip";
expected.Add(filepath, new ParserInfo
{
Series = "Beelzebub", Volumes = "0",
Chapters = "1", Filename = "Beelzebub_01_[Noodles].zip", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Ichinensei ni Nacchattara\Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
expected.Add(filepath, new ParserInfo
{
Series = "Ichinensei ni Nacchattara", Volumes = "1",
Chapters = "1", Filename = "Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Tenjo Tenge (Color)\Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Tenjo Tenge", Volumes = "1", Edition = "Full Contact Edition",
Chapters = "0", Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Akame ga KILL! ZERO", Volumes = "1", Edition = "",
Chapters = "0", Filename = "Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Dorohedoro\Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Dorohedoro", Volumes = "1", Edition = "",
Chapters = "0", Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\APOSIMZ\APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "APOSIMZ", Volumes = "0", Edition = "",
Chapters = "40", Filename = "APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Corpse Party Musume\Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Kedouin Makoto - Corpse Party Musume", Volumes = "0", Edition = "",
Chapters = "9", Filename = "Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Goblin Slayer\Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Goblin Slayer - Brand New Day", Volumes = "0", Edition = "",
Chapters = "6.5", Filename = "Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Summer Time Rendering\Specials\Record 014 (between chapter 083 and ch084) SP11.cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Summer Time Rendering", Volumes = "0", Edition = "",
Chapters = "0", Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = true
});
filepath = @"E:\Manga\Seraph of the End\Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Seraph of the End - Vampire Reign", Volumes = "0", Edition = "",
Chapters = "93", Filename = "Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
filepath = @"E:\Manga\Kono Subarashii Sekai ni Bakuen wo!\Vol. 00 Ch. 000.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Kono Subarashii Sekai ni Bakuen wo!", Volumes = "0", Edition = "",
Chapters = "0", Filename = "Vol. 00 Ch. 000.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
filepath = @"E:\Manga\Toukyou Akazukin\Vol. 01 Ch. 001.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Toukyou Akazukin", Volumes = "1", Edition = "",
Chapters = "1", Filename = "Vol. 01 Ch. 001.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
expected.Add(filepath, new ParserInfo
{
Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "",
Chapters = "0", Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
FullFilePath = filepath, IsSpecial = false
});
// If an image is cover exclusively, ignore it
filepath = @"E:\Manga\Seraph of the End\cover.png";
expected.Add(filepath, null);
filepath = @"E:\Manga\The Beginning After the End\Chapter 001.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "The Beginning After the End", Volumes = "0", Edition = "",
Chapters = "1", Filename = "Chapter 001.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg";
expected.Add(filepath, new ParserInfo
{
Series = "Monster #8", Volumes = "0", Edition = "",
Chapters = "1", Filename = "13.jpg", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
foreach (var file in expected.Keys)
{
var expectedInfo = expected[file];
var actual = _defaultParser.Parse(file, rootPath);
if (expectedInfo == null)
{
Assert.Null(actual);
return;
}
Assert.NotNull(actual);
_testOutputHelper.WriteLine($"Validating {file}");
Assert.Equal(expectedInfo.Format, actual.Format);
_testOutputHelper.WriteLine("Format ✓");
Assert.Equal(expectedInfo.Series, actual.Series);
_testOutputHelper.WriteLine("Series ✓");
Assert.Equal(expectedInfo.Chapters, actual.Chapters);
_testOutputHelper.WriteLine("Chapters ✓");
Assert.Equal(expectedInfo.Volumes, actual.Volumes);
_testOutputHelper.WriteLine("Volumes ✓");
Assert.Equal(expectedInfo.Edition, actual.Edition);
_testOutputHelper.WriteLine("Edition ✓");
Assert.Equal(expectedInfo.Filename, actual.Filename);
_testOutputHelper.WriteLine("Filename ✓");
Assert.Equal(expectedInfo.FullFilePath, actual.FullFilePath);
_testOutputHelper.WriteLine("FullFilePath ✓");
}
}
[Fact]
public void Parse_ParseInfo_Comic()
{
const string rootPath = @"E:/Comics/";
var expected = new Dictionary<string, ParserInfo>();
var filepath = @"E:/Comics/Teen Titans/Teen Titans v1 Annual 01 (1967) SP01.cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Teen Titans", Volumes = "0",
Chapters = "0", Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath
});
// Fallback test with bad naming
filepath = @"E:\Comics\Comics\Babe\Babe Vol.1 #1-4\Babe 01.cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Babe", Volumes = "0", Edition = "",
Chapters = "1", Filename = "Babe 01.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
filepath = @"E:\Comics\Comics\Publisher\Batman the Detective (2021)\Batman the Detective - v6 - 11 - (2021).cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Batman the Detective", Volumes = "6", Edition = "",
Chapters = "11", Filename = "Batman the Detective - v6 - 11 - (2021).cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
filepath = @"E:\Comics\Comics\Batman - The Man Who Laughs #1 (2005)\Batman - The Man Who Laughs #1 (2005).cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Batman - The Man Who Laughs", Volumes = "0", Edition = "",
Chapters = "1", Filename = "Batman - The Man Who Laughs #1 (2005).cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
foreach (var file in expected.Keys)
{
var expectedInfo = expected[file];
var actual = _defaultParser.Parse(file, rootPath, LibraryType.Comic);
if (expectedInfo == null)
{
Assert.Null(actual);
return;
}
Assert.NotNull(actual);
_testOutputHelper.WriteLine($"Validating {file}");
Assert.Equal(expectedInfo.Format, actual.Format);
_testOutputHelper.WriteLine("Format ✓");
Assert.Equal(expectedInfo.Series, actual.Series);
_testOutputHelper.WriteLine("Series ✓");
Assert.Equal(expectedInfo.Chapters, actual.Chapters);
_testOutputHelper.WriteLine("Chapters ✓");
Assert.Equal(expectedInfo.Volumes, actual.Volumes);
_testOutputHelper.WriteLine("Volumes ✓");
Assert.Equal(expectedInfo.Edition, actual.Edition);
_testOutputHelper.WriteLine("Edition ✓");
Assert.Equal(expectedInfo.Filename, actual.Filename);
_testOutputHelper.WriteLine("Filename ✓");
Assert.Equal(expectedInfo.FullFilePath, actual.FullFilePath);
_testOutputHelper.WriteLine("FullFilePath ✓");
}
}
#endregion
}

View File

@ -294,194 +294,6 @@ namespace API.Tests.Parser
} }
[Theory]
[InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", "Btooom!~1~1")]
[InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", "Btooom!~1~2")]
[InlineData("/manga/Monster #8/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", "Monster #8~0~1")]
public void ParseFromFallbackFoldersTest(string inputFile, string expectedParseInfo)
{
const string rootDirectory = "/manga/";
var tokens = expectedParseInfo.Split("~");
var actual = new ParserInfo {Chapters = "0", Volumes = "0"};
API.Parser.Parser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
Assert.Equal(tokens[0], actual.Series);
Assert.Equal(tokens[1], actual.Volumes);
Assert.Equal(tokens[2], actual.Chapters);
} }
[Fact]
public void ParseInfoTest()
{
const string rootPath = @"E:/Manga/";
var expected = new Dictionary<string, ParserInfo>();
var filepath = @"E:/Manga/Mujaki no Rakuen/Mujaki no Rakuen Vol12 ch76.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Mujaki no Rakuen", Volumes = "12",
Chapters = "76", Filename = "Mujaki no Rakuen Vol12 ch76.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:/Manga/Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen/Vol 1.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", Volumes = "1",
Chapters = "0", Filename = "Vol 1.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Beelzebub\Beelzebub_01_[Noodles].zip";
expected.Add(filepath, new ParserInfo
{
Series = "Beelzebub", Volumes = "0",
Chapters = "1", Filename = "Beelzebub_01_[Noodles].zip", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Ichinensei ni Nacchattara\Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
expected.Add(filepath, new ParserInfo
{
Series = "Ichinensei ni Nacchattara", Volumes = "1",
Chapters = "1", Filename = "Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Tenjo Tenge (Color)\Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Tenjo Tenge", Volumes = "1", Edition = "Full Contact Edition",
Chapters = "0", Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Akame ga KILL! ZERO", Volumes = "1", Edition = "",
Chapters = "0", Filename = "Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Dorohedoro\Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Dorohedoro", Volumes = "1", Edition = "",
Chapters = "0", Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\APOSIMZ\APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "APOSIMZ", Volumes = "0", Edition = "",
Chapters = "40", Filename = "APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Corpse Party Musume\Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Kedouin Makoto - Corpse Party Musume", Volumes = "0", Edition = "",
Chapters = "9", Filename = "Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Goblin Slayer\Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Goblin Slayer - Brand New Day", Volumes = "0", Edition = "",
Chapters = "6.5", Filename = "Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:\Manga\Summer Time Rendering\Specials\Record 014 (between chapter 083 and ch084) SP11.cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Summer Time Rendering", Volumes = "0", Edition = "",
Chapters = "0", Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = true
});
filepath = @"E:\Manga\Seraph of the End\Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Seraph of the End - Vampire Reign", Volumes = "0", Edition = "",
Chapters = "93", Filename = "Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
filepath = @"E:\Manga\Kono Subarashii Sekai ni Bakuen wo!\Vol. 00 Ch. 000.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Kono Subarashii Sekai ni Bakuen wo!", Volumes = "0", Edition = "",
Chapters = "0", Filename = "Vol. 00 Ch. 000.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
filepath = @"E:\Manga\Toukyou Akazukin\Vol. 01 Ch. 001.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Toukyou Akazukin", Volumes = "1", Edition = "",
Chapters = "1", Filename = "Vol. 01 Ch. 001.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
expected.Add(filepath, new ParserInfo
{
Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "",
Chapters = "0", Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
FullFilePath = filepath, IsSpecial = false
});
// If an image is cover exclusively, ignore it
filepath = @"E:\Manga\Seraph of the End\cover.png";
expected.Add(filepath, null);
filepath = @"E:\Manga\The Beginning After the End\Chapter 001.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "The Beginning After the End", Volumes = "0", Edition = "",
Chapters = "1", Filename = "Chapter 001.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg";
expected.Add(filepath, new ParserInfo
{
Series = "Monster #8", Volumes = "0", Edition = "",
Chapters = "1", Filename = "13.jpg", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
foreach (var file in expected.Keys)
{
var expectedInfo = expected[file];
var actual = API.Parser.Parser.Parse(file, rootPath);
if (expectedInfo == null)
{
Assert.Null(actual);
return;
}
Assert.NotNull(actual);
_testOutputHelper.WriteLine($"Validating {file}");
Assert.Equal(expectedInfo.Format, actual.Format);
_testOutputHelper.WriteLine("Format ✓");
Assert.Equal(expectedInfo.Series, actual.Series);
_testOutputHelper.WriteLine("Series ✓");
Assert.Equal(expectedInfo.Chapters, actual.Chapters);
_testOutputHelper.WriteLine("Chapters ✓");
Assert.Equal(expectedInfo.Volumes, actual.Volumes);
_testOutputHelper.WriteLine("Volumes ✓");
Assert.Equal(expectedInfo.Edition, actual.Edition);
_testOutputHelper.WriteLine("Edition ✓");
Assert.Equal(expectedInfo.Filename, actual.Filename);
_testOutputHelper.WriteLine("Filename ✓");
Assert.Equal(expectedInfo.FullFilePath, actual.FullFilePath);
_testOutputHelper.WriteLine("FullFilePath ✓");
}
}
}
} }

View File

@@ -6,6 +6,7 @@ namespace API.Tests.Parser
public class ParserTests
{
[Theory]
[InlineData("Beastars - SP01", true)]
[InlineData("Beastars SP01", true)]
@@ -147,21 +148,7 @@ namespace API.Tests.Parser
Assert.Equal(expected, CleanAuthor(expected));
}
[Theory]
[InlineData("C:/", "C:/Love Hina/Love Hina - Special.cbz", "Love Hina")]
[InlineData("C:/", "C:/Love Hina/Specials/Ani-Hina Art Collection.cbz", "Love Hina")]
[InlineData("C:/", "C:/Mujaki no Rakuen Something/Mujaki no Rakuen Vol12 ch76.cbz", "Mujaki no Rakuen")]
public void FallbackTest(string rootDir, string inputPath, string expectedSeries)
{
var actual = Parse(inputPath, rootDir);
if (actual == null)
{
Assert.NotNull(actual);
return;
}
Assert.Equal(expectedSeries, actual.Series);
}
[Theory]
[InlineData("Love Hina - Special.jpg", false)]

View File

@@ -1,10 +1,11 @@
using System.Diagnostics;
using System.IO;
+using System.IO.Abstractions;
using System.IO.Abstractions.TestingHelpers;
using System.IO.Compression;
+using System.Linq;
using API.Archive;
using API.Data.Metadata;
-using API.Interfaces.Services;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;
@@ -20,12 +21,12 @@ namespace API.Tests.Services
private readonly ArchiveService _archiveService;
private readonly ILogger<ArchiveService> _logger = Substitute.For<ILogger<ArchiveService>>();
private readonly ILogger<DirectoryService> _directoryServiceLogger = Substitute.For<ILogger<DirectoryService>>();
-private readonly IDirectoryService _directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
+private readonly IDirectoryService _directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
public ArchiveServiceTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
-_archiveService = new ArchiveService(_logger, _directoryService);
+_archiveService = new ArchiveService(_logger, _directoryService, new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService));
}
[Theory]
@@ -108,15 +109,15 @@ namespace API.Tests.Services
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
var extractDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives/Extraction");
-DirectoryService.ClearAndDeleteDirectory(extractDirectory);
+_directoryService.ClearAndDeleteDirectory(extractDirectory);
-Stopwatch sw = Stopwatch.StartNew();
+var sw = Stopwatch.StartNew();
_archiveService.ExtractArchive(Path.Join(testDirectory, archivePath), extractDirectory);
var di1 = new DirectoryInfo(extractDirectory);
-Assert.Equal(expectedFileCount, di1.Exists ? di1.GetFiles().Length : 0);
+Assert.Equal(expectedFileCount, di1.Exists ? _directoryService.GetFiles(extractDirectory, searchOption:SearchOption.AllDirectories).Count() : 0);
_testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms");
-DirectoryService.ClearAndDeleteDirectory(extractDirectory);
+_directoryService.ClearAndDeleteDirectory(extractDirectory);
}
@@ -167,8 +168,8 @@ namespace API.Tests.Services
var sw = Stopwatch.StartNew();
var outputDir = Path.Join(testDirectory, "output");
-DirectoryService.ClearAndDeleteDirectory(outputDir);
-DirectoryService.ExistOrCreate(outputDir);
+_directoryService.ClearAndDeleteDirectory(outputDir);
+_directoryService.ExistOrCreate(outputDir);
var coverImagePath = archiveService.GetCoverImage(Path.Join(testDirectory, inputFile),
@@ -178,7 +179,7 @@ namespace API.Tests.Services
Assert.Equal(expectedBytes, actual);
_testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms");
-DirectoryService.ClearAndDeleteDirectory(outputDir);
+_directoryService.ClearAndDeleteDirectory(outputDir);
}

View File

@ -0,0 +1,143 @@
using System.Collections.Generic;
using System.Data.Common;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Services;
using API.Services.Tasks;
using API.SignalR;
using AutoMapper;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Services;
public class BackupServiceTests
{
private readonly ILogger<BackupService> _logger = Substitute.For<ILogger<BackupService>>();
private readonly IUnitOfWork _unitOfWork;
private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();
private readonly IConfiguration _config;
private readonly DbConnection _connection;
private readonly DataContext _context;
private const string CacheDirectory = "C:/kavita/config/cache/";
private const string CoverImageDirectory = "C:/kavita/config/covers/";
private const string BackupDirectory = "C:/kavita/config/backups/";
private const string LogDirectory = "C:/kavita/config/logs/";
public BackupServiceTests()
{
var contextOptions = new DbContextOptionsBuilder()
.UseSqlite(CreateInMemoryDatabase())
.Options;
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
_context = new DataContext(contextOptions);
Task.Run(SeedDb).GetAwaiter().GetResult();
_unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
_config = Substitute.For<IConfiguration>();
}
#region Setup
private static DbConnection CreateInMemoryDatabase()
{
var connection = new SqliteConnection("Filename=:memory:");
connection.Open();
return connection;
}
public void Dispose() => _connection.Dispose();
private async Task<bool> SeedDb()
{
await _context.Database.MigrateAsync();
var filesystem = CreateFileSystem();
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
setting.Value = CacheDirectory;
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting.Value = BackupDirectory;
_context.ServerSetting.Update(setting);
_context.Library.Add(new Library()
{
Name = "Manga",
Folders = new List<FolderPath>()
{
new FolderPath()
{
Path = "C:/data/"
}
}
});
return await _context.SaveChangesAsync() > 0;
}
private async Task ResetDB()
{
_context.Series.RemoveRange(_context.Series.ToList());
await _context.SaveChangesAsync();
}
private static MockFileSystem CreateFileSystem()
{
var fileSystem = new MockFileSystem();
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
fileSystem.AddDirectory("C:/kavita/config/");
fileSystem.AddDirectory(CacheDirectory);
fileSystem.AddDirectory(CoverImageDirectory);
fileSystem.AddDirectory(BackupDirectory);
fileSystem.AddDirectory(LogDirectory);
fileSystem.AddDirectory("C:/data/");
return fileSystem;
}
#endregion
#region GetLogFiles
public void GetLogFiles_ExpectAllFiles_NoRollingFiles()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{LogDirectory}kavita.log", new MockFileData(""));
filesystem.AddFile($"{LogDirectory}kavita1.log", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
// You can't mock _config extensions because they are static
_config.GetMaxRollingFiles().Returns(1);
_config.GetLoggingFileName().Returns(ds.FileSystem.Path.Join(LogDirectory, "kavita.log"));
var backupService = new BackupService(_logger, _unitOfWork, ds, _config, _messageHub);
Assert.Single(backupService.GetLogFiles(1, LogDirectory));
}
#endregion
}

View File

@@ -1,5 +1,5 @@
using System.IO;
-using API.Interfaces.Services;
+using System.IO.Abstractions;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;
@@ -14,7 +14,8 @@ namespace API.Tests.Services
public BookServiceTests()
{
-_bookService = new BookService(_logger);
+var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
+_bookService = new BookService(_logger, directoryService, new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService));
}
[Theory]

View File

@ -1,115 +1,440 @@
namespace API.Tests.Services using System.Collections.Generic;
using System.Data.Common;
using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Entities.Enums;
using API.Services;
using API.SignalR;
using AutoMapper;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Services
{ {
public class CacheServiceTests public class CacheServiceTests
{ {
// private readonly CacheService _cacheService; private readonly ILogger<CacheService> _logger = Substitute.For<ILogger<CacheService>>();
// private readonly ILogger<CacheService> _logger = Substitute.For<ILogger<CacheService>>(); private readonly IUnitOfWork _unitOfWork;
// private readonly IUnitOfWork _unitOfWork = Substitute.For<IUnitOfWork>(); private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();
// private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
// private readonly IDirectoryService _directoryService = Substitute.For<DirectoryService>(); private readonly DbConnection _connection;
// private readonly DataContext _context;
// public CacheServiceTests()
// { private const string CacheDirectory = "C:/kavita/config/cache/";
// _cacheService = new CacheService(_logger, _unitOfWork, _archiveService, _directoryService); private const string CoverImageDirectory = "C:/kavita/config/covers/";
// } private const string BackupDirectory = "C:/kavita/config/backups/";
private const string DataDirectory = "C:/data/";
public CacheServiceTests()
{
var contextOptions = new DbContextOptionsBuilder()
.UseSqlite(CreateInMemoryDatabase())
.Options;
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
_context = new DataContext(contextOptions);
Task.Run(SeedDb).GetAwaiter().GetResult();
_unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
}
#region Setup
private static DbConnection CreateInMemoryDatabase()
{
var connection = new SqliteConnection("Filename=:memory:");
connection.Open();
return connection;
}
public void Dispose() => _connection.Dispose();
private async Task<bool> SeedDb()
{
await _context.Database.MigrateAsync();
var filesystem = CreateFileSystem();
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
setting.Value = CacheDirectory;
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting.Value = BackupDirectory;
_context.ServerSetting.Update(setting);
_context.Library.Add(new Library()
{
Name = "Manga",
Folders = new List<FolderPath>()
{
new FolderPath()
{
Path = "C:/data/"
}
}
});
return await _context.SaveChangesAsync() > 0;
}
private async Task ResetDB()
{
_context.Series.RemoveRange(_context.Series.ToList());
await _context.SaveChangesAsync();
}
private static MockFileSystem CreateFileSystem()
{
var fileSystem = new MockFileSystem();
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
fileSystem.AddDirectory("C:/kavita/config/");
fileSystem.AddDirectory(CacheDirectory);
fileSystem.AddDirectory(CoverImageDirectory);
fileSystem.AddDirectory(BackupDirectory);
fileSystem.AddDirectory(DataDirectory);
return fileSystem;
}
#endregion
#region Ensure
[Fact]
public async Task Ensure_DirectoryAlreadyExists_DontExtractAnything()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData(""));
filesystem.AddDirectory($"{CacheDirectory}1/");
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(), Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds));
await ResetDB();
var s = DbFactory.Series("Test");
var v = DbFactory.Volume("1");
var c = new Chapter()
{
Number = "1",
Files = new List<MangaFile>()
{
new MangaFile()
{
Format = MangaFormat.Archive,
FilePath = $"{DataDirectory}Test v1.zip",
}
}
};
v.Chapters.Add(c);
s.Volumes.Add(v);
s.LibraryId = 1;
_context.Series.Add(s);
await _context.SaveChangesAsync();
await cleanupService.Ensure(1);
Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories));
}
// [Fact] // [Fact]
// public async void Ensure_ShouldExtractArchive(int chapterId) // public async Task Ensure_DirectoryAlreadyExists_ExtractsImages()
// { // {
// // TODO: Figure out a way to test this
// var filesystem = CreateFileSystem();
// filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData(""));
// filesystem.AddDirectory($"{CacheDirectory}1/");
// var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
// var archiveService = Substitute.For<IArchiveService>();
// archiveService.ExtractArchive($"{DataDirectory}Test v1.zip",
// filesystem.Path.Join(CacheDirectory, "1"));
// var cleanupService = new CacheService(_logger, _unitOfWork, ds,
// new ReadingItemService(archiveService, Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds));
// //
// // CacheDirectory needs to be customized. // await ResetDB();
// _unitOfWork.VolumeRepository.GetChapterAsync(chapterId).Returns(new Chapter // var s = DbFactory.Series("Test");
// var v = DbFactory.Volume("1");
// var c = new Chapter()
// { // {
// Id = 1, // Number = "1",
// Files = new List<MangaFile>() // Files = new List<MangaFile>()
// { // {
// new MangaFile() // new MangaFile()
// { // {
// FilePath = "" // Format = MangaFormat.Archive,
// FilePath = $"{DataDirectory}Test v1.zip",
// } // }
// } // }
// }); // };
// v.Chapters.Add(c);
// s.Volumes.Add(v);
// s.LibraryId = 1;
// _context.Series.Add(s);
// //
// await _cacheService.Ensure(1); // await _context.SaveChangesAsync();
//
// var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/CacheService/Archives");
// //
// await cleanupService.Ensure(1);
// Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories));
// } // }
//string GetCachedPagePath(Volume volume, int page)
// [Fact] #endregion
// //[InlineData("", 0, "")]
// public void GetCachedPagePathTest_Should() #region CleanupChapters
// {
// [Fact]
// // string archivePath = "flat file.zip"; public void CleanupChapters_AllFilesShouldBeDeleted()
// // int pageNum = 0; {
// // string expected = "cache/1/pexels-photo-6551949.jpg"; var filesystem = CreateFileSystem();
// // filesystem.AddDirectory($"{CacheDirectory}1/");
// // var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives"); filesystem.AddFile($"{CacheDirectory}1/001.jpg", new MockFileData(""));
// // var file = Path.Join(testDirectory, archivePath); filesystem.AddFile($"{CacheDirectory}1/002.jpg", new MockFileData(""));
// // var volume = new Volume filesystem.AddFile($"{CacheDirectory}3/003.jpg", new MockFileData(""));
// // { var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
// // Id = 1, var cleanupService = new CacheService(_logger, _unitOfWork, ds,
// // Files = new List<MangaFile>() new ReadingItemService(Substitute.For<IArchiveService>(), Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds));
// // {
// // new() cleanupService.CleanupChapters(new []{1, 3});
// // { Assert.Empty(ds.GetFiles(CacheDirectory, searchOption:SearchOption.AllDirectories));
// // Id = 1, }
// // Chapter = 0,
// // FilePath = archivePath,
// // Format = MangaFormat.Archive, #endregion
// // Pages = 1,
// // } #region GetCachedEpubFile
// // },
// // Name = "1", [Fact]
// // Number = 1 public void GetCachedEpubFile_ShouldReturnFirstEpub()
// // }; {
// // var filesystem = CreateFileSystem();
// // var cacheService = Substitute.ForPartsOf<CacheService>(); filesystem.AddDirectory($"{CacheDirectory}1/");
// // cacheService.Configure().CacheDirectoryIsAccessible().Returns(true); filesystem.AddFile($"{DataDirectory}1.epub", new MockFileData(""));
// // cacheService.Configure().GetVolumeCachePath(1, volume.Files.ElementAt(0)).Returns("cache/1/"); filesystem.AddFile($"{DataDirectory}2.epub", new MockFileData(""));
// // _directoryService.Configure().GetFilesWithExtension("cache/1/").Returns(new string[] {"pexels-photo-6551949.jpg"}); var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
// // Assert.Equal(expected, _cacheService.GetCachedPagePath(volume, pageNum)); var cs = new CacheService(_logger, _unitOfWork, ds,
// //Assert.True(true); new ReadingItemService(Substitute.For<IArchiveService>(), Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds));
// }
// var c = new Chapter()
// [Fact] {
// public void GetOrderedChaptersTest() Files = new List<MangaFile>()
// { {
// // var files = new List<Chapter>() new MangaFile()
// // { {
// // new() FilePath = $"{DataDirectory}1.epub"
// // { },
// // Number = "1" new MangaFile()
// // }, {
// // new() FilePath = $"{DataDirectory}2.epub"
// // { }
// // Chapter = 2 }
// // }, };
// // new() cs.GetCachedEpubFile(1, c);
// // { Assert.Same($"{DataDirectory}1.epub", cs.GetCachedEpubFile(1, c));
// // Chapter = 0 }
// // },
// // }; #endregion
// // var expected = new List<MangaFile>()
// // { #region GetCachedPagePath
// // new()
// // { [Fact]
// // Chapter = 1 public void GetCachedPagePath_ReturnNullIfNoFiles()
// // }, {
// // new() var filesystem = CreateFileSystem();
// // { filesystem.AddDirectory($"{CacheDirectory}1/");
// // Chapter = 2 filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
// // }, filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));
// // new()
// // { var c = new Chapter()
// // Chapter = 0 {
// // }, Id = 1,
// // }; Files = new List<MangaFile>()
// // Assert.NotStrictEqual(expected, _cacheService.GetOrderedChapters(files)); };
// }
// var fileIndex = 0;
foreach (var file in c.Files)
{
for (var i = 0; i < file.Pages - 1; i++)
{
filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData(""));
}
fileIndex++;
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(), Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds));
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
var path = cs.GetCachedPagePath(c, 11);
Assert.Equal(string.Empty, path);
}
[Fact]
public void GetCachedPagePath_GetFileFromFirstFile()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));
var c = new Chapter()
{
Id = 1,
Files = new List<MangaFile>()
{
new MangaFile()
{
Id = 1,
FilePath = $"{DataDirectory}1.zip",
Pages = 10
},
new MangaFile()
{
Id = 2,
FilePath = $"{DataDirectory}2.zip",
Pages = 5
}
}
};
var fileIndex = 0;
foreach (var file in c.Files)
{
for (var i = 0; i < file.Pages; i++)
{
filesystem.AddFile($"{CacheDirectory}1/00{fileIndex}_00{i+1}.jpg", new MockFileData(""));
}
fileIndex++;
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(), Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds));
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_001.jpg"), ds.FileSystem.Path.GetFullPath(cs.GetCachedPagePath(c, 0)));
}
[Fact]
public void GetCachedPagePath_GetLastPageFromSingleFile()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
var c = new Chapter()
{
Id = 1,
Files = new List<MangaFile>()
{
new MangaFile()
{
Id = 1,
FilePath = $"{DataDirectory}1.zip",
Pages = 10
}
}
};
c.Pages = c.Files.Sum(f => f.Pages);
var fileIndex = 0;
foreach (var file in c.Files)
{
for (var i = 0; i < file.Pages; i++)
{
filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData(""));
}
fileIndex++;
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(), Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds));
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
// Remember that we start at 0, so this is the 10th file
var path = cs.GetCachedPagePath(c, c.Pages);
Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_0{c.Pages}.jpg"), ds.FileSystem.Path.GetFullPath(path));
}
[Fact]
public void GetCachedPagePath_GetFileFromSecondFile()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));
var c = new Chapter()
{
Id = 1,
Files = new List<MangaFile>()
{
new MangaFile()
{
Id = 1,
FilePath = $"{DataDirectory}1.zip",
Pages = 10
},
new MangaFile()
{
Id = 2,
FilePath = $"{DataDirectory}2.zip",
Pages = 5
}
}
};
var fileIndex = 0;
foreach (var file in c.Files)
{
for (var i = 0; i < file.Pages; i++)
{
filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData(""));
}
fileIndex++;
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(), Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds));
// Flatten to prepare for how GetFullPath expects
ds.Flatten($"{CacheDirectory}1/");
// Remember that we start at 0, so this is the page + 1 file
var path = cs.GetCachedPagePath(c, 10);
Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/001_001.jpg"), ds.FileSystem.Path.GetFullPath(path));
}
#endregion
} }
} }

View File

@ -0,0 +1,359 @@
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Entities.Enums;
using API.Services;
using API.Services.Tasks;
using API.SignalR;
using AutoMapper;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Services;
public class CleanupServiceTests
{
private readonly ILogger<CleanupService> _logger = Substitute.For<ILogger<CleanupService>>();
private readonly IUnitOfWork _unitOfWork;
private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();
private readonly DbConnection _connection;
private readonly DataContext _context;
private const string CacheDirectory = "C:/kavita/config/cache/";
private const string CoverImageDirectory = "C:/kavita/config/covers/";
private const string BackupDirectory = "C:/kavita/config/backups/";
public CleanupServiceTests()
{
var contextOptions = new DbContextOptionsBuilder()
.UseSqlite(CreateInMemoryDatabase())
.Options;
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
_context = new DataContext(contextOptions);
Task.Run(SeedDb).GetAwaiter().GetResult();
_unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
}
#region Setup
private static DbConnection CreateInMemoryDatabase()
{
var connection = new SqliteConnection("Filename=:memory:");
connection.Open();
return connection;
}
public void Dispose() => _connection.Dispose();
private async Task<bool> SeedDb()
{
await _context.Database.MigrateAsync();
var filesystem = CreateFileSystem();
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
setting.Value = CacheDirectory;
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting.Value = BackupDirectory;
_context.ServerSetting.Update(setting);
_context.Library.Add(new Library()
{
Name = "Manga",
Folders = new List<FolderPath>()
{
new FolderPath()
{
Path = "C:/data/"
}
}
});
return await _context.SaveChangesAsync() > 0;
}
private async Task ResetDB()
{
_context.Series.RemoveRange(_context.Series.ToList());
await _context.SaveChangesAsync();
}
private static MockFileSystem CreateFileSystem()
{
var fileSystem = new MockFileSystem();
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
fileSystem.AddDirectory("C:/kavita/config/");
fileSystem.AddDirectory(CacheDirectory);
fileSystem.AddDirectory(CoverImageDirectory);
fileSystem.AddDirectory(BackupDirectory);
fileSystem.AddDirectory("C:/data/");
return fileSystem;
}
#endregion
#region DeleteSeriesCoverImages
[Fact]
public async Task DeleteSeriesCoverImages_ShouldDeleteAll()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{CoverImageDirectory}series_01.jpg", new MockFileData(""));
filesystem.AddFile($"{CoverImageDirectory}series_03.jpg", new MockFileData(""));
filesystem.AddFile($"{CoverImageDirectory}series_1000.jpg", new MockFileData(""));
// Delete all Series to reset state
await ResetDB();
var s = DbFactory.Series("Test 1");
s.CoverImage = "series_01.jpg";
s.LibraryId = 1;
_context.Series.Add(s);
s = DbFactory.Series("Test 2");
s.CoverImage = "series_03.jpg";
s.LibraryId = 1;
_context.Series.Add(s);
s = DbFactory.Series("Test 3");
s.CoverImage = "series_1000.jpg";
s.LibraryId = 1;
_context.Series.Add(s);
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
ds);
await cleanupService.DeleteSeriesCoverImages();
Assert.Empty(ds.GetFiles(CoverImageDirectory));
}
[Fact]
public async Task DeleteSeriesCoverImages_ShouldNotDeleteLinkedFiles()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{CoverImageDirectory}series_01.jpg", new MockFileData(""));
filesystem.AddFile($"{CoverImageDirectory}series_03.jpg", new MockFileData(""));
filesystem.AddFile($"{CoverImageDirectory}series_1000.jpg", new MockFileData(""));
// Delete all Series to reset state
await ResetDB();
// Add 2 series with cover images
var s = DbFactory.Series("Test 1");
s.CoverImage = "series_01.jpg";
s.LibraryId = 1;
_context.Series.Add(s);
s = DbFactory.Series("Test 2");
s.CoverImage = "series_03.jpg";
s.LibraryId = 1;
_context.Series.Add(s);
await _context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
ds);
await cleanupService.DeleteSeriesCoverImages();
Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count());
}
#endregion
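// A minimal sketch, not the production CleanupService code, of the orphan-detection idea the
// tests above exercise: any file in the covers directory whose name is not referenced by an
// entity's CoverImage gets removed. The method name and parameters are illustrative only.
private static void DeleteOrphanedCoversSketch(IDirectoryService ds, string coverDirectory, IEnumerable<string> linkedCoverNames)
{
var linked = linkedCoverNames.ToHashSet();
foreach (var file in ds.GetFiles(coverDirectory))
{
if (!linked.Contains(ds.FileSystem.Path.GetFileName(file)))
{
ds.FileSystem.File.Delete(file);
}
}
}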
#region DeleteChapterCoverImages
[Fact]
public async Task DeleteChapterCoverImages_ShouldNotDeleteLinkedFiles()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{CoverImageDirectory}v01_c01.jpg", new MockFileData(""));
filesystem.AddFile($"{CoverImageDirectory}v01_c03.jpg", new MockFileData(""));
filesystem.AddFile($"{CoverImageDirectory}v01_c1000.jpg", new MockFileData(""));
// Delete all Series to reset state
await ResetDB();
// Add 2 series with cover images
var s = DbFactory.Series("Test 1");
var v = DbFactory.Volume("1");
v.Chapters.Add(new Chapter()
{
CoverImage = "v01_c01.jpg"
});
v.CoverImage = "v01_c01.jpg";
s.Volumes.Add(v);
s.CoverImage = "series_01.jpg";
s.LibraryId = 1;
_context.Series.Add(s);
s = DbFactory.Series("Test 2");
v = DbFactory.Volume("1");
v.Chapters.Add(new Chapter()
{
CoverImage = "v01_c03.jpg"
});
v.CoverImage = "v01_c03jpg";
s.Volumes.Add(v);
s.CoverImage = "series_03.jpg";
s.LibraryId = 1;
_context.Series.Add(s);
await _context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
ds);
await cleanupService.DeleteChapterCoverImages();
Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count());
}
#endregion
#region DeleteTagCoverImages
[Fact]
public async Task DeleteTagCoverImages_ShouldNotDeleteLinkedFiles()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{CoverImageDirectory}tag_01.jpg", new MockFileData(""));
filesystem.AddFile($"{CoverImageDirectory}tag_02.jpg", new MockFileData(""));
filesystem.AddFile($"{CoverImageDirectory}tag_1000.jpg", new MockFileData(""));
// Delete all Series to reset state
await ResetDB();
// Add 2 series with cover images
var s = DbFactory.Series("Test 1");
s.Metadata.CollectionTags = new List<CollectionTag>();
s.Metadata.CollectionTags.Add(new CollectionTag()
{
Title = "Something",
CoverImage ="tag_01.jpg"
});
s.CoverImage = "series_01.jpg";
s.LibraryId = 1;
_context.Series.Add(s);
s = DbFactory.Series("Test 2");
s.Metadata.CollectionTags = new List<CollectionTag>();
s.Metadata.CollectionTags.Add(new CollectionTag()
{
Title = "Something 2",
CoverImage ="tag_02.jpg"
});
s.CoverImage = "series_03.jpg";
s.LibraryId = 1;
_context.Series.Add(s);
await _context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
ds);
await cleanupService.DeleteTagCoverImages();
Assert.Equal(2, ds.GetFiles(CoverImageDirectory).Count());
}
#endregion
#region CleanupCacheDirectory
[Fact]
public void CleanupCacheDirectory_ClearAllFiles()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{CacheDirectory}01.jpg", new MockFileData(""));
filesystem.AddFile($"{CacheDirectory}02.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
ds);
cleanupService.CleanupCacheDirectory();
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories));
}
[Fact]
public void CleanupCacheDirectory_ClearAllFilesInSubDirectory()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{CacheDirectory}01.jpg", new MockFileData(""));
filesystem.AddFile($"{CacheDirectory}subdir/02.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
ds);
cleanupService.CleanupCacheDirectory();
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories));
}
#endregion
#region CleanupBackups
[Fact]
public void CleanupBackups_LeaveOneFile_SinceAllAreExpired()
{
var filesystem = CreateFileSystem();
var filesystemFile = new MockFileData("")
{
CreationTime = DateTimeOffset.Now.Subtract(TimeSpan.FromDays(31))
};
filesystem.AddFile($"{BackupDirectory}kavita_backup_11_29_2021_12_00_13 AM.zip", filesystemFile);
filesystem.AddFile($"{BackupDirectory}kavita_backup_12_3_2021_9_27_58 AM.zip", filesystemFile);
filesystem.AddFile($"{BackupDirectory}randomfile.zip", filesystemFile);
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
ds);
cleanupService.CleanupBackups();
Assert.Single(ds.GetFiles(BackupDirectory, searchOption: SearchOption.AllDirectories));
}
[Fact]
public void CleanupBackups_LeaveLeastExpired()
{
var filesystem = CreateFileSystem();
var filesystemFile = new MockFileData("")
{
CreationTime = DateTimeOffset.Now.Subtract(TimeSpan.FromDays(31))
};
filesystem.AddFile($"{BackupDirectory}kavita_backup_11_29_2021_12_00_13 AM.zip", filesystemFile);
filesystem.AddFile($"{BackupDirectory}kavita_backup_12_3_2021_9_27_58 AM.zip", filesystemFile);
filesystem.AddFile($"{BackupDirectory}randomfile.zip", new MockFileData("")
{
CreationTime = DateTimeOffset.Now.Subtract(TimeSpan.FromDays(14))
});
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
ds);
cleanupService.CleanupBackups();
Assert.True(filesystem.File.Exists($"{BackupDirectory}randomfile.zip"));
}
#endregion
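// A minimal sketch, not the production CleanupService implementation, of the behaviour the two
// tests above pin down: backups older than 30 days are deleted, but the newest backup is always
// retained so at least one file survives. The method name and parameters are illustrative only.
private static void CleanupBackupsSketch(IDirectoryService ds, string backupDirectory)
{
var threshold = DateTime.Now.Subtract(TimeSpan.FromDays(30));
var backups = ds.GetFiles(backupDirectory)
.Select(f => ds.FileSystem.FileInfo.FromFileName(f))
.OrderByDescending(f => f.CreationTime)
.ToList();
foreach (var expired in backups.Skip(1).Where(f => f.CreationTime < threshold))
{
expired.Delete();
}
}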
}

View File

@ -1,8 +1,11 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.IO; using System.IO;
using System.IO.Abstractions;
using System.IO.Abstractions.TestingHelpers; using System.IO.Abstractions.TestingHelpers;
using System.Linq; using System.Linq;
using System.Text;
using System.Threading.Tasks;
using API.Services; using API.Services;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using NSubstitute; using NSubstitute;
@ -18,87 +21,607 @@ namespace API.Tests.Services
public DirectoryServiceTests() public DirectoryServiceTests()
{ {
_directoryService = new DirectoryService(_logger, new MockFileSystem()); var filesystem = new MockFileSystem()
{
};
_directoryService = new DirectoryService(_logger, filesystem);
} }
#region TraverseTreeParallelForEach
[Fact] [Fact]
public void GetFilesTest_Should_Be28() public void TraverseTreeParallelForEach_JustArchives_ShouldBe28()
{ {
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Manga"); var testDirectory = "/manga/";
// ReSharper disable once CollectionNeverQueried.Local var fileSystem = new MockFileSystem();
for (var i = 0; i < 28; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
}
fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var files = new List<string>(); var files = new List<string>();
var fileCount = _directoryService.TraverseTreeParallelForEach(testDirectory, s => files.Add(s), var fileCount = ds.TraverseTreeParallelForEach(testDirectory, s => files.Add(s),
API.Parser.Parser.ArchiveFileExtensions, _logger); API.Parser.Parser.ArchiveFileExtensions, _logger);
Assert.Equal(28, fileCount); Assert.Equal(28, fileCount);
Assert.Equal(28, files.Count);
} }
[Fact] [Fact]
public void GetFiles_WithCustomRegex_ShouldPass_Test() public void TraverseTreeParallelForEach_DontCountExcludedDirectories_ShouldBe28()
{ {
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/DirectoryService/regex"); var testDirectory = "/manga/";
var files = DirectoryService.GetFiles(testDirectory, @"file\d*.txt"); var fileSystem = new MockFileSystem();
Assert.Equal(2, files.Count()); for (var i = 0; i < 28; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
}
fileSystem.AddFile($"{Path.Join(testDirectory, "@eaDir")}file_{29}.jpg", new MockFileData(""));
fileSystem.AddFile($"{Path.Join(testDirectory, ".DS_Store")}file_{30}.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var files = new List<string>();
var fileCount = ds.TraverseTreeParallelForEach(testDirectory, s => files.Add(s),
API.Parser.Parser.ArchiveFileExtensions, _logger);
Assert.Equal(28, fileCount);
Assert.Equal(28, files.Count);
}
#endregion
#region GetFilesWithCertainExtensions
[Fact]
public void GetFilesWithCertainExtensions_ShouldBe10()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
}
fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var files = ds.GetFilesWithExtension(testDirectory, API.Parser.Parser.ArchiveFileExtensions);
Assert.Equal(10, files.Length);
Assert.All(files, s => Assert.Equal(".zip", fileSystem.Path.GetExtension(s)));
} }
[Fact] [Fact]
public void GetFiles_TopLevel_ShouldBeEmpty_Test() public void GetFilesWithCertainExtensions_OnlyArchives()
{ {
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/DirectoryService"); const string testDirectory = "/manga/";
var files = DirectoryService.GetFiles(testDirectory); var fileSystem = new MockFileSystem();
Assert.Empty(files); for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
}
fileSystem.AddFile($"{testDirectory}file_{29}.rar", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var files = ds.GetFilesWithExtension(testDirectory, ".zip|.rar");
Assert.Equal(11, files.Length);
}
#endregion
#region GetFiles
[Fact]
public void GetFiles_ArchiveOnly_ShouldBe10()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
}
fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var files = ds.GetFiles(testDirectory, API.Parser.Parser.ArchiveFileExtensions).ToList();
Assert.Equal(10, files.Count());
Assert.All(files, s => Assert.Equal(".zip", fileSystem.Path.GetExtension(s)));
} }
[Fact] [Fact]
public void GetFilesWithExtensions_ShouldBeEmpty_Test() public void GetFiles_All_ShouldBe11()
{ {
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/DirectoryService/extensions"); const string testDirectory = "/manga/";
var files = DirectoryService.GetFiles(testDirectory, "*.txt"); var fileSystem = new MockFileSystem();
Assert.Empty(files); for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
}
fileSystem.AddFile($"{testDirectory}file_{29}.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var files = ds.GetFiles(testDirectory).ToList();
Assert.Equal(11, files.Count());
} }
[Fact] [Fact]
public void GetFilesWithExtensions_Test() public void GetFiles_All_MixedPathSeparators()
{ {
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/DirectoryService/extension"); const string testDirectory = "/manga/";
var files = DirectoryService.GetFiles(testDirectory, ".cbz|.rar"); var fileSystem = new MockFileSystem();
Assert.Equal(3, files.Count()); for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
}
fileSystem.AddFile($"/manga\\file_{29}.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var files = ds.GetFiles(testDirectory).ToList();
Assert.Equal(11, files.Count());
} }
[Fact] [Fact]
public void GetFilesWithExtensions_BadDirectory_ShouldBeEmpty_Test() public void GetFiles_All_TopDirectoryOnly_ShouldBe10()
{ {
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/DirectoryService/doesntexist"); const string testDirectory = "/manga/";
var files = DirectoryService.GetFiles(testDirectory, ".cbz|.rar"); var fileSystem = new MockFileSystem();
Assert.Empty(files); for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
}
fileSystem.AddFile($"{testDirectory}/SubDir/file_{29}.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var files = ds.GetFiles(testDirectory).ToList();
Assert.Equal(10, files.Count());
} }
[Fact] [Fact]
public void ListDirectory_SubDirectory_Test() public void GetFiles_WithSubDirectories_ShouldCountOnlyTopLevel()
{ {
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/DirectoryService/"); const string testDirectory = "/manga/";
var dirs = _directoryService.ListDirectory(testDirectory); var fileSystem = new MockFileSystem();
Assert.Contains(dirs, s => s.Contains("regex")); for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
}
fileSystem.AddFile($"{testDirectory}/SubDir/file_{29}.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var files = ds.GetFiles(testDirectory).ToList();
Assert.Equal(10, files.Count());
} }
[Fact] [Fact]
public void ListDirectory_NoSubDirectory_Test() public void GetFiles_ShouldNotReturnFilesThatAreExcluded()
{ {
var dirs = _directoryService.ListDirectory(""); const string testDirectory = "/manga/";
Assert.DoesNotContain(dirs, s => s.Contains("regex")); var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
} }
fileSystem.AddFile($"{testDirectory}/._file_{29}.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var files = ds.GetFiles(testDirectory).ToList();
Assert.Equal(10, files.Count());
}
[Fact]
public void GetFiles_WithCustomRegex_ShouldBe10()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}data-{i}.txt", new MockFileData(""));
}
fileSystem.AddFile($"{testDirectory}joe.txt", new MockFileData(""));
fileSystem.AddFile($"{testDirectory}0d.txt", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var files = ds.GetFiles(testDirectory, @".*d.*\.txt");
Assert.Equal(11, files.Count());
}
[Fact]
public void GetFiles_WithCustomRegexThatContainsFolder_ShouldBe10()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData(""));
}
fileSystem.AddFile($"{testDirectory}joe.txt", new MockFileData(""));
fileSystem.AddFile($"{testDirectory}0d.txt", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var files = ds.GetFiles(testDirectory, @".*d.*\.txt", SearchOption.AllDirectories);
Assert.Equal(11, files.Count());
}
#endregion
#region GetTotalSize
[Fact]
public void GetTotalSize_ShouldBeGreaterThan0()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc"));
}
fileSystem.AddFile($"{testDirectory}joe.txt", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var fileSize = ds.GetTotalSize(fileSystem.AllFiles);
Assert.True(fileSize > 0);
}
#endregion
#region CopyFileToDirectory
[Fact]
public void CopyFileToDirectory_ShouldCopyFileToNonExistentDirectory()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc"));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.CopyFileToDirectory($"{testDirectory}file/data-0.txt", "/manga/output/");
Assert.True(fileSystem.FileExists("manga/output/data-0.txt"));
Assert.True(fileSystem.FileExists("manga/file/data-0.txt"));
}
[Fact]
public void CopyFileToDirectory_ShouldCopyFileToExistingDirectoryAndOverwrite()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc"));
fileSystem.AddFile($"{testDirectory}output/data-0.txt", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.CopyFileToDirectory($"{testDirectory}file/data-0.txt", "/manga/output/");
Assert.True(fileSystem.FileExists("/manga/output/data-0.txt"));
Assert.True(fileSystem.FileExists("/manga/file/data-0.txt"));
Assert.True(fileSystem.FileInfo.FromFileName("/manga/file/data-0.txt").Length == fileSystem.FileInfo.FromFileName("/manga/output/data-0.txt").Length);
}
#endregion
#region CopyDirectoryToDirectory
[Fact]
public void CopyDirectoryToDirectory_ShouldThrowWhenSourceDestinationDoesntExist()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc"));
fileSystem.AddFile($"{testDirectory}output/data-0.txt", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var ex = Assert.Throws<DirectoryNotFoundException>(() => ds.CopyDirectoryToDirectory("/comics/", "/manga/output/"));
Assert.Equal("Source directory does not exist or could not be found: " + "/comics/", ex.Message);
}
[Fact]
public void CopyDirectoryToDirectory_ShouldCopyEmptyDirectory()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc"));
fileSystem.AddDirectory($"{testDirectory}empty/");
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.CopyDirectoryToDirectory($"{testDirectory}empty/", "/manga/output/");
Assert.Empty(fileSystem.DirectoryInfo.FromDirectoryName("/manga/output/").GetFiles());
}
[Fact]
public void CopyDirectoryToDirectory_ShouldCopyAllFileAndNestedDirectoriesOver()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc"));
fileSystem.AddFile($"{testDirectory}data-1.txt", new MockFileData("abc"));
fileSystem.AddDirectory($"{testDirectory}empty/");
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.CopyDirectoryToDirectory($"{testDirectory}", "/manga/output/");
Assert.Equal(2, ds.GetFiles("/manga/output/", searchOption: SearchOption.AllDirectories).Count());
}
#endregion
#region IsDriveMounted
[Fact]
public void IsDriveMounted_DriveIsNotMounted()
{
const string testDirectory = "c:/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc"));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
Assert.False(ds.IsDriveMounted("d:/manga/"));
}
[Fact]
public void IsDriveMounted_DriveIsMounted()
{
const string testDirectory = "c:/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddFile($"{testDirectory}data-0.txt", new MockFileData("abc"));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
Assert.True(ds.IsDriveMounted("c:/manga/file"));
}
#endregion
#region ExistOrCreate
[Fact]
public void ExistOrCreate_ShouldCreate()
{
var fileSystem = new MockFileSystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.ExistOrCreate("c:/manga/output/");
Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName("c:/manga/output/").Exists);
}
#endregion
#region ClearAndDeleteDirectory
[Fact]
public void ClearAndDeleteDirectory_ShouldDeleteSelfAndAllFilesAndFolders()
{
const string testDirectory = "/manga/base/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc"));
}
fileSystem.AddFile($"{testDirectory}data-a.txt", new MockFileData("abc"));
fileSystem.AddFile($"{testDirectory}data-b.txt", new MockFileData("abc"));
fileSystem.AddDirectory($"{testDirectory}empty/");
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.ClearAndDeleteDirectory($"{testDirectory}");
Assert.Empty(ds.GetFiles("/manga/", searchOption: SearchOption.AllDirectories));
Assert.Empty(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/").GetDirectories());
Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/").Exists);
Assert.False(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/base").Exists);
}
#endregion
#region ClearDirectory
[Fact]
public void ClearDirectory_ShouldDeleteAllFilesAndFolders_LeaveSelf()
{
const string testDirectory = "/manga/base/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc"));
}
fileSystem.AddFile($"{testDirectory}data-a.txt", new MockFileData("abc"));
fileSystem.AddFile($"{testDirectory}data-b.txt", new MockFileData("abc"));
fileSystem.AddDirectory($"{testDirectory}file/empty/");
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.ClearDirectory($"{testDirectory}file/");
Assert.Empty(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}file/").GetDirectories());
Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName("/manga/").Exists);
Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}file/").Exists);
}
[Fact]
public void ClearDirectory_ShouldDeleteFoldersWithOneFileInside()
{
const string testDirectory = "/manga/base/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file/data-{i}.txt", new MockFileData("abc"));
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.ClearDirectory($"{testDirectory}");
Assert.Empty(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}").GetDirectories());
Assert.True(ds.FileSystem.DirectoryInfo.FromDirectoryName(testDirectory).Exists);
Assert.False(ds.FileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}file/").Exists);
}
#endregion
#region CopyFilesToDirectory
[Fact]
public void CopyFilesToDirectory_ShouldMoveAllFiles()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip"}, "/manga/output/");
Assert.Equal(2, ds.GetFiles("/manga/output/").Count());
}
[Fact]
public void CopyFilesToDirectory_ShouldMoveAllFiles_InclFilesInNestedFolders()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
}
fileSystem.AddFile($"{testDirectory}nested/file_11.zip", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip", $"{testDirectory}nested/file_11.zip"}, "/manga/output/");
Assert.Equal(3, ds.GetFiles("/manga/output/").Count());
}
[Fact]
public void CopyFilesToDirectory_ShouldMoveAllFiles_WithPrepend()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip", $"{testDirectory}nested/file_11.zip"},
"/manga/output/", "mangarocks_");
Assert.Equal(2, ds.GetFiles("/manga/output/").Count());
Assert.All(ds.GetFiles("/manga/output/"), filepath => Assert.StartsWith("mangarocks_", ds.FileSystem.Path.GetFileName(filepath)));
}
[Fact]
public void CopyFilesToDirectory_ShouldMoveOnlyFilesThatExist()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
{
fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.CopyFilesToDirectory(new []{$"{testDirectory}file_{0}.zip", $"{testDirectory}file_{1}.zip", $"{testDirectory}nested/file_11.zip"},
"/manga/output/");
Assert.Equal(2, ds.GetFiles("/manga/output/").Count());
}
#endregion
#region ListDirectory
[Fact]
public void ListDirectory_EmptyForNonExistent()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddFile($"{testDirectory}file_0.zip", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
Assert.Empty(ds.ListDirectory("/comics/"));
}
[Fact]
public void ListDirectory_ListsAllDirectories()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddDirectory($"{testDirectory}dir1");
fileSystem.AddDirectory($"{testDirectory}dir2");
fileSystem.AddDirectory($"{testDirectory}dir3");
fileSystem.AddFile($"{testDirectory}file_0.zip", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
Assert.Equal(3, ds.ListDirectory(testDirectory).Count());
}
[Fact]
public void ListDirectory_ListsOnlyNonSystemAndHiddenOnly()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddDirectory($"{testDirectory}dir1");
var di = fileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}dir1");
di.Attributes |= FileAttributes.System;
fileSystem.AddDirectory($"{testDirectory}dir2");
di = fileSystem.DirectoryInfo.FromDirectoryName($"{testDirectory}dir2");
di.Attributes |= FileAttributes.Hidden;
fileSystem.AddDirectory($"{testDirectory}dir3");
fileSystem.AddFile($"{testDirectory}file_0.zip", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
Assert.Equal(1, ds.ListDirectory(testDirectory).Count());
}
#endregion
#region ReadFileAsync
[Fact]
public async Task ReadFileAsync_ShouldGetBytes()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddFile($"{testDirectory}file_1.zip", new MockFileData("Hello"));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var bytes = await ds.ReadFileAsync($"{testDirectory}file_1.zip");
Assert.Equal(Encoding.UTF8.GetBytes("Hello"), bytes);
}
[Fact]
public async Task ReadFileAsync_ShouldReadNothingFromNonExistent()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddFile($"{testDirectory}file_1.zip", new MockFileData("Hello"));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var bytes = await ds.ReadFileAsync($"{testDirectory}file_32123.zip");
Assert.Empty(bytes);
}
#endregion
#region FindHighestDirectoriesFromFiles
[Theory] [Theory]
[InlineData(new [] {"C:/Manga/"}, new [] {"C:/Manga/Love Hina/Vol. 01.cbz"}, "C:/Manga/Love Hina")] [InlineData(new [] {"C:/Manga/"}, new [] {"C:/Manga/Love Hina/Vol. 01.cbz"}, "C:/Manga/Love Hina")]
public void FindHighestDirectoriesFromFilesTest(string[] rootDirectories, string[] folders, string expectedDirectory) [InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/Dir 2/"}, new [] {"C:/Manga/Dir 1/Love Hina/Vol. 01.cbz"}, "C:/Manga/Dir 1/Love Hina")]
[InlineData(new [] {"C:/Manga/Dir 1/", "c://Manga/"}, new [] {"D:/Manga/Love Hina/Vol. 01.cbz", "D:/Manga/Vol. 01.cbz"}, "")]
public void FindHighestDirectoriesFromFilesTest(string[] rootDirectories, string[] files, string expectedDirectory)
{ {
var actual = DirectoryService.FindHighestDirectoriesFromFiles(rootDirectories, folders); var fileSystem = new MockFileSystem();
var expected = new Dictionary<string, string> {{expectedDirectory, ""}}; foreach (var directory in rootDirectories)
{
fileSystem.AddDirectory(directory);
}
foreach (var f in files)
{
fileSystem.AddFile(f, new MockFileData(""));
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var actual = ds.FindHighestDirectoriesFromFiles(rootDirectories, files);
var expected = new Dictionary<string, string>();
if (!string.IsNullOrEmpty(expectedDirectory))
{
expected = new Dictionary<string, string> {{expectedDirectory, ""}};
}
Assert.Equal(expected, actual); Assert.Equal(expected, actual);
} }
#endregion
#region GetFoldersTillRoot
[Theory] [Theory]
[InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")] [InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")]
[InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake", "Omake,Specials,Love Hina")] [InlineData("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake", "Omake,Specials,Love Hina")]
@ -115,12 +638,78 @@ namespace API.Tests.Services
[InlineData(@"M:\", @"M:\Toukyou Akazukin\Vol. 01 Ch. 005.cbz", @"Toukyou Akazukin")] [InlineData(@"M:\", @"M:\Toukyou Akazukin\Vol. 01 Ch. 005.cbz", @"Toukyou Akazukin")]
public void GetFoldersTillRoot_Test(string rootPath, string fullpath, string expectedArray) public void GetFoldersTillRoot_Test(string rootPath, string fullpath, string expectedArray)
{ {
var fileSystem = new MockFileSystem();
fileSystem.AddDirectory(rootPath);
fileSystem.AddFile(fullpath, new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var expected = expectedArray.Split(","); var expected = expectedArray.Split(",");
if (expectedArray.Equals(string.Empty)) if (expectedArray.Equals(string.Empty))
{ {
expected = Array.Empty<string>(); expected = Array.Empty<string>();
} }
Assert.Equal(expected, DirectoryService.GetFoldersTillRoot(rootPath, fullpath)); Assert.Equal(expected, ds.GetFoldersTillRoot(rootPath, fullpath));
} }
#endregion
#region RemoveNonImages
[Fact]
public void RemoveNonImages()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddDirectory(testDirectory);
fileSystem.AddFile($"{testDirectory}file/data-0.txt", new MockFileData("abc"));
fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc"));
fileSystem.AddFile($"{testDirectory}data-2.png", new MockFileData("abc"));
fileSystem.AddFile($"{testDirectory}data-3.webp", new MockFileData("abc"));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.RemoveNonImages($"{testDirectory}");
Assert.False(fileSystem.FileExists($"{testDirectory}file/data-0.txt"));
Assert.Equal(3, ds.GetFiles($"{testDirectory}", searchOption:SearchOption.AllDirectories).Count());
}
#endregion
#region Flatten
[Fact]
public void Flatten_ShouldDoNothing()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddDirectory(testDirectory);
fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc"));
fileSystem.AddFile($"{testDirectory}data-2.png", new MockFileData("abc"));
fileSystem.AddFile($"{testDirectory}data-3.webp", new MockFileData("abc"));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.Flatten($"{testDirectory}");
Assert.True(fileSystem.FileExists($"{testDirectory}data-1.jpg"));
Assert.True(fileSystem.FileExists($"{testDirectory}data-2.png"));
Assert.True(fileSystem.FileExists($"{testDirectory}data-3.webp"));
}
[Fact]
public void Flatten_ShouldFlatten()
{
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
fileSystem.AddDirectory(testDirectory);
fileSystem.AddFile($"{testDirectory}data-1.jpg", new MockFileData("abc"));
fileSystem.AddFile($"{testDirectory}subdir/data-3.webp", new MockFileData("abc"));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
ds.Flatten($"{testDirectory}");
Assert.Equal(2, ds.GetFiles(testDirectory).Count());
Assert.False(fileSystem.FileExists($"{testDirectory}subdir/data-3.webp"));
Assert.True(fileSystem.Directory.Exists($"{testDirectory}subdir/"));
}
#endregion
} }
} }

View File

@ -0,0 +1,164 @@
using System.Collections.Generic;
using System.Data.Common;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Metadata;
using API.Entities;
using API.Entities.Enums;
using API.Parser;
using API.Services;
using API.Services.Tasks.Scanner;
using API.SignalR;
using AutoMapper;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Services;
internal class MockReadingItemService : IReadingItemService
{
public ComicInfo GetComicInfo(string filePath, MangaFormat format)
{
throw new System.NotImplementedException();
}
public int GetNumberOfPages(string filePath, MangaFormat format)
{
throw new System.NotImplementedException();
}
public string GetCoverImage(string fileFilePath, string fileName, MangaFormat format)
{
throw new System.NotImplementedException();
}
public void Extract(string fileFilePath, string targetDirectory, MangaFormat format, int imageCount = 1)
{
throw new System.NotImplementedException();
}
public ParserInfo Parse(string path, string rootPath, LibraryType type)
{
throw new System.NotImplementedException();
}
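// A hedged example, not part of this commit: instead of throwing, a test double could return a
// canned ParserInfo so code paths that go through IReadingItemService see predictable values.
// The method name and the values below are illustrative only.
public ParserInfo ParseWithCannedResult(string path, string rootPath, LibraryType type)
{
return new ParserInfo()
{
Series = "Test Series",
Volumes = "1",
Chapters = "1",
Format = MangaFormat.Archive,
FullFilePath = path
};
}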
}
public class ParseScannedFilesTests
{
private readonly ILogger<ParseScannedFiles> _logger = Substitute.For<ILogger<ParseScannedFiles>>();
private readonly IUnitOfWork _unitOfWork;
private readonly DbConnection _connection;
private readonly DataContext _context;
private const string CacheDirectory = "C:/kavita/config/cache/";
private const string CoverImageDirectory = "C:/kavita/config/covers/";
private const string BackupDirectory = "C:/kavita/config/backups/";
private const string DataDirectory = "C:/data/";
public ParseScannedFilesTests()
{
var contextOptions = new DbContextOptionsBuilder()
.UseSqlite(CreateInMemoryDatabase())
.Options;
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
_context = new DataContext(contextOptions);
Task.Run(SeedDb).GetAwaiter().GetResult();
_unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
// Since ProcessFile relies on _readingItemService, we supply our own IReadingItemService implementation so we control how those calls behave
}
#region Setup
private static DbConnection CreateInMemoryDatabase()
{
var connection = new SqliteConnection("Filename=:memory:");
connection.Open();
return connection;
}
public void Dispose() => _connection.Dispose();
private async Task<bool> SeedDb()
{
await _context.Database.MigrateAsync();
var filesystem = CreateFileSystem();
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
setting.Value = CacheDirectory;
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting.Value = BackupDirectory;
_context.ServerSetting.Update(setting);
_context.Library.Add(new Library()
{
Name = "Manga",
Folders = new List<FolderPath>()
{
new FolderPath()
{
Path = DataDirectory
}
}
});
return await _context.SaveChangesAsync() > 0;
}
private async Task ResetDB()
{
_context.Series.RemoveRange(_context.Series.ToList());
await _context.SaveChangesAsync();
}
private static MockFileSystem CreateFileSystem()
{
var fileSystem = new MockFileSystem();
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
fileSystem.AddDirectory("C:/kavita/config/");
fileSystem.AddDirectory(CacheDirectory);
fileSystem.AddDirectory(CoverImageDirectory);
fileSystem.AddDirectory(BackupDirectory);
fileSystem.AddDirectory(DataDirectory);
return fileSystem;
}
#endregion
#region GetInfosByName
[Fact]
public void GetInfosByName()
{
}
#endregion
#region MergeName
[Fact]
public void MergeName_()
{
}
#endregion
}

View File

@ -11,8 +11,6 @@ using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
using API.Entities.Metadata; using API.Entities.Metadata;
using API.Helpers; using API.Helpers;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser; using API.Parser;
using API.Services; using API.Services;
using API.Services.Tasks; using API.Services.Tasks;
@ -29,76 +27,8 @@ using Xunit;
namespace API.Tests.Services namespace API.Tests.Services
{ {
public class ScannerServiceTests : IDisposable public class ScannerServiceTests
{ {
private readonly ScannerService _scannerService;
private readonly ILogger<ScannerService> _logger = Substitute.For<ILogger<ScannerService>>();
private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
private readonly IBookService _bookService = Substitute.For<IBookService>();
private readonly IImageService _imageService = Substitute.For<IImageService>();
private readonly IDirectoryService _directoryService = Substitute.For<IDirectoryService>();
private readonly ILogger<MetadataService> _metadataLogger = Substitute.For<ILogger<MetadataService>>();
private readonly ICacheService _cacheService;
private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();
private readonly DbConnection _connection;
private readonly DataContext _context;
public ScannerServiceTests()
{
var contextOptions = new DbContextOptionsBuilder()
.UseSqlite(CreateInMemoryDatabase())
.Options;
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
_context = new DataContext(contextOptions);
Task.Run(SeedDb).GetAwaiter().GetResult();
IUnitOfWork unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
var file = new MockFileData("")
{
LastWriteTime = DateTimeOffset.Now.Subtract(TimeSpan.FromMinutes(1))
};
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
{ "/data/Darker than Black.zip", file },
{ "/data/Cage of Eden - v10.cbz", file },
{ "/data/Cage of Eden - v1.cbz", file },
});
var fileService = new FileService(fileSystem);
ICacheHelper cacheHelper = new CacheHelper(fileService);
IMetadataService metadataService =
Substitute.For<MetadataService>(unitOfWork, _metadataLogger, _archiveService,
_bookService, _imageService, _messageHub, cacheHelper);
_scannerService = new ScannerService(unitOfWork, _logger, _archiveService, metadataService, _bookService,
_cacheService, _messageHub, fileService, _directoryService);
}
private async Task<bool> SeedDb()
{
await _context.Database.MigrateAsync();
await Seed.SeedSettings(_context);
_context.Library.Add(new Library()
{
Name = "Manga",
Folders = new List<FolderPath>()
{
new FolderPath()
{
Path = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Manga")
}
}
});
return await _context.SaveChangesAsync() > 0;
}
[Fact] [Fact]
public void AddOrUpdateFileForChapter() public void AddOrUpdateFileForChapter()
{ {
@ -227,16 +157,5 @@ namespace API.Tests.Services
} }
} }
private static DbConnection CreateInMemoryDatabase()
{
var connection = new SqliteConnection("Filename=:memory:");
connection.Open();
return connection;
}
public void Dispose() => _connection.Dispose();
} }
} }

View File

@ -1,80 +0,0 @@
""" This script should be run on a directory which will generate a test case file
that can be loaded into the renametest.py"""
import os
from pathlib import Path
import shutil
verbose = False
def print_log(val):
if verbose:
print(val)
def create_test_base(file, root_dir):
""" Creates and returns a new base directory for data creation for a given testcase."""
base_dir = os.path.split(file.split('-testcase.txt')[0])[-1]
print_log('base_dir: {0}'.format(base_dir))
new_dir = os.path.join(root_dir, base_dir)
print_log('new dir: {0}'.format(new_dir))
p = Path(new_dir)
if not p.exists():
os.mkdir(new_dir)
return new_dir
def generate_data(file, root_dir):
''' Generates directories and fake files for testing against '''
base_dir = ''
if file.endswith('-testcase.txt'):
base_dir = create_test_base(file, root_dir)
files_to_create = []
with open(file, 'r') as in_file:
files_to_create = in_file.read().splitlines()
for filepath in files_to_create:
for part in os.path.split(filepath):
part_path = os.path.join(base_dir, part)
print_log('Checking if {0} exists '.format(part_path))
p = Path(part_path)
if not p.exists():
print_log('Creating: {0}'.format(part))
if p.suffix != '':
with open(os.path.join(root_dir, base_dir + '/' + filepath), 'w+') as f:
f.write('')
else:
os.mkdir(part_path)
def clean_up_generated_data(root_dir):
for root, dirs, files in os.walk(root_dir):
for dir in dirs:
shutil.rmtree(os.path.join(root, dir))
for file in files:
if not file.endswith('-testcase.txt'):
print_log('Removing {0}'.format(os.path.join(root, file)))
os.remove(os.path.join(root, file))
def generate_test_file():
root_dir = os.path.abspath('.')
current_folder = os.path.split(root_dir)[-1]
out_files = []
for root, _, files in os.walk(root_dir):
for file in files:
if not file.endswith('-testcase.txt'):
filename = os.path.join(root.replace(root_dir, ''), file) # root_dir or root_dir + '//'?
out_files.append(filename)
with open(os.path.join(root_dir, current_folder + '-testcase.txt'), 'w+') as f:
for filename in out_files:
f.write(filename + '\n')
if __name__ == '__main__':
verbose = True
generate_test_file()

View File

@ -1,2 +1,3 @@
<wpf:ResourceDictionary xml:space="preserve" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml" xmlns:s="clr-namespace:System;assembly=mscorlib" xmlns:ss="urn:shemas-jetbrains-com:settings-storage-xaml" xmlns:wpf="http://schemas.microsoft.com/winfx/2006/xaml/presentation"> <wpf:ResourceDictionary xml:space="preserve" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml" xmlns:s="clr-namespace:System;assembly=mscorlib" xmlns:ss="urn:shemas-jetbrains-com:settings-storage-xaml" xmlns:wpf="http://schemas.microsoft.com/winfx/2006/xaml/presentation">
<s:Boolean x:Key="/Default/CodeInspection/NamespaceProvider/NamespaceFoldersToSkip/=covers/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary> <s:Boolean x:Key="/Default/CodeInspection/NamespaceProvider/NamespaceFoldersToSkip/=covers/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeInspection/NamespaceProvider/NamespaceFoldersToSkip/=wwwroot/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>

View File

@ -4,12 +4,11 @@ using System.Linq;
using System.Reflection; using System.Reflection;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Constants; using API.Constants;
using API.Data;
using API.DTOs; using API.DTOs;
using API.DTOs.Account; using API.DTOs.Account;
using API.Entities; using API.Entities;
using API.Extensions; using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services; using API.Services;
using AutoMapper; using AutoMapper;
using Kavita.Common; using Kavita.Common;

View File

@ -1,12 +1,11 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data;
using API.DTOs; using API.DTOs;
using API.DTOs.Reader; using API.DTOs.Reader;
using API.Entities.Enums; using API.Entities.Enums;
using API.Extensions; using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services; using API.Services;
using HtmlAgilityPack; using HtmlAgilityPack;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;

View File

@ -4,10 +4,8 @@ using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data; using API.Data;
using API.DTOs.CollectionTags; using API.DTOs.CollectionTags;
using API.Entities;
using API.Entities.Metadata; using API.Entities.Metadata;
using API.Extensions; using API.Extensions;
using API.Interfaces;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;

View File

@ -4,12 +4,11 @@ using System.IO;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Comparators; using API.Comparators;
using API.Data;
using API.DTOs.Downloads; using API.DTOs.Downloads;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
using API.Extensions; using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services; using API.Services;
using API.SignalR; using API.SignalR;
using Kavita.Common; using Kavita.Common;
@ -47,21 +46,21 @@ namespace API.Controllers
public async Task<ActionResult<long>> GetVolumeSize(int volumeId) public async Task<ActionResult<long>> GetVolumeSize(int volumeId)
{ {
var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId); var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId);
return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath))); return Ok(_directoryService.GetTotalSize(files.Select(c => c.FilePath)));
} }
[HttpGet("chapter-size")] [HttpGet("chapter-size")]
public async Task<ActionResult<long>> GetChapterSize(int chapterId) public async Task<ActionResult<long>> GetChapterSize(int chapterId)
{ {
var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId); var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId);
return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath))); return Ok(_directoryService.GetTotalSize(files.Select(c => c.FilePath)));
} }
[HttpGet("series-size")] [HttpGet("series-size")]
public async Task<ActionResult<long>> GetSeriesSize(int seriesId) public async Task<ActionResult<long>> GetSeriesSize(int seriesId)
{ {
var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId); var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId);
return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath))); return Ok(_directoryService.GetTotalSize(files.Select(c => c.FilePath)));
} }
[HttpGet("volume")] [HttpGet("volume")]
@ -141,13 +140,13 @@ namespace API.Controllers
var totalFilePaths = new List<string>(); var totalFilePaths = new List<string>();
var tempFolder = $"download_{series.Id}_bookmarks"; var tempFolder = $"download_{series.Id}_bookmarks";
var fullExtractPath = Path.Join(DirectoryService.TempDirectory, tempFolder); var fullExtractPath = Path.Join(_directoryService.TempDirectory, tempFolder);
if (new DirectoryInfo(fullExtractPath).Exists) if (_directoryService.FileSystem.DirectoryInfo.FromDirectoryName(fullExtractPath).Exists)
{ {
return BadRequest( return BadRequest(
"Server is currently processing this exact download. Please try again in a few minutes."); "Server is currently processing this exact download. Please try again in a few minutes.");
} }
DirectoryService.ExistOrCreate(fullExtractPath); _directoryService.ExistOrCreate(fullExtractPath);
var uniqueChapterIds = downloadBookmarkDto.Bookmarks.Select(b => b.ChapterId).Distinct().ToList(); var uniqueChapterIds = downloadBookmarkDto.Bookmarks.Select(b => b.ChapterId).Distinct().ToList();
@ -160,16 +159,16 @@ namespace API.Controllers
switch (series.Format) switch (series.Format)
{ {
case MangaFormat.Image: case MangaFormat.Image:
DirectoryService.ExistOrCreate(chapterExtractPath); _directoryService.ExistOrCreate(chapterExtractPath);
_directoryService.CopyFilesToDirectory(mangaFiles.Select(f => f.FilePath), chapterExtractPath, $"{chapterId}_"); _directoryService.CopyFilesToDirectory(mangaFiles.Select(f => f.FilePath), chapterExtractPath, $"{chapterId}_");
break; break;
case MangaFormat.Archive: case MangaFormat.Archive:
case MangaFormat.Pdf: case MangaFormat.Pdf:
_cacheService.ExtractChapterFiles(chapterExtractPath, mangaFiles.ToList()); _cacheService.ExtractChapterFiles(chapterExtractPath, mangaFiles.ToList());
var originalFiles = DirectoryService.GetFilesWithExtension(chapterExtractPath, var originalFiles = _directoryService.GetFilesWithExtension(chapterExtractPath,
Parser.Parser.ImageFileExtensions); Parser.Parser.ImageFileExtensions);
_directoryService.CopyFilesToDirectory(originalFiles, chapterExtractPath, $"{chapterId}_"); _directoryService.CopyFilesToDirectory(originalFiles, chapterExtractPath, $"{chapterId}_");
DirectoryService.DeleteFiles(originalFiles); _directoryService.DeleteFiles(originalFiles);
break; break;
case MangaFormat.Epub: case MangaFormat.Epub:
return BadRequest("Series is not in a valid format."); return BadRequest("Series is not in a valid format.");
@ -177,7 +176,7 @@ namespace API.Controllers
return BadRequest("Series is not in a valid format. Please rescan series and try again."); return BadRequest("Series is not in a valid format. Please rescan series and try again.");
} }
var files = DirectoryService.GetFilesWithExtension(chapterExtractPath, Parser.Parser.ImageFileExtensions); var files = _directoryService.GetFilesWithExtension(chapterExtractPath, Parser.Parser.ImageFileExtensions);
// Filter out images that aren't in bookmarks // Filter out images that aren't in bookmarks
Array.Sort(files, _numericComparer); Array.Sort(files, _numericComparer);
totalFilePaths.AddRange(files.Where((_, i) => chapterPages.Contains(i))); totalFilePaths.AddRange(files.Where((_, i) => chapterPages.Contains(i)));
@ -186,7 +185,7 @@ namespace API.Controllers
var (fileBytes, _) = await _archiveService.CreateZipForDownload(totalFilePaths, var (fileBytes, _) = await _archiveService.CreateZipForDownload(totalFilePaths,
tempFolder); tempFolder);
DirectoryService.ClearAndDeleteDirectory(fullExtractPath); _directoryService.ClearAndDeleteDirectory(fullExtractPath);
return File(fileBytes, DefaultContentType, $"{series.Name} - Bookmarks.zip"); return File(fileBytes, DefaultContentType, $"{series.Name} - Bookmarks.zip");
} }

View File

@ -1,5 +1,5 @@
using System.IO; using System.IO;
using API.Interfaces; using API.Services;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
namespace API.Controllers namespace API.Controllers

View File

@ -1,7 +1,7 @@
using System.IO; using System.IO;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data;
using API.Extensions; using API.Extensions;
using API.Interfaces;
using API.Services; using API.Services;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
@ -13,11 +13,13 @@ namespace API.Controllers
public class ImageController : BaseApiController public class ImageController : BaseApiController
{ {
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
private readonly IDirectoryService _directoryService;
/// <inheritdoc /> /// <inheritdoc />
public ImageController(IUnitOfWork unitOfWork) public ImageController(IUnitOfWork unitOfWork, IDirectoryService directoryService)
{ {
_unitOfWork = unitOfWork; _unitOfWork = unitOfWork;
_directoryService = directoryService;
} }
/// <summary> /// <summary>
@ -28,12 +30,12 @@ namespace API.Controllers
[HttpGet("chapter-cover")] [HttpGet("chapter-cover")]
public async Task<ActionResult> GetChapterCoverImage(int chapterId) public async Task<ActionResult> GetChapterCoverImage(int chapterId)
{ {
var path = Path.Join(DirectoryService.CoverImageDirectory, await _unitOfWork.ChapterRepository.GetChapterCoverImageAsync(chapterId)); var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.ChapterRepository.GetChapterCoverImageAsync(chapterId));
if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No cover image"); if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image");
var format = Path.GetExtension(path).Replace(".", ""); var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", "");
Response.AddCacheHeader(path); Response.AddCacheHeader(path);
return PhysicalFile(path, "image/" + format, Path.GetFileName(path)); return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path));
} }
/// <summary> /// <summary>
@ -44,12 +46,12 @@ namespace API.Controllers
[HttpGet("volume-cover")] [HttpGet("volume-cover")]
public async Task<ActionResult> GetVolumeCoverImage(int volumeId) public async Task<ActionResult> GetVolumeCoverImage(int volumeId)
{ {
var path = Path.Join(DirectoryService.CoverImageDirectory, await _unitOfWork.VolumeRepository.GetVolumeCoverImageAsync(volumeId)); var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.VolumeRepository.GetVolumeCoverImageAsync(volumeId));
if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No cover image"); if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image");
var format = Path.GetExtension(path).Replace(".", ""); var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", "");
Response.AddCacheHeader(path); Response.AddCacheHeader(path);
return PhysicalFile(path, "image/" + format, Path.GetFileName(path)); return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path));
} }
/// <summary> /// <summary>
@ -60,12 +62,12 @@ namespace API.Controllers
[HttpGet("series-cover")] [HttpGet("series-cover")]
public async Task<ActionResult> GetSeriesCoverImage(int seriesId) public async Task<ActionResult> GetSeriesCoverImage(int seriesId)
{ {
var path = Path.Join(DirectoryService.CoverImageDirectory, await _unitOfWork.SeriesRepository.GetSeriesCoverImageAsync(seriesId)); var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.SeriesRepository.GetSeriesCoverImageAsync(seriesId));
if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No cover image"); if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image");
var format = Path.GetExtension(path).Replace(".", ""); var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", "");
Response.AddCacheHeader(path); Response.AddCacheHeader(path);
return PhysicalFile(path, "image/" + format, Path.GetFileName(path)); return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path));
} }
/// <summary> /// <summary>
@ -76,12 +78,12 @@ namespace API.Controllers
[HttpGet("collection-cover")] [HttpGet("collection-cover")]
public async Task<ActionResult> GetCollectionCoverImage(int collectionTagId) public async Task<ActionResult> GetCollectionCoverImage(int collectionTagId)
{ {
var path = Path.Join(DirectoryService.CoverImageDirectory, await _unitOfWork.CollectionTagRepository.GetCoverImageAsync(collectionTagId)); var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.CollectionTagRepository.GetCoverImageAsync(collectionTagId));
if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No cover image"); if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest($"No cover image");
var format = Path.GetExtension(path).Replace(".", ""); var format = _directoryService.FileSystem.Path.GetExtension(path).Replace(".", "");
Response.AddCacheHeader(path); Response.AddCacheHeader(path);
return PhysicalFile(path, "image/" + format, Path.GetFileName(path)); return PhysicalFile(path, "image/" + format, _directoryService.FileSystem.Path.GetFileName(path));
} }
} }
} }

View File

@ -3,13 +3,13 @@ using System.Collections.Generic;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories; using API.Data.Repositories;
using API.DTOs; using API.DTOs;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
using API.Extensions; using API.Extensions;
using API.Interfaces; using API.Services;
using API.Interfaces.Services;
using AutoMapper; using AutoMapper;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;

View File

@ -5,6 +5,7 @@ using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using System.Xml.Serialization; using System.Xml.Serialization;
using API.Comparators; using API.Comparators;
using API.Data;
using API.DTOs; using API.DTOs;
using API.DTOs.CollectionTags; using API.DTOs.CollectionTags;
using API.DTOs.Filtering; using API.DTOs.Filtering;
@ -12,16 +13,14 @@ using API.DTOs.OPDS;
using API.Entities; using API.Entities;
using API.Extensions; using API.Extensions;
using API.Helpers; using API.Helpers;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services; using API.Services;
using Kavita.Common; using Kavita.Common;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
namespace API.Controllers namespace API.Controllers;
public class OpdsController : BaseApiController
{ {
public class OpdsController : BaseApiController
{
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
private readonly IDownloadService _downloadService; private readonly IDownloadService _downloadService;
private readonly IDirectoryService _directoryService; private readonly IDirectoryService _directoryService;
@ -642,7 +641,7 @@ namespace API.Controllers
private FeedEntry CreateChapter(int seriesId, int volumeId, int chapterId, MangaFile mangaFile, SeriesDto series, Volume volume, ChapterDto chapter, string apiKey) private FeedEntry CreateChapter(int seriesId, int volumeId, int chapterId, MangaFile mangaFile, SeriesDto series, Volume volume, ChapterDto chapter, string apiKey)
{ {
var fileSize = var fileSize =
DirectoryService.GetHumanReadableBytes(DirectoryService.GetTotalSize(new List<string>() DirectoryService.GetHumanReadableBytes(_directoryService.GetTotalSize(new List<string>()
{mangaFile.FilePath})); {mangaFile.FilePath}));
var fileType = _downloadService.GetContentTypeFromFile(mangaFile.FilePath); var fileType = _downloadService.GetContentTypeFromFile(mangaFile.FilePath);
var filename = Uri.EscapeDataString(Path.GetFileName(mangaFile.FilePath) ?? string.Empty); var filename = Uri.EscapeDataString(Path.GetFileName(mangaFile.FilePath) ?? string.Empty);
@ -678,7 +677,7 @@ namespace API.Controllers
try try
{ {
var (path, _) = await _cacheService.GetCachedPagePath(chapter, pageNumber); var path = _cacheService.GetCachedPagePath(chapter, pageNumber);
if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {pageNumber}"); if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {pageNumber}");
var content = await _directoryService.ReadFileAsync(path); var content = await _directoryService.ReadFileAsync(path);
@ -708,7 +707,7 @@ namespace API.Controllers
[HttpGet("{apiKey}/favicon")] [HttpGet("{apiKey}/favicon")]
public async Task<ActionResult> GetFavicon(string apiKey) public async Task<ActionResult> GetFavicon(string apiKey)
{ {
var files = DirectoryService.GetFilesWithExtension(Path.Join(Directory.GetCurrentDirectory(), ".."), @"\.ico"); var files = _directoryService.GetFilesWithExtension(Path.Join(Directory.GetCurrentDirectory(), ".."), @"\.ico");
if (files.Length == 0) return BadRequest("Cannot find icon"); if (files.Length == 0) return BadRequest("Cannot find icon");
var path = files[0]; var path = files[0];
var content = await _directoryService.ReadFileAsync(path); var content = await _directoryService.ReadFileAsync(path);
@ -781,5 +780,4 @@ namespace API.Controllers
_xmlSerializer.Serialize(sm, feed); _xmlSerializer.Serialize(sm, feed);
return sm.ToString().Replace("utf-16", "utf-8"); // Chunky cannot accept UTF-16 feeds return sm.ToString().Replace("utf-16", "utf-8"); // Chunky cannot accept UTF-16 feeds
} }
}
} }
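Two call-shape changes surface in this controller: GetCachedPagePath now hands back the cached page path directly (no tuple, no await), and the byte total for the OPDS entry comes from the injected IDirectoryService while GetHumanReadableBytes stays static. Because GetTotalSize is an instance method now, it can be exercised in memory; a sketch under the same MockFileSystem assumption as the previous example, with illustrative paths and sizes.

using System.Collections.Generic;
using System.IO.Abstractions.TestingHelpers;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;

public class TotalSizeTests
{
    [Fact]
    public void GetTotalSize_SumsTheSizesOfAllPassedFiles()
    {
        var fileSystem = new MockFileSystem();
        fileSystem.AddFile("C:/manga/v01.cbz", new MockFileData(new byte[10]));
        fileSystem.AddFile("C:/manga/v02.cbz", new MockFileData(new byte[20]));
        var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);

        var total = directoryService.GetTotalSize(new List<string> { "C:/manga/v01.cbz", "C:/manga/v02.cbz" });

        Assert.Equal(30L, total);
        // Formatting remains a static helper, exactly as the OPDS code above calls it:
        Assert.False(string.IsNullOrEmpty(DirectoryService.GetHumanReadableBytes(total)));
    }
}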

View File

@ -1,7 +1,7 @@
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data;
using API.DTOs; using API.DTOs;
using API.Interfaces; using API.Services;
using API.Interfaces.Services;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;

View File

@ -3,13 +3,13 @@ using System.Collections.Generic;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories; using API.Data.Repositories;
using API.DTOs; using API.DTOs;
using API.DTOs.Reader; using API.DTOs.Reader;
using API.Entities; using API.Entities;
using API.Extensions; using API.Extensions;
using API.Interfaces; using API.Services;
using API.Interfaces.Services;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
@ -50,7 +50,7 @@ namespace API.Controllers
try try
{ {
var (path, _) = await _cacheService.GetCachedPagePath(chapter, page); var path = _cacheService.GetCachedPagePath(chapter, page);
if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {page}"); if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {page}");
var format = Path.GetExtension(path).Replace(".", ""); var format = Path.GetExtension(path).Replace(".", "");
@ -90,7 +90,7 @@ namespace API.Controllers
LibraryId = dto.LibraryId, LibraryId = dto.LibraryId,
IsSpecial = dto.IsSpecial, IsSpecial = dto.IsSpecial,
Pages = dto.Pages, Pages = dto.Pages,
ChapterTitle = dto.ChapterTitle ChapterTitle = dto.ChapterTitle ?? string.Empty
}); });
} }

View File

@ -2,11 +2,11 @@
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Comparators; using API.Comparators;
using API.Data;
using API.DTOs.ReadingLists; using API.DTOs.ReadingLists;
using API.Entities; using API.Entities;
using API.Extensions; using API.Extensions;
using API.Helpers; using API.Helpers;
using API.Interfaces;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
namespace API.Controllers namespace API.Controllers

View File

@ -6,11 +6,10 @@ using API.Data;
using API.Data.Repositories; using API.Data.Repositories;
using API.DTOs; using API.DTOs;
using API.DTOs.Filtering; using API.DTOs.Filtering;
using API.DTOs.Metadata;
using API.Entities; using API.Entities;
using API.Extensions; using API.Extensions;
using API.Helpers; using API.Helpers;
using API.Interfaces; using API.Services;
using API.SignalR; using API.SignalR;
using Kavita.Common; using Kavita.Common;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;

View File

@ -5,7 +5,8 @@ using System.Threading.Tasks;
using API.DTOs.Stats; using API.DTOs.Stats;
using API.DTOs.Update; using API.DTOs.Update;
using API.Extensions; using API.Extensions;
using API.Interfaces.Services; using API.Services;
using API.Services.Tasks;
using Kavita.Common; using Kavita.Common;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
@ -26,10 +27,11 @@ namespace API.Controllers
private readonly ICacheService _cacheService; private readonly ICacheService _cacheService;
private readonly IVersionUpdaterService _versionUpdaterService; private readonly IVersionUpdaterService _versionUpdaterService;
private readonly IStatsService _statsService; private readonly IStatsService _statsService;
private readonly ICleanupService _cleanupService;
public ServerController(IHostApplicationLifetime applicationLifetime, ILogger<ServerController> logger, IConfiguration config, public ServerController(IHostApplicationLifetime applicationLifetime, ILogger<ServerController> logger, IConfiguration config,
IBackupService backupService, IArchiveService archiveService, ICacheService cacheService, IBackupService backupService, IArchiveService archiveService, ICacheService cacheService,
IVersionUpdaterService versionUpdaterService, IStatsService statsService) IVersionUpdaterService versionUpdaterService, IStatsService statsService, ICleanupService cleanupService)
{ {
_applicationLifetime = applicationLifetime; _applicationLifetime = applicationLifetime;
_logger = logger; _logger = logger;
@ -39,6 +41,7 @@ namespace API.Controllers
_cacheService = cacheService; _cacheService = cacheService;
_versionUpdaterService = versionUpdaterService; _versionUpdaterService = versionUpdaterService;
_statsService = statsService; _statsService = statsService;
_cleanupService = cleanupService;
} }
/// <summary> /// <summary>
@ -62,7 +65,7 @@ namespace API.Controllers
public ActionResult ClearCache() public ActionResult ClearCache()
{ {
_logger.LogInformation("{UserName} is clearing cache of server from admin dashboard", User.GetUsername()); _logger.LogInformation("{UserName} is clearing cache of server from admin dashboard", User.GetUsername());
_cacheService.Cleanup(); _cleanupService.CleanupCacheDirectory();
return Ok(); return Ok();
} }
@ -93,7 +96,7 @@ namespace API.Controllers
[HttpGet("logs")] [HttpGet("logs")]
public async Task<ActionResult> GetLogs() public async Task<ActionResult> GetLogs()
{ {
var files = _backupService.LogFiles(_config.GetMaxRollingFiles(), _config.GetLoggingFileName()); var files = _backupService.GetLogFiles(_config.GetMaxRollingFiles(), _config.GetLoggingFileName());
try try
{ {
var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files, "logs"); var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files, "logs");

View File

@ -3,12 +3,11 @@ using System.Collections.Generic;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data;
using API.DTOs.Settings; using API.DTOs.Settings;
using API.Entities.Enums; using API.Entities.Enums;
using API.Extensions; using API.Extensions;
using API.Helpers.Converters; using API.Helpers.Converters;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services; using API.Services;
using Kavita.Common; using Kavita.Common;
using Kavita.Common.Extensions; using Kavita.Common.Extensions;

View File

@ -1,8 +1,7 @@
using System; using System;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data;
using API.DTOs.Uploads; using API.DTOs.Uploads;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services; using API.Services;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;

View File

@ -1,10 +1,10 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories; using API.Data.Repositories;
using API.DTOs; using API.DTOs;
using API.Extensions; using API.Extensions;
using API.Interfaces;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;

View File

@ -1,4 +1,5 @@
using API.Entities.Enums; using System;
using API.Entities.Enums;
namespace API.DTOs.Reader namespace API.DTOs.Reader
{ {
@ -12,7 +13,7 @@ namespace API.DTOs.Reader
public MangaFormat SeriesFormat { get; set; } public MangaFormat SeriesFormat { get; set; }
public int SeriesId { get; set; } public int SeriesId { get; set; }
public int LibraryId { get; set; } public int LibraryId { get; set; }
public string ChapterTitle { get; set; } = ""; public string ChapterTitle { get; set; } = string.Empty;
public int Pages { get; set; } public int Pages { get; set; }
public string FileName { get; set; } public string FileName { get; set; }
public bool IsSpecial { get; set; } public bool IsSpecial { get; set; }

View File

@ -1,6 +1,5 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.IO;
using API.Data.Metadata; using API.Data.Metadata;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;

View File

@ -1,12 +1,16 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.IO; using System.IO;
using System.IO.Abstractions;
using System.Linq; using System.Linq;
using API.Services; using API.Services;
using Kavita.Common; using Kavita.Common;
namespace API.Data namespace API.Data
{ {
/// <summary>
/// A migration that moves config-related files to the config/ directory for installs prior to v0.4.9.
/// </summary>
public static class MigrateConfigFiles public static class MigrateConfigFiles
{ {
private static readonly List<string> LooseLeafFiles = new List<string>() private static readonly List<string> LooseLeafFiles = new List<string>()
@ -31,7 +35,7 @@ namespace API.Data
/// In v0.4.8 we moved all config files to config/ to match with how docker was setup. This will move all config files from current directory /// In v0.4.8 we moved all config files to config/ to match with how docker was setup. This will move all config files from current directory
/// to config/ /// to config/
/// </summary> /// </summary>
public static void Migrate(bool isDocker) public static void Migrate(bool isDocker, IDirectoryService directoryService)
{ {
Console.WriteLine("Checking if migration to config/ is needed"); Console.WriteLine("Checking if migration to config/ is needed");
@ -46,8 +50,8 @@ namespace API.Data
Console.WriteLine( Console.WriteLine(
"Migrating files from pre-v0.4.8. All Kavita config files are now located in config/"); "Migrating files from pre-v0.4.8. All Kavita config files are now located in config/");
CopyAppFolders(); CopyAppFolders(directoryService);
DeleteAppFolders(); DeleteAppFolders(directoryService);
UpdateConfiguration(); UpdateConfiguration();
@ -64,14 +68,14 @@ namespace API.Data
Console.WriteLine( Console.WriteLine(
"Migrating files from pre-v0.4.8. All Kavita config files are now located in config/"); "Migrating files from pre-v0.4.8. All Kavita config files are now located in config/");
Console.WriteLine($"Creating {DirectoryService.ConfigDirectory}"); Console.WriteLine($"Creating {directoryService.ConfigDirectory}");
DirectoryService.ExistOrCreate(DirectoryService.ConfigDirectory); directoryService.ExistOrCreate(directoryService.ConfigDirectory);
try try
{ {
CopyLooseLeafFiles(); CopyLooseLeafFiles(directoryService);
CopyAppFolders(); CopyAppFolders(directoryService);
// Then we need to update the config file to point to the new DB file // Then we need to update the config file to point to the new DB file
UpdateConfiguration(); UpdateConfiguration();
@ -84,43 +88,43 @@ namespace API.Data
// Finally delete everything in the source directory // Finally delete everything in the source directory
Console.WriteLine("Removing old files"); Console.WriteLine("Removing old files");
DeleteLooseFiles(); DeleteLooseFiles(directoryService);
DeleteAppFolders(); DeleteAppFolders(directoryService);
Console.WriteLine("Removing old files...DONE"); Console.WriteLine("Removing old files...DONE");
Console.WriteLine("Migration complete. All config files are now in config/ directory"); Console.WriteLine("Migration complete. All config files are now in config/ directory");
} }
private static void DeleteAppFolders() private static void DeleteAppFolders(IDirectoryService directoryService)
{ {
foreach (var folderToDelete in AppFolders) foreach (var folderToDelete in AppFolders)
{ {
if (!new DirectoryInfo(Path.Join(Directory.GetCurrentDirectory(), folderToDelete)).Exists) continue; if (!new DirectoryInfo(Path.Join(Directory.GetCurrentDirectory(), folderToDelete)).Exists) continue;
DirectoryService.ClearAndDeleteDirectory(Path.Join(Directory.GetCurrentDirectory(), folderToDelete)); directoryService.ClearAndDeleteDirectory(Path.Join(Directory.GetCurrentDirectory(), folderToDelete));
} }
} }
private static void DeleteLooseFiles() private static void DeleteLooseFiles(IDirectoryService directoryService)
{ {
var configFiles = LooseLeafFiles.Select(file => new FileInfo(Path.Join(Directory.GetCurrentDirectory(), file))) var configFiles = LooseLeafFiles.Select(file => new FileInfo(Path.Join(Directory.GetCurrentDirectory(), file)))
.Where(f => f.Exists); .Where(f => f.Exists);
DirectoryService.DeleteFiles(configFiles.Select(f => f.FullName)); directoryService.DeleteFiles(configFiles.Select(f => f.FullName));
} }
private static void CopyAppFolders() private static void CopyAppFolders(IDirectoryService directoryService)
{ {
Console.WriteLine("Moving folders to config"); Console.WriteLine("Moving folders to config");
foreach (var folderToMove in AppFolders) foreach (var folderToMove in AppFolders)
{ {
if (new DirectoryInfo(Path.Join(DirectoryService.ConfigDirectory, folderToMove)).Exists) continue; if (new DirectoryInfo(Path.Join(directoryService.ConfigDirectory, folderToMove)).Exists) continue;
try try
{ {
DirectoryService.CopyDirectoryToDirectory( directoryService.CopyDirectoryToDirectory(
Path.Join(Directory.GetCurrentDirectory(), folderToMove), Path.Join(directoryService.FileSystem.Directory.GetCurrentDirectory(), folderToMove),
Path.Join(DirectoryService.ConfigDirectory, folderToMove)); Path.Join(directoryService.ConfigDirectory, folderToMove));
} }
catch (Exception) catch (Exception)
{ {
@ -132,9 +136,9 @@ namespace API.Data
Console.WriteLine("Moving folders to config...DONE"); Console.WriteLine("Moving folders to config...DONE");
} }
private static void CopyLooseLeafFiles() private static void CopyLooseLeafFiles(IDirectoryService directoryService)
{ {
var configFiles = LooseLeafFiles.Select(file => new FileInfo(Path.Join(Directory.GetCurrentDirectory(), file))) var configFiles = LooseLeafFiles.Select(file => new FileInfo(Path.Join(directoryService.FileSystem.Directory.GetCurrentDirectory(), file)))
.Where(f => f.Exists); .Where(f => f.Exists);
// First step is to move all the files // First step is to move all the files
Console.WriteLine("Moving files to config/"); Console.WriteLine("Moving files to config/");
@ -142,7 +146,7 @@ namespace API.Data
{ {
try try
{ {
fileInfo.CopyTo(Path.Join(DirectoryService.ConfigDirectory, fileInfo.Name)); fileInfo.CopyTo(Path.Join(directoryService.ConfigDirectory, fileInfo.Name));
} }
catch (Exception) catch (Exception)
{ {
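Migrate and its helpers now receive IDirectoryService instead of reaching for the static DirectoryService. The call site is outside this diff, so the wiring below is a sketch only; the logger factory and the isDocker flag stand in for whatever the real startup code provides.

using System.IO.Abstractions;
using API.Data;
using API.Services;
using Microsoft.Extensions.Logging;

public static class ConfigMigrationWiring
{
    public static void RunConfigMigration(bool isDocker, ILoggerFactory loggerFactory)
    {
        // A real filesystem in production; tests can hand in a DirectoryService built over MockFileSystem.
        IDirectoryService directoryService = new DirectoryService(
            loggerFactory.CreateLogger<DirectoryService>(), new FileSystem());

        // Copying folders, moving loose files, and deleting the originals now all flow
        // through the injected service instead of static helpers.
        MigrateConfigFiles.Migrate(isDocker, directoryService);
    }
}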

View File

@ -29,10 +29,10 @@ namespace API.Data
/// <summary> /// <summary>
/// Run first. Will extract byte[]s from DB and write them to the cover directory. /// Run first. Will extract byte[]s from DB and write them to the cover directory.
/// </summary> /// </summary>
public static void ExtractToImages(DbContext context) public static void ExtractToImages(DbContext context, IDirectoryService directoryService, IImageService imageService)
{ {
Console.WriteLine("Migrating Cover Images to disk. Expect delay."); Console.WriteLine("Migrating Cover Images to disk. Expect delay.");
DirectoryService.ExistOrCreate(DirectoryService.CoverImageDirectory); directoryService.ExistOrCreate(directoryService.CoverImageDirectory);
Console.WriteLine("Extracting cover images for Series"); Console.WriteLine("Extracting cover images for Series");
var lockedSeries = SqlHelper.RawSqlQuery(context, "Select Id, CoverImage From Series Where CoverImage IS NOT NULL", x => var lockedSeries = SqlHelper.RawSqlQuery(context, "Select Id, CoverImage From Series Where CoverImage IS NOT NULL", x =>
@ -45,14 +45,14 @@ namespace API.Data
foreach (var series in lockedSeries) foreach (var series in lockedSeries)
{ {
if (series.CoverImage == null || !series.CoverImage.Any()) continue; if (series.CoverImage == null || !series.CoverImage.Any()) continue;
if (File.Exists(Path.Join(DirectoryService.CoverImageDirectory, if (File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory,
$"{ImageService.GetSeriesFormat(int.Parse(series.Id))}.png"))) continue; $"{ImageService.GetSeriesFormat(int.Parse(series.Id))}.png"))) continue;
try try
{ {
var stream = new MemoryStream(series.CoverImage); var stream = new MemoryStream(series.CoverImage);
stream.Position = 0; stream.Position = 0;
ImageService.WriteCoverThumbnail(stream, ImageService.GetSeriesFormat(int.Parse(series.Id))); imageService.WriteCoverThumbnail(stream, ImageService.GetSeriesFormat(int.Parse(series.Id)));
} }
catch (Exception e) catch (Exception e)
{ {
@ -71,14 +71,14 @@ namespace API.Data
foreach (var chapter in chapters) foreach (var chapter in chapters)
{ {
if (chapter.CoverImage == null || !chapter.CoverImage.Any()) continue; if (chapter.CoverImage == null || !chapter.CoverImage.Any()) continue;
if (File.Exists(Path.Join(DirectoryService.CoverImageDirectory, if (directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory,
$"{ImageService.GetChapterFormat(int.Parse(chapter.Id), int.Parse(chapter.ParentId))}.png"))) continue; $"{ImageService.GetChapterFormat(int.Parse(chapter.Id), int.Parse(chapter.ParentId))}.png"))) continue;
try try
{ {
var stream = new MemoryStream(chapter.CoverImage); var stream = new MemoryStream(chapter.CoverImage);
stream.Position = 0; stream.Position = 0;
ImageService.WriteCoverThumbnail(stream, $"{ImageService.GetChapterFormat(int.Parse(chapter.Id), int.Parse(chapter.ParentId))}"); imageService.WriteCoverThumbnail(stream, $"{ImageService.GetChapterFormat(int.Parse(chapter.Id), int.Parse(chapter.ParentId))}");
} }
catch (Exception e) catch (Exception e)
{ {
@ -97,13 +97,13 @@ namespace API.Data
foreach (var tag in tags) foreach (var tag in tags)
{ {
if (tag.CoverImage == null || !tag.CoverImage.Any()) continue; if (tag.CoverImage == null || !tag.CoverImage.Any()) continue;
if (File.Exists(Path.Join(DirectoryService.CoverImageDirectory, if (directoryService.FileSystem.File.Exists(Path.Join(directoryService.CoverImageDirectory,
$"{ImageService.GetCollectionTagFormat(int.Parse(tag.Id))}.png"))) continue; $"{ImageService.GetCollectionTagFormat(int.Parse(tag.Id))}.png"))) continue;
try try
{ {
var stream = new MemoryStream(tag.CoverImage); var stream = new MemoryStream(tag.CoverImage);
stream.Position = 0; stream.Position = 0;
ImageService.WriteCoverThumbnail(stream, $"{ImageService.GetCollectionTagFormat(int.Parse(tag.Id))}"); imageService.WriteCoverThumbnail(stream, $"{ImageService.GetCollectionTagFormat(int.Parse(tag.Id))}");
} }
catch (Exception e) catch (Exception e)
{ {
@ -116,13 +116,13 @@ namespace API.Data
/// Run after <see cref="ExtractToImages"/>. Will update the DB with names of files that were extracted. /// Run after <see cref="ExtractToImages"/>. Will update the DB with names of files that were extracted.
/// </summary> /// </summary>
/// <param name="context"></param> /// <param name="context"></param>
public static async Task UpdateDatabaseWithImages(DataContext context) public static async Task UpdateDatabaseWithImages(DataContext context, IDirectoryService directoryService)
{ {
Console.WriteLine("Updating Series entities"); Console.WriteLine("Updating Series entities");
var seriesCovers = await context.Series.Where(s => !string.IsNullOrEmpty(s.CoverImage)).ToListAsync(); var seriesCovers = await context.Series.Where(s => !string.IsNullOrEmpty(s.CoverImage)).ToListAsync();
foreach (var series in seriesCovers) foreach (var series in seriesCovers)
{ {
if (!File.Exists(Path.Join(DirectoryService.CoverImageDirectory, if (!directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory,
$"{ImageService.GetSeriesFormat(series.Id)}.png"))) continue; $"{ImageService.GetSeriesFormat(series.Id)}.png"))) continue;
series.CoverImage = $"{ImageService.GetSeriesFormat(series.Id)}.png"; series.CoverImage = $"{ImageService.GetSeriesFormat(series.Id)}.png";
} }
@ -133,7 +133,7 @@ namespace API.Data
var chapters = await context.Chapter.ToListAsync(); var chapters = await context.Chapter.ToListAsync();
foreach (var chapter in chapters) foreach (var chapter in chapters)
{ {
if (File.Exists(Path.Join(DirectoryService.CoverImageDirectory, if (directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory,
$"{ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId)}.png"))) $"{ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId)}.png")))
{ {
chapter.CoverImage = $"{ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId)}.png"; chapter.CoverImage = $"{ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId)}.png";
@ -149,7 +149,7 @@ namespace API.Data
{ {
var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), ChapterSortComparerForInChapterSorting).FirstOrDefault(); var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), ChapterSortComparerForInChapterSorting).FirstOrDefault();
if (firstChapter == null) continue; if (firstChapter == null) continue;
if (File.Exists(Path.Join(DirectoryService.CoverImageDirectory, if (directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory,
$"{ImageService.GetChapterFormat(firstChapter.Id, firstChapter.VolumeId)}.png"))) $"{ImageService.GetChapterFormat(firstChapter.Id, firstChapter.VolumeId)}.png")))
{ {
volume.CoverImage = $"{ImageService.GetChapterFormat(firstChapter.Id, firstChapter.VolumeId)}.png"; volume.CoverImage = $"{ImageService.GetChapterFormat(firstChapter.Id, firstChapter.VolumeId)}.png";
@ -163,7 +163,7 @@ namespace API.Data
var tags = await context.CollectionTag.ToListAsync(); var tags = await context.CollectionTag.ToListAsync();
foreach (var tag in tags) foreach (var tag in tags)
{ {
if (File.Exists(Path.Join(DirectoryService.CoverImageDirectory, if (directoryService.FileSystem.File.Exists(directoryService.FileSystem.Path.Join(directoryService.CoverImageDirectory,
$"{ImageService.GetCollectionTagFormat(tag.Id)}.png"))) $"{ImageService.GetCollectionTagFormat(tag.Id)}.png")))
{ {
tag.CoverImage = $"{ImageService.GetCollectionTagFormat(tag.Id)}.png"; tag.CoverImage = $"{ImageService.GetCollectionTagFormat(tag.Id)}.png";
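The cover-image migration keeps its two phases but now takes its collaborators as parameters. A sketch of the call order the doc comments require (extract first, then update the DB); the wrapper method, logger factory, and context are placeholders for the real call site, while the service construction matches the constructors used elsewhere in this PR.

using System.IO.Abstractions;
using System.Threading.Tasks;
using API.Data;
using API.Services;
using Microsoft.Extensions.Logging;

public static class CoverMigrationWiring
{
    public static async Task MigrateCovers(DataContext context, ILoggerFactory loggerFactory)
    {
        var directoryService = new DirectoryService(loggerFactory.CreateLogger<DirectoryService>(), new FileSystem());
        var imageService = new ImageService(loggerFactory.CreateLogger<ImageService>(), directoryService);

        // Phase 1: write the byte[] covers out of the DB onto disk.
        MigrateCoverImages.ExtractToImages(context, directoryService, imageService);

        // Phase 2: point the entities at the files phase 1 produced.
        await MigrateCoverImages.UpdateDatabaseWithImages(context, directoryService);
    }
}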

View File

@ -2,13 +2,20 @@
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
using API.Interfaces.Repositories;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories namespace API.Data.Repositories;
public interface IAppUserProgressRepository
{
void Update(AppUserProgress userProgress);
Task<int> CleanupAbandonedChapters();
Task<bool> UserHasProgress(LibraryType libraryType, int userId);
Task<AppUserProgress> GetUserProgressAsync(int chapterId, int userId);
}
public class AppUserProgressRepository : IAppUserProgressRepository
{ {
public class AppUserProgressRepository : IAppUserProgressRepository
{
private readonly DataContext _context; private readonly DataContext _context;
public AppUserProgressRepository(DataContext context) public AppUserProgressRepository(DataContext context)
@ -75,5 +82,4 @@ namespace API.Data.Repositories
.Where(p => p.ChapterId == chapterId && p.AppUserId == userId) .Where(p => p.ChapterId == chapterId && p.AppUserId == userId)
.FirstOrDefaultAsync(); .FirstOrDefaultAsync();
} }
}
} }

View File

@ -1,19 +1,32 @@
using System; using System.Collections.Generic;
using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.DTOs; using API.DTOs;
using API.DTOs.Reader; using API.DTOs.Reader;
using API.Entities; using API.Entities;
using API.Interfaces.Repositories;
using AutoMapper; using AutoMapper;
using AutoMapper.QueryableExtensions; using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories namespace API.Data.Repositories;
public interface IChapterRepository
{
void Update(Chapter chapter);
Task<IEnumerable<Chapter>> GetChaptersByIdsAsync(IList<int> chapterIds);
Task<IChapterInfoDto> GetChapterInfoDtoAsync(int chapterId);
Task<int> GetChapterTotalPagesAsync(int chapterId);
Task<Chapter> GetChapterAsync(int chapterId);
Task<ChapterDto> GetChapterDtoAsync(int chapterId);
Task<IList<MangaFile>> GetFilesForChapterAsync(int chapterId);
Task<IList<Chapter>> GetChaptersAsync(int volumeId);
Task<IList<MangaFile>> GetFilesForChaptersAsync(IReadOnlyList<int> chapterIds);
Task<string> GetChapterCoverImageAsync(int chapterId);
Task<IList<string>> GetAllCoverImagesAsync();
Task<IEnumerable<string>> GetCoverImagesForLockedChaptersAsync();
}
public class ChapterRepository : IChapterRepository
{ {
public class ChapterRepository : IChapterRepository
{
private readonly DataContext _context; private readonly DataContext _context;
private readonly IMapper _mapper; private readonly IMapper _mapper;
@ -191,5 +204,4 @@ namespace API.Data.Repositories
.AsNoTracking() .AsNoTracking()
.ToListAsync(); .ToListAsync();
} }
}
} }

View File

@ -3,15 +3,29 @@ using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.DTOs.CollectionTags; using API.DTOs.CollectionTags;
using API.Entities; using API.Entities;
using API.Interfaces.Repositories;
using AutoMapper; using AutoMapper;
using AutoMapper.QueryableExtensions; using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories namespace API.Data.Repositories;
public interface ICollectionTagRepository
{
void Add(CollectionTag tag);
void Remove(CollectionTag tag);
Task<IEnumerable<CollectionTagDto>> GetAllTagDtosAsync();
Task<IEnumerable<CollectionTagDto>> SearchTagDtosAsync(string searchQuery);
Task<string> GetCoverImageAsync(int collectionTagId);
Task<IEnumerable<CollectionTagDto>> GetAllPromotedTagDtosAsync();
Task<CollectionTag> GetTagAsync(int tagId);
Task<CollectionTag> GetFullTagAsync(int tagId);
void Update(CollectionTag tag);
Task<int> RemoveTagsWithoutSeries();
Task<IEnumerable<CollectionTag>> GetAllTagsAsync();
Task<IList<string>> GetAllCoverImagesAsync();
}
public class CollectionTagRepository : ICollectionTagRepository
{ {
public class CollectionTagRepository : ICollectionTagRepository
{
private readonly DataContext _context; private readonly DataContext _context;
private readonly IMapper _mapper; private readonly IMapper _mapper;
@ -121,5 +135,4 @@ namespace API.Data.Repositories
.AsNoTracking() .AsNoTracking()
.SingleOrDefaultAsync(); .SingleOrDefaultAsync();
} }
}
} }

View File

@ -2,12 +2,20 @@
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Entities; using API.Entities;
using API.Interfaces.Repositories;
using AutoMapper; using AutoMapper;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories; namespace API.Data.Repositories;
public interface IGenreRepository
{
void Attach(Genre genre);
void Remove(Genre genre);
Task<Genre> FindByNameAsync(string genreName);
Task<IList<Genre>> GetAllGenres();
Task RemoveAllGenreNoLongerAssociated(bool removeExternal = false);
}
public class GenreRepository : IGenreRepository public class GenreRepository : IGenreRepository
{ {
private readonly DataContext _context; private readonly DataContext _context;
@ -51,6 +59,6 @@ public class GenreRepository : IGenreRepository
public async Task<IList<Genre>> GetAllGenres() public async Task<IList<Genre>> GetAllGenres()
{ {
return await _context.Genre.ToListAsync();; return await _context.Genre.ToListAsync();
} }
} }

View File

@ -5,26 +5,41 @@ using System.Threading.Tasks;
using API.DTOs; using API.DTOs;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
using API.Interfaces.Repositories;
using AutoMapper; using AutoMapper;
using AutoMapper.QueryableExtensions; using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories namespace API.Data.Repositories;
{
[Flags] [Flags]
public enum LibraryIncludes public enum LibraryIncludes
{ {
None = 1, None = 1,
Series = 2, Series = 2,
AppUser = 4, AppUser = 4,
Folders = 8, Folders = 8,
// Ratings = 16 // Ratings = 16
} }
public class LibraryRepository : ILibraryRepository public interface ILibraryRepository
{ {
void Add(Library library);
void Update(Library library);
void Delete(Library library);
Task<IEnumerable<LibraryDto>> GetLibraryDtosAsync();
Task<bool> LibraryExists(string libraryName);
Task<Library> GetLibraryForIdAsync(int libraryId, LibraryIncludes includes);
Task<Library> GetFullLibraryForIdAsync(int libraryId);
Task<Library> GetFullLibraryForIdAsync(int libraryId, int seriesId);
Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName);
Task<IEnumerable<Library>> GetLibrariesAsync();
Task<bool> DeleteLibrary(int libraryId);
Task<IEnumerable<Library>> GetLibrariesForUserIdAsync(int userId);
Task<LibraryType> GetLibraryTypeAsync(int libraryId);
}
public class LibraryRepository : ILibraryRepository
{
private readonly DataContext _context; private readonly DataContext _context;
private readonly IMapper _mapper; private readonly IMapper _mapper;
@ -194,5 +209,4 @@ namespace API.Data.Repositories
} }
}
} }
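The [Flags] LibraryIncludes values above compose with bitwise OR, so callers state exactly which relations to hydrate. A one-line sketch, assuming an injected IUnitOfWork named _unitOfWork and an illustrative libraryId:

// Load a library with both its Series and Folder relations populated:
var library = await _unitOfWork.LibraryRepository
    .GetLibraryForIdAsync(libraryId, LibraryIncludes.Series | LibraryIncludes.Folders);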

View File

@ -2,14 +2,21 @@
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Entities; using API.Entities;
using API.Interfaces.Repositories;
using AutoMapper; using AutoMapper;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories namespace API.Data.Repositories;
public interface IPersonRepository
{
void Attach(Person person);
void Remove(Person person);
Task<IList<Person>> GetAllPeople();
Task RemoveAllPeopleNoLongerAssociated(bool removeExternal = false);
}
public class PersonRepository : IPersonRepository
{ {
public class PersonRepository : IPersonRepository
{
private readonly DataContext _context; private readonly DataContext _context;
private readonly IMapper _mapper; private readonly IMapper _mapper;
@ -56,5 +63,4 @@ namespace API.Data.Repositories
return await _context.Person return await _context.Person
.ToListAsync(); .ToListAsync();
} }
}
} }

View File

@ -4,15 +4,28 @@ using System.Threading.Tasks;
using API.DTOs.ReadingLists; using API.DTOs.ReadingLists;
using API.Entities; using API.Entities;
using API.Helpers; using API.Helpers;
using API.Interfaces.Repositories;
using AutoMapper; using AutoMapper;
using AutoMapper.QueryableExtensions; using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories namespace API.Data.Repositories;
public interface IReadingListRepository
{
Task<PagedList<ReadingListDto>> GetReadingListDtosForUserAsync(int userId, bool includePromoted, UserParams userParams);
Task<ReadingList> GetReadingListByIdAsync(int readingListId);
Task<IEnumerable<ReadingListItemDto>> GetReadingListItemDtosByIdAsync(int readingListId, int userId);
Task<ReadingListDto> GetReadingListDtoByIdAsync(int readingListId, int userId);
Task<IEnumerable<ReadingListItemDto>> AddReadingProgressModifiers(int userId, IList<ReadingListItemDto> items);
Task<ReadingListDto> GetReadingListDtoByTitleAsync(string title);
Task<IEnumerable<ReadingListItem>> GetReadingListItemsByIdAsync(int readingListId);
void Remove(ReadingListItem item);
void BulkRemove(IEnumerable<ReadingListItem> items);
void Update(ReadingList list);
}
public class ReadingListRepository : IReadingListRepository
{ {
public class ReadingListRepository : IReadingListRepository
{
private readonly DataContext _context; private readonly DataContext _context;
private readonly IMapper _mapper; private readonly IMapper _mapper;
@ -174,5 +187,4 @@ namespace API.Data.Repositories
} }
}
} }

View File

@ -1,11 +1,14 @@
using API.Entities; using API.Entities.Metadata;
using API.Entities.Metadata;
using API.Interfaces.Repositories;
namespace API.Data.Repositories namespace API.Data.Repositories;
public interface ISeriesMetadataRepository
{
void Update(SeriesMetadata seriesMetadata);
}
public class SeriesMetadataRepository : ISeriesMetadataRepository
{ {
public class SeriesMetadataRepository : ISeriesMetadataRepository
{
private readonly DataContext _context; private readonly DataContext _context;
public SeriesMetadataRepository(DataContext context) public SeriesMetadataRepository(DataContext context)
@ -17,5 +20,4 @@ namespace API.Data.Repositories
{ {
_context.SeriesMetadata.Update(seriesMetadata); _context.SeriesMetadata.Update(seriesMetadata);
} }
}
} }

View File

@ -11,16 +11,66 @@ using API.Entities.Enums;
using API.Entities.Metadata; using API.Entities.Metadata;
using API.Extensions; using API.Extensions;
using API.Helpers; using API.Helpers;
using API.Interfaces.Repositories;
using API.Services.Tasks; using API.Services.Tasks;
using AutoMapper; using AutoMapper;
using AutoMapper.QueryableExtensions; using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories namespace API.Data.Repositories;
public interface ISeriesRepository
{
void Attach(Series series);
void Update(Series series);
void Remove(Series series);
void Remove(IEnumerable<Series> series);
Task<bool> DoesSeriesNameExistInLibrary(string name, MangaFormat format);
/// <summary>
/// Adds user information like progress, ratings, etc
/// </summary>
/// <param name="libraryId"></param>
/// <param name="userId"></param>
/// <param name="userParams"></param>
/// <returns></returns>
Task<PagedList<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams, FilterDto filter);
/// <summary>
/// Does not add user information like progress, ratings, etc.
/// </summary>
/// <param name="libraryIds"></param>
/// <param name="searchQuery">Series name to search for</param>
/// <returns></returns>
Task<IEnumerable<SearchResultDto>> SearchSeries(int[] libraryIds, string searchQuery);
Task<IEnumerable<Series>> GetSeriesForLibraryIdAsync(int libraryId);
Task<SeriesDto> GetSeriesDtoByIdAsync(int seriesId, int userId);
Task<bool> DeleteSeriesAsync(int seriesId);
Task<Series> GetSeriesByIdAsync(int seriesId);
Task<IList<Series>> GetSeriesByIdsAsync(IList<int> seriesIds);
Task<int[]> GetChapterIdsForSeriesAsync(int[] seriesIds);
Task<IDictionary<int, IList<int>>> GetChapterIdWithSeriesIdForSeriesAsync(int[] seriesIds);
/// <summary>
/// Used to add Progress/Rating information to series list.
/// </summary>
/// <param name="userId"></param>
/// <param name="series"></param>
/// <returns></returns>
Task AddSeriesModifiers(int userId, List<SeriesDto> series);
Task<string> GetSeriesCoverImageAsync(int seriesId);
Task<IEnumerable<SeriesDto>> GetOnDeck(int userId, int libraryId, UserParams userParams, FilterDto filter);
Task<PagedList<SeriesDto>> GetRecentlyAdded(int libraryId, int userId, UserParams userParams, FilterDto filter); // NOTE: Probably put this in LibraryRepo
Task<SeriesMetadataDto> GetSeriesMetadata(int seriesId);
Task<PagedList<SeriesDto>> GetSeriesDtoForCollectionAsync(int collectionId, int userId, UserParams userParams);
Task<IList<MangaFile>> GetFilesForSeries(int seriesId);
Task<IEnumerable<SeriesDto>> GetSeriesDtoForIdsAsync(IEnumerable<int> seriesIds, int userId);
Task<IList<string>> GetAllCoverImagesAsync();
Task<IEnumerable<string>> GetLockedCoverImagesAsync();
Task<PagedList<Series>> GetFullSeriesForLibraryIdAsync(int libraryId, UserParams userParams);
Task<Series> GetFullSeriesForSeriesIdAsync(int seriesId);
Task<Chunk> GetChunkInfo(int libraryId = 0);
Task<IList<SeriesMetadata>> GetSeriesMetadataForIdsAsync(IEnumerable<int> seriesIds);
}
public class SeriesRepository : ISeriesRepository
{ {
public class SeriesRepository : ISeriesRepository
{
private readonly DataContext _context; private readonly DataContext _context;
private readonly IMapper _mapper; private readonly IMapper _mapper;
public SeriesRepository(DataContext context, IMapper mapper) public SeriesRepository(DataContext context, IMapper mapper)
@ -531,5 +581,4 @@ namespace API.Data.Repositories
.Include(sm => sm.CollectionTags) .Include(sm => sm.CollectionTags)
.ToListAsync(); .ToListAsync();
} }
}
} }

View File

@ -4,14 +4,20 @@ using System.Threading.Tasks;
using API.DTOs.Settings; using API.DTOs.Settings;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
using API.Interfaces.Repositories;
using AutoMapper; using AutoMapper;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories namespace API.Data.Repositories;
public interface ISettingsRepository
{
void Update(ServerSetting settings);
Task<ServerSettingDto> GetSettingsDtoAsync();
Task<ServerSetting> GetSettingAsync(ServerSettingKey key);
Task<IEnumerable<ServerSetting>> GetSettingsAsync();
}
public class SettingsRepository : ISettingsRepository
{ {
public class SettingsRepository : ISettingsRepository
{
private readonly DataContext _context; private readonly DataContext _context;
private readonly IMapper _mapper; private readonly IMapper _mapper;
@ -44,5 +50,4 @@ namespace API.Data.Repositories
{ {
return await _context.ServerSetting.ToListAsync(); return await _context.ServerSetting.ToListAsync();
} }
}
} }

View File

@ -6,26 +6,49 @@ using API.Constants;
using API.DTOs; using API.DTOs;
using API.DTOs.Reader; using API.DTOs.Reader;
using API.Entities; using API.Entities;
using API.Interfaces.Repositories;
using AutoMapper; using AutoMapper;
using AutoMapper.QueryableExtensions; using AutoMapper.QueryableExtensions;
using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Identity;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories namespace API.Data.Repositories;
[Flags]
public enum AppUserIncludes
{ {
[Flags]
public enum AppUserIncludes
{
None = 1, None = 1,
Progress = 2, Progress = 2,
Bookmarks = 4, Bookmarks = 4,
ReadingLists = 8, ReadingLists = 8,
Ratings = 16 Ratings = 16
} }
public class UserRepository : IUserRepository public interface IUserRepository
{ {
void Update(AppUser user);
void Update(AppUserPreferences preferences);
void Update(AppUserBookmark bookmark);
void Delete(AppUser user);
Task<IEnumerable<MemberDto>> GetMembersAsync();
Task<IEnumerable<AppUser>> GetAdminUsersAsync();
Task<IEnumerable<AppUser>> GetNonAdminUsersAsync();
Task<bool> IsUserAdmin(AppUser user);
Task<AppUserRating> GetUserRating(int seriesId, int userId);
Task<AppUserPreferences> GetPreferencesAsync(string username);
Task<IEnumerable<BookmarkDto>> GetBookmarkDtosForSeries(int userId, int seriesId);
Task<IEnumerable<BookmarkDto>> GetBookmarkDtosForVolume(int userId, int volumeId);
Task<IEnumerable<BookmarkDto>> GetBookmarkDtosForChapter(int userId, int chapterId);
Task<IEnumerable<BookmarkDto>> GetAllBookmarkDtos(int userId);
Task<AppUserBookmark> GetBookmarkForPage(int page, int chapterId, int userId);
Task<int> GetUserIdByApiKeyAsync(string apiKey);
Task<AppUser> GetUserByUsernameAsync(string username, AppUserIncludes includeFlags = AppUserIncludes.None);
Task<AppUser> GetUserByIdAsync(int userId, AppUserIncludes includeFlags = AppUserIncludes.None);
Task<int> GetUserIdByUsernameAsync(string username);
Task<AppUser> GetUserWithReadingListsByUsernameAsync(string username);
}
public class UserRepository : IUserRepository
{
private readonly DataContext _context; private readonly DataContext _context;
private readonly UserManager<AppUser> _userManager; private readonly UserManager<AppUser> _userManager;
private readonly IMapper _mapper; private readonly IMapper _mapper;
@ -255,5 +278,4 @@ namespace API.Data.Repositories
.AsNoTracking() .AsNoTracking()
.ToListAsync(); .ToListAsync();
} }
}
} }
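AppUserIncludes composes the same way as the library includes: OR the flags together and pass them to the lookup. A sketch with an illustrative username, under the same injected _unitOfWork assumption:

// Fetch a user with both reading progress and bookmarks eagerly loaded:
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(
    "admin", AppUserIncludes.Progress | AppUserIncludes.Bookmarks);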

View File

@ -4,15 +4,29 @@ using System.Threading.Tasks;
using API.Comparators; using API.Comparators;
using API.DTOs; using API.DTOs;
using API.Entities; using API.Entities;
using API.Interfaces.Repositories;
using AutoMapper; using AutoMapper;
using AutoMapper.QueryableExtensions; using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories namespace API.Data.Repositories;
public interface IVolumeRepository
{
void Add(Volume volume);
void Update(Volume volume);
void Remove(Volume volume);
Task<IList<MangaFile>> GetFilesForVolume(int volumeId);
Task<string> GetVolumeCoverImageAsync(int volumeId);
Task<IList<int>> GetChapterIdsByVolumeIds(IReadOnlyList<int> volumeIds);
Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId);
Task<Volume> GetVolumeAsync(int volumeId);
Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId);
Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false);
Task<IEnumerable<Volume>> GetVolumes(int seriesId);
Task<Volume> GetVolumeByIdAsync(int volumeId);
}
public class VolumeRepository : IVolumeRepository
{ {
public class VolumeRepository : IVolumeRepository
{
private readonly DataContext _context; private readonly DataContext _context;
private readonly IMapper _mapper; private readonly IMapper _mapper;
@ -83,6 +97,7 @@ namespace API.Data.Repositories
/// Returns all volumes that contain a seriesId in passed array. /// Returns all volumes that contain a seriesId in passed array.
/// </summary> /// </summary>
/// <param name="seriesIds"></param> /// <param name="seriesIds"></param>
/// <param name="includeChapters">Include chapter entities</param>
/// <returns></returns> /// <returns></returns>
public async Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false) public async Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false)
{ {
@ -157,7 +172,7 @@ namespace API.Data.Repositories
var volumes = await _context.Volume var volumes = await _context.Volume
.Where(vol => vol.SeriesId == seriesId) .Where(vol => vol.SeriesId == seriesId)
.Include(vol => vol.Chapters) .Include(vol => vol.Chapters)
.ThenInclude(c => c.People) // TODO: Measure cost of this .ThenInclude(c => c.People)
.OrderBy(volume => volume.Number) .OrderBy(volume => volume.Number)
.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider) .ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
.AsNoTracking() .AsNoTracking()
@ -205,5 +220,4 @@ namespace API.Data.Repositories
} }
}
} }

View File

@ -35,13 +35,13 @@ namespace API.Data
} }
} }
public static async Task SeedSettings(DataContext context) public static async Task SeedSettings(DataContext context, IDirectoryService directoryService)
{ {
await context.Database.EnsureCreatedAsync(); await context.Database.EnsureCreatedAsync();
IList<ServerSetting> defaultSettings = new List<ServerSetting>() IList<ServerSetting> defaultSettings = new List<ServerSetting>()
{ {
new () {Key = ServerSettingKey.CacheDirectory, Value = DirectoryService.CacheDirectory}, new () {Key = ServerSettingKey.CacheDirectory, Value = directoryService.CacheDirectory},
new () {Key = ServerSettingKey.TaskScan, Value = "daily"}, new () {Key = ServerSettingKey.TaskScan, Value = "daily"},
new () {Key = ServerSettingKey.LoggingLevel, Value = "Information"}, // Not used from DB, but DB is sync with appSettings.json new () {Key = ServerSettingKey.LoggingLevel, Value = "Information"}, // Not used from DB, but DB is sync with appSettings.json
new () {Key = ServerSettingKey.TaskBackup, Value = "weekly"}, new () {Key = ServerSettingKey.TaskBackup, Value = "weekly"},
@ -71,7 +71,7 @@ namespace API.Data
context.ServerSetting.First(s => s.Key == ServerSettingKey.LoggingLevel).Value = context.ServerSetting.First(s => s.Key == ServerSettingKey.LoggingLevel).Value =
Configuration.LogLevel + string.Empty; Configuration.LogLevel + string.Empty;
context.ServerSetting.First(s => s.Key == ServerSettingKey.CacheDirectory).Value = context.ServerSetting.First(s => s.Key == ServerSettingKey.CacheDirectory).Value =
DirectoryService.CacheDirectory + string.Empty; directoryService.CacheDirectory + string.Empty;
context.ServerSetting.First(s => s.Key == ServerSettingKey.BackupDirectory).Value = context.ServerSetting.First(s => s.Key == ServerSettingKey.BackupDirectory).Value =
DirectoryService.BackupDirectory + string.Empty; DirectoryService.BackupDirectory + string.Empty;

View File

@ -1,15 +1,33 @@
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data.Repositories; using API.Data.Repositories;
using API.Entities; using API.Entities;
using API.Interfaces;
using API.Interfaces.Repositories;
using AutoMapper; using AutoMapper;
using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Identity;
namespace API.Data namespace API.Data;
public interface IUnitOfWork
{
ISeriesRepository SeriesRepository { get; }
IUserRepository UserRepository { get; }
ILibraryRepository LibraryRepository { get; }
IVolumeRepository VolumeRepository { get; }
ISettingsRepository SettingsRepository { get; }
IAppUserProgressRepository AppUserProgressRepository { get; }
ICollectionTagRepository CollectionTagRepository { get; }
IChapterRepository ChapterRepository { get; }
IReadingListRepository ReadingListRepository { get; }
ISeriesMetadataRepository SeriesMetadataRepository { get; }
IPersonRepository PersonRepository { get; }
IGenreRepository GenreRepository { get; }
bool Commit();
Task<bool> CommitAsync();
bool HasChanges();
bool Rollback();
Task<bool> RollbackAsync();
}
public class UnitOfWork : IUnitOfWork
{ {
public class UnitOfWork : IUnitOfWork
{
private readonly DataContext _context; private readonly DataContext _context;
private readonly IMapper _mapper; private readonly IMapper _mapper;
private readonly UserManager<AppUser> _userManager; private readonly UserManager<AppUser> _userManager;
@ -81,5 +99,4 @@ namespace API.Data
_context.Dispose(); _context.Dispose();
return true; return true;
} }
}
} }
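With the interface now co-located with UnitOfWork, the consumer pattern is unchanged: mutate through a repository, commit once, roll back on failure. A sketch assuming an injected IUnitOfWork named _unitOfWork; the Series rename is purely illustrative.

public async Task RenameSeries(int seriesId, string newName)
{
    var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId);
    series.Name = newName;                        // illustrative mutation
    _unitOfWork.SeriesRepository.Update(series);

    if (_unitOfWork.HasChanges() && !await _unitOfWork.CommitAsync())
    {
        await _unitOfWork.RollbackAsync();        // undo the failed batch
    }
}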

View File

@ -2,7 +2,6 @@
using System.Collections.Generic; using System.Collections.Generic;
using API.Entities.Enums; using API.Entities.Enums;
using API.Entities.Interfaces; using API.Entities.Interfaces;
using API.Entities.Metadata;
using API.Parser; using API.Parser;
namespace API.Entities namespace API.Entities

View File

@ -8,7 +8,7 @@ namespace API.Entities
public class Genre public class Genre
{ {
public int Id { get; set; } public int Id { get; set; }
public string Title { get; set; } // TODO: Rename this to Title public string Title { get; set; }
public string NormalizedTitle { get; set; } public string NormalizedTitle { get; set; }
public bool ExternalTag { get; set; } public bool ExternalTag { get; set; }

View File

@ -1,8 +1,6 @@
using System.IO.Abstractions; using System.IO.Abstractions;
using API.Data; using API.Data;
using API.Helpers; using API.Helpers;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services; using API.Services;
using API.Services.Tasks; using API.Services.Tasks;
using API.SignalR.Presence; using API.SignalR.Presence;
@ -37,6 +35,7 @@ namespace API.Extensions
services.AddScoped<IVersionUpdaterService, VersionUpdaterService>(); services.AddScoped<IVersionUpdaterService, VersionUpdaterService>();
services.AddScoped<IDownloadService, DownloadService>(); services.AddScoped<IDownloadService, DownloadService>();
services.AddScoped<IReaderService, ReaderService>(); services.AddScoped<IReaderService, ReaderService>();
services.AddScoped<IReadingItemService, ReadingItemService>();
services.AddScoped<IAccountService, AccountService>(); services.AddScoped<IAccountService, AccountService>();

View File

@ -1,86 +0,0 @@
using System.IO;
using System.Linq;
using API.Comparators;
namespace API.Extensions
{
public static class DirectoryInfoExtensions
{
private static readonly NaturalSortComparer Comparer = new NaturalSortComparer();
public static void Empty(this DirectoryInfo directory)
{
// NOTE: We have this in DirectoryService.Empty(), do we need this here?
foreach(FileInfo file in directory.EnumerateFiles()) file.Delete();
foreach(DirectoryInfo subDirectory in directory.EnumerateDirectories()) subDirectory.Delete(true);
}
public static void RemoveNonImages(this DirectoryInfo directory)
{
foreach (var file in directory.EnumerateFiles())
{
if (!Parser.Parser.IsImage(file.FullName))
{
file.Delete();
}
}
}
/// <summary>
/// Flattens all files in subfolders to the passed directory recursively.
///
///
/// foo<para />
/// ├── 1.txt<para />
/// ├── 2.txt<para />
/// ├── 3.txt<para />
/// ├── 4.txt<para />
/// └── bar<para />
/// ├── 1.txt<para />
/// ├── 2.txt<para />
/// └── 5.txt<para />
///
/// becomes:<para />
/// foo<para />
/// ├── 1.txt<para />
/// ├── 2.txt<para />
/// ├── 3.txt<para />
/// ├── 4.txt<para />
/// ├── bar_1.txt<para />
/// ├── bar_2.txt<para />
/// └── bar_5.txt<para />
/// </summary>
/// <param name="directory"></param>
public static void Flatten(this DirectoryInfo directory)
{
var index = 0;
FlattenDirectory(directory, directory, ref index);
}
private static void FlattenDirectory(DirectoryInfo root, DirectoryInfo directory, ref int directoryIndex)
{
if (!root.FullName.Equals(directory.FullName))
{
var fileIndex = 1;
foreach (var file in directory.EnumerateFiles().OrderBy(file => file.FullName, Comparer))
{
if (file.Directory == null) continue;
var paddedIndex = Parser.Parser.PadZeros(directoryIndex + "");
// We need to rename the files so that after flattening, they are in the order we found them
var newName = $"{paddedIndex}_{Parser.Parser.PadZeros(fileIndex + "")}{file.Extension}";
var newPath = Path.Join(root.FullName, newName);
if (!File.Exists(newPath)) file.MoveTo(newPath);
fileIndex++;
}
directoryIndex++;
}
var sort = new NaturalSortComparer();
foreach (var subDirectory in directory.EnumerateDirectories().OrderBy(d => d.FullName, sort))
{
FlattenDirectory(root, subDirectory, ref directoryIndex);
}
}
}
}
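This extensions file is deleted outright; per the direction of this PR, the flatten/remove-non-images behaviour moves onto the directory service, where tests can reach it. The exact method signature is not shown in this diff, so the sketch below assumes a Flatten(string directoryPath) instance method and illustrative cache paths.

using System.IO.Abstractions.TestingHelpers;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;

public class FlattenTests
{
    [Fact]
    public void Flatten_MovesNestedFilesUpToTheRoot()
    {
        var fileSystem = new MockFileSystem();
        fileSystem.AddFile("C:/cache/1/001.jpg", new MockFileData(""));
        fileSystem.AddFile("C:/cache/1/Part 2/001.jpg", new MockFileData(""));
        var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);

        directoryService.Flatten("C:/cache/1/");   // assumed signature

        // Both images should now sit directly under the root, renamed to preserve order.
        Assert.Equal(2, fileSystem.Directory.GetFiles("C:/cache/1/").Length);
    }
}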

View File

@ -1,10 +1,4 @@
using System; namespace API.Extensions
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace API.Extensions
{ {
public static class EnumerableExtensions public static class EnumerableExtensions
{ {

View File

@ -1,5 +1,7 @@
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Runtime.Intrinsics.Arm;
using System.Security.Cryptography;
using System.Text; using System.Text;
using System.Text.Json; using System.Text.Json;
using API.Helpers; using API.Helpers;
@ -30,7 +32,8 @@ namespace API.Extensions
public static void AddCacheHeader(this HttpResponse response, byte[] content) public static void AddCacheHeader(this HttpResponse response, byte[] content)
{ {
if (content == null || content.Length <= 0) return; if (content == null || content.Length <= 0) return;
using var sha1 = new System.Security.Cryptography.SHA256CryptoServiceProvider(); using var sha1 = SHA256.Create();
response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(content).Select(x => x.ToString("X2")))); response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(content).Select(x => x.ToString("X2"))));
} }
@ -43,7 +46,7 @@ namespace API.Extensions
{ {
if (filename == null || filename.Length <= 0) return; if (filename == null || filename.Length <= 0) return;
var hashContent = filename + File.GetLastWriteTimeUtc(filename); var hashContent = filename + File.GetLastWriteTimeUtc(filename);
using var sha1 = new System.Security.Cryptography.SHA256CryptoServiceProvider(); using var sha1 = SHA256.Create();
response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(Encoding.UTF8.GetBytes(hashContent)).Select(x => x.ToString("X2")))); response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(Encoding.UTF8.GetBytes(hashContent)).Select(x => x.ToString("X2"))));
} }
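The obsolete SHA256CryptoServiceProvider gives way to SHA256.Create(), but the ETag itself is unchanged: a hex-encoded SHA256 over either the response bytes or the filename plus its last write time. A distilled sketch of that computation; the helper name is mine, not from the codebase.

using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;

public static class EtagSketch
{
    public static string ComputeEtag(byte[] content)
    {
        using var sha256 = SHA256.Create();
        return string.Concat(sha256.ComputeHash(content).Select(x => x.ToString("X2")));
    }

    // Content-based variant: ComputeEtag(responseBytes)
    // File-based variant:    ComputeEtag(Encoding.UTF8.GetBytes(filename + File.GetLastWriteTimeUtc(filename)))
}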

View File

@ -1,5 +1,4 @@
using System; using System;
using System.IO;
using API.Entities; using API.Entities;
using API.Entities.Interfaces; using API.Entities.Interfaces;
using API.Services; using API.Services;

View File

@ -36,26 +36,14 @@ public static class GenreHelper
public static void KeepOnlySameGenreBetweenLists(ICollection<Genre> existingGenres, ICollection<Genre> removeAllExcept, Action<Genre> action = null) public static void KeepOnlySameGenreBetweenLists(ICollection<Genre> existingGenres, ICollection<Genre> removeAllExcept, Action<Genre> action = null)
{ {
// var normalizedNames = names.Select(s => Parser.Parser.Normalize(s.Trim()))
// .Where(s => !string.IsNullOrEmpty(s)).ToList();
// var localNamesNotInComicInfos = seriesGenres.Where(g =>
// !normalizedNames.Contains(g.NormalizedName) && g.ExternalTag == isExternal);
//
// foreach (var nonExisting in localNamesNotInComicInfos)
// {
// // TODO: Maybe I need to do a cleanup here
// action(nonExisting);
// }
var existing = existingGenres.ToList(); var existing = existingGenres.ToList();
foreach (var genre in existing) foreach (var genre in existing)
{ {
var existingPerson = removeAllExcept.FirstOrDefault(g => g.ExternalTag == genre.ExternalTag && genre.NormalizedTitle.Equals(g.NormalizedTitle)); var existingPerson = removeAllExcept.FirstOrDefault(g => g.ExternalTag == genre.ExternalTag && genre.NormalizedTitle.Equals(g.NormalizedTitle));
if (existingPerson == null) if (existingPerson != null) continue;
{
existingGenres.Remove(genre); existingGenres.Remove(genre);
action?.Invoke(genre); action?.Invoke(genre);
} }
}
} }
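A hedged illustration of what KeepOnlySameGenreBetweenLists intends after this refactor (genre names are purely illustrative):

// existingGenres:  [Action, Comedy, Horror]
// removeAllExcept: [Action, Horror]
// Comedy has no match on ExternalTag + NormalizedTitle in removeAllExcept, so it is removed
// from existingGenres and action(Comedy) is invoked; Action and Horror are left untouched.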

View File

@ -1,22 +0,0 @@
using System.Threading.Tasks;
namespace API.Interfaces
{
public interface ITaskScheduler
{
/// <summary>
/// For use on Server startup
/// </summary>
void ScheduleTasks();
Task ScheduleStatsTasks();
void ScheduleUpdaterTasks();
void ScanLibrary(int libraryId, bool forceUpdate = false);
void CleanupChapters(int[] chapterIds);
void RefreshMetadata(int libraryId, bool forceUpdate = true);
void CleanupTemp();
void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false);
void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false);
void CancelStatsTasks();
Task RunStatCollection();
}
}

View File

@ -1,26 +0,0 @@
using System.Threading.Tasks;
using API.Interfaces.Repositories;
namespace API.Interfaces
{
public interface IUnitOfWork
{
ISeriesRepository SeriesRepository { get; }
IUserRepository UserRepository { get; }
ILibraryRepository LibraryRepository { get; }
IVolumeRepository VolumeRepository { get; }
ISettingsRepository SettingsRepository { get; }
IAppUserProgressRepository AppUserProgressRepository { get; }
ICollectionTagRepository CollectionTagRepository { get; }
IChapterRepository ChapterRepository { get; }
IReadingListRepository ReadingListRepository { get; }
ISeriesMetadataRepository SeriesMetadataRepository { get; }
IPersonRepository PersonRepository { get; }
IGenreRepository GenreRepository { get; }
bool Commit();
Task<bool> CommitAsync();
bool HasChanges();
bool Rollback();
Task<bool> RollbackAsync();
}
}

View File

@ -1,14 +0,0 @@
using System.Threading.Tasks;
using API.Entities;
using API.Entities.Enums;
namespace API.Interfaces.Repositories
{
public interface IAppUserProgressRepository
{
void Update(AppUserProgress userProgress);
Task<int> CleanupAbandonedChapters();
Task<bool> UserHasProgress(LibraryType libraryType, int userId);
Task<AppUserProgress> GetUserProgressAsync(int chapterId, int userId);
}
}

View File

@ -1,24 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.DTOs;
using API.DTOs.Reader;
using API.Entities;
namespace API.Interfaces.Repositories
{
public interface IChapterRepository
{
void Update(Chapter chapter);
Task<IEnumerable<Chapter>> GetChaptersByIdsAsync(IList<int> chapterIds);
Task<IChapterInfoDto> GetChapterInfoDtoAsync(int chapterId);
Task<int> GetChapterTotalPagesAsync(int chapterId);
Task<Chapter> GetChapterAsync(int chapterId);
Task<ChapterDto> GetChapterDtoAsync(int chapterId);
Task<IList<MangaFile>> GetFilesForChapterAsync(int chapterId);
Task<IList<Chapter>> GetChaptersAsync(int volumeId);
Task<IList<MangaFile>> GetFilesForChaptersAsync(IReadOnlyList<int> chapterIds);
Task<string> GetChapterCoverImageAsync(int chapterId);
Task<IList<string>> GetAllCoverImagesAsync();
Task<IEnumerable<string>> GetCoverImagesForLockedChaptersAsync();
}
}

View File

@ -1,23 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.DTOs.CollectionTags;
using API.Entities;
namespace API.Interfaces.Repositories
{
public interface ICollectionTagRepository
{
void Add(CollectionTag tag);
void Remove(CollectionTag tag);
Task<IEnumerable<CollectionTagDto>> GetAllTagDtosAsync();
Task<IEnumerable<CollectionTagDto>> SearchTagDtosAsync(string searchQuery);
Task<string> GetCoverImageAsync(int collectionTagId);
Task<IEnumerable<CollectionTagDto>> GetAllPromotedTagDtosAsync();
Task<CollectionTag> GetTagAsync(int tagId);
Task<CollectionTag> GetFullTagAsync(int tagId);
void Update(CollectionTag tag);
Task<int> RemoveTagsWithoutSeries();
Task<IEnumerable<CollectionTag>> GetAllTagsAsync();
Task<IList<string>> GetAllCoverImagesAsync();
}
}

View File

@ -1,15 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Entities;
namespace API.Interfaces.Repositories
{
public interface IGenreRepository
{
void Attach(Genre genre);
void Remove(Genre genre);
Task<Genre> FindByNameAsync(string genreName);
Task<IList<Genre>> GetAllGenres();
Task RemoveAllGenreNoLongerAssociated(bool removeExternal = false);
}
}

View File

@ -1,26 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Data.Repositories;
using API.DTOs;
using API.Entities;
using API.Entities.Enums;
namespace API.Interfaces.Repositories
{
public interface ILibraryRepository
{
void Add(Library library);
void Update(Library library);
void Delete(Library library);
Task<IEnumerable<LibraryDto>> GetLibraryDtosAsync();
Task<bool> LibraryExists(string libraryName);
Task<Library> GetLibraryForIdAsync(int libraryId, LibraryIncludes includes);
Task<Library> GetFullLibraryForIdAsync(int libraryId);
Task<Library> GetFullLibraryForIdAsync(int libraryId, int seriesId);
Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName);
Task<IEnumerable<Library>> GetLibrariesAsync();
Task<bool> DeleteLibrary(int libraryId);
Task<IEnumerable<Library>> GetLibrariesForUserIdAsync(int userId);
Task<LibraryType> GetLibraryTypeAsync(int libraryId);
}
}

View File

@ -1,14 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Entities;
namespace API.Interfaces.Repositories
{
public interface IPersonRepository
{
void Attach(Person person);
void Remove(Person person);
Task<IList<Person>> GetAllPeople();
Task RemoveAllPeopleNoLongerAssociated(bool removeExternal = false);
}
}

View File

@ -1,22 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.DTOs.ReadingLists;
using API.Entities;
using API.Helpers;
namespace API.Interfaces.Repositories
{
public interface IReadingListRepository
{
Task<PagedList<ReadingListDto>> GetReadingListDtosForUserAsync(int userId, bool includePromoted, UserParams userParams);
Task<ReadingList> GetReadingListByIdAsync(int readingListId);
Task<IEnumerable<ReadingListItemDto>> GetReadingListItemDtosByIdAsync(int readingListId, int userId);
Task<ReadingListDto> GetReadingListDtoByIdAsync(int readingListId, int userId);
Task<IEnumerable<ReadingListItemDto>> AddReadingProgressModifiers(int userId, IList<ReadingListItemDto> items);
Task<ReadingListDto> GetReadingListDtoByTitleAsync(string title);
Task<IEnumerable<ReadingListItem>> GetReadingListItemsByIdAsync(int readingListId);
void Remove(ReadingListItem item);
void BulkRemove(IEnumerable<ReadingListItem> items);
void Update(ReadingList list);
}
}

View File

@ -1,10 +0,0 @@
using API.Entities;
using API.Entities.Metadata;
namespace API.Interfaces.Repositories
{
public interface ISeriesMetadataRepository
{
void Update(SeriesMetadata seriesMetadata);
}
}

View File

@ -1,63 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Data.Scanner;
using API.DTOs;
using API.DTOs.Filtering;
using API.Entities;
using API.Entities.Enums;
using API.Entities.Metadata;
using API.Helpers;
namespace API.Interfaces.Repositories
{
public interface ISeriesRepository
{
void Attach(Series series);
void Update(Series series);
void Remove(Series series);
void Remove(IEnumerable<Series> series);
Task<bool> DoesSeriesNameExistInLibrary(string name, MangaFormat format);
/// <summary>
/// Adds user information like progress, ratings, etc
/// </summary>
/// <param name="libraryId"></param>
/// <param name="userId"></param>
/// <param name="userParams"></param>
/// <returns></returns>
Task<PagedList<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams, FilterDto filter);
/// <summary>
/// Does not add user information like progress, ratings, etc.
/// </summary>
/// <param name="libraryIds"></param>
/// <param name="searchQuery">Series name to search for</param>
/// <returns></returns>
Task<IEnumerable<SearchResultDto>> SearchSeries(int[] libraryIds, string searchQuery);
Task<IEnumerable<Series>> GetSeriesForLibraryIdAsync(int libraryId);
Task<SeriesDto> GetSeriesDtoByIdAsync(int seriesId, int userId);
Task<bool> DeleteSeriesAsync(int seriesId);
Task<Series> GetSeriesByIdAsync(int seriesId);
Task<IList<Series>> GetSeriesByIdsAsync(IList<int> seriesIds);
Task<int[]> GetChapterIdsForSeriesAsync(int[] seriesIds);
Task<IDictionary<int, IList<int>>> GetChapterIdWithSeriesIdForSeriesAsync(int[] seriesIds);
/// <summary>
/// Used to add Progress/Rating information to series list.
/// </summary>
/// <param name="userId"></param>
/// <param name="series"></param>
/// <returns></returns>
Task AddSeriesModifiers(int userId, List<SeriesDto> series);
Task<string> GetSeriesCoverImageAsync(int seriesId);
Task<IEnumerable<SeriesDto>> GetOnDeck(int userId, int libraryId, UserParams userParams, FilterDto filter);
Task<PagedList<SeriesDto>> GetRecentlyAdded(int libraryId, int userId, UserParams userParams, FilterDto filter); // NOTE: Probably put this in LibraryRepo
Task<SeriesMetadataDto> GetSeriesMetadata(int seriesId);
Task<PagedList<SeriesDto>> GetSeriesDtoForCollectionAsync(int collectionId, int userId, UserParams userParams);
Task<IList<MangaFile>> GetFilesForSeries(int seriesId);
Task<IEnumerable<SeriesDto>> GetSeriesDtoForIdsAsync(IEnumerable<int> seriesIds, int userId);
Task<IList<string>> GetAllCoverImagesAsync();
Task<IEnumerable<string>> GetLockedCoverImagesAsync();
Task<PagedList<Series>> GetFullSeriesForLibraryIdAsync(int libraryId, UserParams userParams);
Task<Series> GetFullSeriesForSeriesIdAsync(int seriesId);
Task<Chunk> GetChunkInfo(int libraryId = 0);
Task<IList<SeriesMetadata>> GetSeriesMetadataForIdsAsync(IEnumerable<int> seriesIds);
}
}

View File

@ -1,17 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.DTOs.Settings;
using API.Entities;
using API.Entities.Enums;
namespace API.Interfaces.Repositories
{
public interface ISettingsRepository
{
void Update(ServerSetting settings);
Task<ServerSettingDto> GetSettingsDtoAsync();
Task<ServerSetting> GetSettingAsync(ServerSettingKey key);
Task<IEnumerable<ServerSetting>> GetSettingsAsync();
}
}

View File

@ -1,33 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Data.Repositories;
using API.DTOs;
using API.DTOs.Reader;
using API.Entities;
namespace API.Interfaces.Repositories
{
public interface IUserRepository
{
void Update(AppUser user);
void Update(AppUserPreferences preferences);
void Update(AppUserBookmark bookmark);
public void Delete(AppUser user);
Task<IEnumerable<MemberDto>> GetMembersAsync();
Task<IEnumerable<AppUser>> GetAdminUsersAsync();
Task<IEnumerable<AppUser>> GetNonAdminUsersAsync();
Task<bool> IsUserAdmin(AppUser user);
Task<AppUserRating> GetUserRating(int seriesId, int userId);
Task<AppUserPreferences> GetPreferencesAsync(string username);
Task<IEnumerable<BookmarkDto>> GetBookmarkDtosForSeries(int userId, int seriesId);
Task<IEnumerable<BookmarkDto>> GetBookmarkDtosForVolume(int userId, int volumeId);
Task<IEnumerable<BookmarkDto>> GetBookmarkDtosForChapter(int userId, int chapterId);
Task<IEnumerable<BookmarkDto>> GetAllBookmarkDtos(int userId);
Task<AppUserBookmark> GetBookmarkForPage(int page, int chapterId, int userId);
Task<int> GetUserIdByApiKeyAsync(string apiKey);
Task<AppUser> GetUserByUsernameAsync(string username, AppUserIncludes includeFlags = AppUserIncludes.None);
Task<AppUser> GetUserByIdAsync(int userId, AppUserIncludes includeFlags = AppUserIncludes.None);
Task<int> GetUserIdByUsernameAsync(string username);
Task<AppUser> GetUserWithReadingListsByUsernameAsync(string username);
}
}

View File

@ -1,25 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.DTOs;
using API.Entities;
namespace API.Interfaces.Repositories
{
public interface IVolumeRepository
{
void Add(Volume volume);
void Update(Volume volume);
void Remove(Volume volume);
Task<IList<MangaFile>> GetFilesForVolume(int volumeId);
Task<string> GetVolumeCoverImageAsync(int volumeId);
Task<IList<int>> GetChapterIdsByVolumeIds(IReadOnlyList<int> volumeIds);
// From Series Repo
Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId);
Task<Volume> GetVolumeAsync(int volumeId);
Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId);
Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false);
Task<IEnumerable<Volume>> GetVolumes(int seriesId);
Task<Volume> GetVolumeByIdAsync(int volumeId);
}
}

View File

@ -1,12 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Entities;
using API.Errors;
namespace API.Interfaces.Services
{
public interface IAccountService
{
Task<IEnumerable<ApiException>> ChangeUserPassword(AppUser user, string newPassword);
}
}

View File

@ -1,21 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO.Compression;
using System.Threading.Tasks;
using API.Archive;
using API.Data.Metadata;
namespace API.Interfaces.Services
{
public interface IArchiveService
{
void ExtractArchive(string archivePath, string extractPath);
int GetNumberOfPagesFromArchive(string archivePath);
string GetCoverImage(string archivePath, string fileName);
bool IsValidArchive(string archivePath);
ComicInfo GetComicInfo(string archivePath);
ArchiveLibrary CanOpen(string archivePath);
bool ArchiveNeedsFlattening(ZipArchive archive);
Task<Tuple<byte[], string>> CreateZipForDownload(IEnumerable<string> files, string tempFolder);
}
}

View File

@ -1,20 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
namespace API.Interfaces.Services
{
public interface IBackupService
{
Task BackupDatabase();
/// <summary>
/// Returns a list of full paths of the log files detailed in <see cref="IConfiguration"/>.
/// </summary>
/// <param name="maxRollingFiles"></param>
/// <param name="logFileName"></param>
/// <returns></returns>
IEnumerable<string> LogFiles(int maxRollingFiles, string logFileName);
void CleanupBackups();
}
}

View File

@ -1,33 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Data.Metadata;
using API.Parser;
using VersOne.Epub;
namespace API.Interfaces.Services
{
public interface IBookService
{
int GetNumberOfPages(string filePath);
string GetCoverImage(string fileFilePath, string fileName);
Task<Dictionary<string, int>> CreateKeyToPageMappingAsync(EpubBookRef book);
/// <summary>
/// Scopes styles to .reading-section and replaces img src to the passed apiBase
/// </summary>
/// <param name="stylesheetHtml"></param>
/// <param name="apiBase"></param>
/// <param name="filename">If the stylesheetHtml contains Import statements, when scoping the filename, scope needs to be wrt filepath.</param>
/// <param name="book">Book Reference, needed for if you expect Import statements</param>
/// <returns></returns>
Task<string> ScopeStyles(string stylesheetHtml, string apiBase, string filename, EpubBookRef book);
ComicInfo GetComicInfo(string filePath);
ParserInfo ParseInfo(string filePath);
/// <summary>
/// Extracts a PDF file's pages as images to a target directory
/// </summary>
/// <param name="fileFilePath"></param>
/// <param name="targetDirectory">Where the files will be extracted to. If doesn't exist, will be created.</param>
void ExtractPdfImages(string fileFilePath, string targetDirectory);
}
}

View File

@ -1,41 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Entities;
namespace API.Interfaces.Services
{
public interface ICacheService
{
/// <summary>
/// Ensures the cache is created for the given chapter and if not, will create it. Should be called before any other
/// cache operations (except cleanup).
/// </summary>
/// <param name="chapterId"></param>
/// <returns>Chapter for the passed chapterId. Side-effect from ensuring cache.</returns>
Task<Chapter> Ensure(int chapterId);
/// <summary>
/// Clears cache directory of all folders and files.
/// </summary>
void Cleanup();
/// <summary>
/// Clears cache directory of all volumes. This can be invoked from deleting a library or a series.
/// </summary>
/// <param name="chapterIds">Volumes that belong to that library. Assume the library might have been deleted before this invocation.</param>
void CleanupChapters(IEnumerable<int> chapterIds);
/// <summary>
/// Returns the absolute path of a cached page.
/// </summary>
/// <param name="chapter">Chapter entity with Files populated.</param>
/// <param name="page">Page number to look for</param>
/// <returns></returns>
Task<(string path, MangaFile file)> GetCachedPagePath(Chapter chapter, int page);
void EnsureCacheDirectory();
string GetCachedEpubFile(int chapterId, Chapter chapter);
public void ExtractChapterFiles(string extractPath, IReadOnlyList<MangaFile> files);
}
}

View File

@ -1,10 +0,0 @@
using System.Threading.Tasks;
namespace API.Interfaces.Services
{
public interface ICleanupService
{
Task Cleanup();
void CleanupCacheDirectory();
}
}

View File

@ -1,22 +0,0 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace API.Interfaces.Services
{
public interface IDirectoryService
{
/// <summary>
/// Lists out top-level folders for a given directory. Filters out System and Hidden folders.
/// </summary>
/// <param name="rootPath">Absolute path of directory to scan.</param>
/// <returns>List of folder names</returns>
IEnumerable<string> ListDirectory(string rootPath);
Task<byte[]> ReadFileAsync(string path);
bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath, string prepend = "");
bool Exists(string directory);
void CopyFileToDirectory(string fullFilePath, string targetDirectory);
int TraverseTreeParallelForEach(string root, Action<string> action, string searchPattern, ILogger logger);
}
}

View File

@ -1,23 +0,0 @@
using API.Entities;
using API.Services;
namespace API.Interfaces.Services
{
public interface IImageService
{
string GetCoverImage(string path, string fileName);
string GetCoverFile(MangaFile file);
/// <summary>
/// Creates a Thumbnail version of an image
/// </summary>
/// <param name="path">Path to the image file</param>
/// <returns>File name with extension of the file. This will always write to <see cref="DirectoryService.CoverImageDirectory"/></returns>
public string CreateThumbnail(string path, string fileName);
/// <summary>
/// Creates a Thumbnail version of a base64 image
/// </summary>
/// <param name="encodedImage">base64 encoded image</param>
/// <returns>File name with extension of the file. This will always write to <see cref="DirectoryService.CoverImageDirectory"/></returns>
public string CreateThumbnailFromBase64(string encodedImage, string fileName);
}
}

View File

@ -1,20 +0,0 @@
using System.Threading.Tasks;
namespace API.Interfaces.Services
{
public interface IMetadataService
{
/// <summary>
/// Recalculates metadata for all entities in a library.
/// </summary>
/// <param name="libraryId"></param>
/// <param name="forceUpdate"></param>
Task RefreshMetadata(int libraryId, bool forceUpdate = false);
/// <summary>
/// Performs a forced refresh of metadata just for a series and its nested entities
/// </summary>
/// <param name="libraryId"></param>
/// <param name="seriesId"></param>
Task RefreshMetadataForSeries(int libraryId, int seriesId, bool forceUpdate = false);
}
}

View File

@ -1,17 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.DTOs;
using API.Entities;
namespace API.Interfaces.Services
{
public interface IReaderService
{
void MarkChaptersAsRead(AppUser user, int seriesId, IEnumerable<Chapter> chapters);
void MarkChaptersAsUnread(AppUser user, int seriesId, IEnumerable<Chapter> chapters);
Task<bool> SaveReadingProgress(ProgressDto progressDto, int userId);
Task<int> CapPageToChapter(int chapterId, int page);
Task<int> GetNextChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId);
Task<int> GetPrevChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId);
}
}

View File

@ -1,18 +0,0 @@

using System.Threading;
using System.Threading.Tasks;
namespace API.Interfaces.Services
{
public interface IScannerService
{
/// <summary>
/// Given a library id, scans folders for said library. Parses files and generates DB updates. Will overwrite
/// cover images if forceUpdate is true.
/// </summary>
/// <param name="libraryId">Library to scan against</param>
Task ScanLibrary(int libraryId);
Task ScanLibraries();
Task ScanSeries(int libraryId, int seriesId, CancellationToken token);
}
}

View File

@ -1,10 +0,0 @@
using System.Threading;
using System.Threading.Tasks;
namespace API.Interfaces.Services
{
public interface IStartupTask
{
Task ExecuteAsync(CancellationToken cancellationToken = default);
}
}

View File

@ -1,11 +0,0 @@
using System.Threading.Tasks;
using API.DTOs.Stats;
namespace API.Interfaces.Services
{
public interface IStatsService
{
Task Send();
Task<ServerInfoDto> GetServerInfo();
}
}

View File

@ -1,10 +0,0 @@
using System.Threading.Tasks;
using API.Entities;
namespace API.Interfaces.Services
{
public interface ITokenService
{
Task<string> CreateToken(AppUser user);
}
}

View File

@ -1,13 +0,0 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.DTOs.Update;
namespace API.Interfaces.Services
{
public interface IVersionUpdaterService
{
Task<UpdateNotificationDto> CheckForUpdate();
Task PushUpdate(UpdateNotificationDto update);
Task<IEnumerable<UpdateNotificationDto>> GetAllReleases();
}
}

View File

@ -1,310 +0,0 @@

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
using API.Data.Repositories;
using API.DTOs;
using API.Entities;
using Microsoft.Extensions.Logging;
namespace API.Interfaces.Services
{
public class ReaderService : IReaderService
{
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<ReaderService> _logger;
private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer();
private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst();
public ReaderService(IUnitOfWork unitOfWork, ILogger<ReaderService> logger)
{
_unitOfWork = unitOfWork;
_logger = logger;
}
/// <summary>
/// Marks all Chapters as Read by creating or updating UserProgress rows. Does not commit.
/// </summary>
/// <param name="user"></param>
/// <param name="seriesId"></param>
/// <param name="chapters"></param>
public void MarkChaptersAsRead(AppUser user, int seriesId, IEnumerable<Chapter> chapters)
{
foreach (var chapter in chapters)
{
var userProgress = GetUserProgressForChapter(user, chapter);
if (userProgress == null)
{
user.Progresses.Add(new AppUserProgress
{
PagesRead = chapter.Pages,
VolumeId = chapter.VolumeId,
SeriesId = seriesId,
ChapterId = chapter.Id
});
}
else
{
userProgress.PagesRead = chapter.Pages;
userProgress.SeriesId = seriesId;
userProgress.VolumeId = chapter.VolumeId;
}
}
}
/// <summary>
/// Marks all Chapters as Unread by creating or updating UserProgress rows. Does not commit.
/// </summary>
/// <param name="user"></param>
/// <param name="seriesId"></param>
/// <param name="chapters"></param>
public void MarkChaptersAsUnread(AppUser user, int seriesId, IEnumerable<Chapter> chapters)
{
foreach (var chapter in chapters)
{
var userProgress = GetUserProgressForChapter(user, chapter);
if (userProgress == null)
{
user.Progresses.Add(new AppUserProgress
{
PagesRead = 0,
VolumeId = chapter.VolumeId,
SeriesId = seriesId,
ChapterId = chapter.Id
});
}
else
{
userProgress.PagesRead = 0;
userProgress.SeriesId = seriesId;
userProgress.VolumeId = chapter.VolumeId;
}
}
}
/// <summary>
/// Gets the User Progress for a given Chapter. This will handle any duplicates that might have occurred in past versions and will delete them. Does not commit.
/// </summary>
/// <param name="user"></param>
/// <param name="chapter"></param>
/// <returns></returns>
public static AppUserProgress GetUserProgressForChapter(AppUser user, Chapter chapter)
{
AppUserProgress userProgress = null;
try
{
userProgress =
user.Progresses.SingleOrDefault(x => x.ChapterId == chapter.Id && x.AppUserId == user.Id);
}
catch (Exception)
{
// There is a very rare chance that user progress will duplicate the current row. If that happens, delete the one with fewer pages
var progresses = user.Progresses.Where(x => x.ChapterId == chapter.Id && x.AppUserId == user.Id).ToList();
if (progresses.Count > 1)
{
user.Progresses = new List<AppUserProgress>()
{
user.Progresses.First()
};
userProgress = user.Progresses.First();
}
}
return userProgress;
}
/// <summary>
/// Saves progress to DB
/// </summary>
/// <param name="progressDto"></param>
/// <param name="userId"></param>
/// <returns></returns>
public async Task<bool> SaveReadingProgress(ProgressDto progressDto, int userId)
{
// Don't let user save past total pages.
progressDto.PageNum = await CapPageToChapter(progressDto.ChapterId, progressDto.PageNum);
try
{
var userProgress =
await _unitOfWork.AppUserProgressRepository.GetUserProgressAsync(progressDto.ChapterId, userId);
if (userProgress == null)
{
// Create a user object
var userWithProgress =
await _unitOfWork.UserRepository.GetUserByIdAsync(userId, AppUserIncludes.Progress);
userWithProgress.Progresses ??= new List<AppUserProgress>();
userWithProgress.Progresses.Add(new AppUserProgress
{
PagesRead = progressDto.PageNum,
VolumeId = progressDto.VolumeId,
SeriesId = progressDto.SeriesId,
ChapterId = progressDto.ChapterId,
BookScrollId = progressDto.BookScrollId,
LastModified = DateTime.Now
});
_unitOfWork.UserRepository.Update(userWithProgress);
}
else
{
userProgress.PagesRead = progressDto.PageNum;
userProgress.SeriesId = progressDto.SeriesId;
userProgress.VolumeId = progressDto.VolumeId;
userProgress.BookScrollId = progressDto.BookScrollId;
userProgress.LastModified = DateTime.Now;
_unitOfWork.AppUserProgressRepository.Update(userProgress);
}
if (await _unitOfWork.CommitAsync())
{
return true;
}
}
catch (Exception exception)
{
_logger.LogError(exception, "Could not save progress");
await _unitOfWork.RollbackAsync();
}
return false;
}
/// <summary>
/// Ensures that the page is within 0 and total pages for a chapter. Makes one DB call.
/// </summary>
/// <param name="chapterId"></param>
/// <param name="page"></param>
/// <returns></returns>
public async Task<int> CapPageToChapter(int chapterId, int page)
{
var totalPages = await _unitOfWork.ChapterRepository.GetChapterTotalPagesAsync(chapterId);
if (page > totalPages)
{
page = totalPages;
}
if (page < 0)
{
page = 0;
}
return page;
}
/// <summary>
/// Tries to find the next logical Chapter
/// </summary>
/// <example>
/// V1 → V2 → V3 chapter 0 → V3 chapter 10 → SP 01 → SP 02
/// </example>
/// <param name="seriesId"></param>
/// <param name="volumeId"></param>
/// <param name="currentChapterId"></param>
/// <param name="userId"></param>
/// <returns>-1 if nothing can be found</returns>
public async Task<int> GetNextChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId)
{
var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).ToList();
var currentVolume = volumes.Single(v => v.Id == volumeId);
var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId);
if (currentVolume.Number == 0)
{
// Handle specials by sorting on their Filename aka Range
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, new NaturalSortComparer()), currentChapter.Number);
if (chapterId > 0) return chapterId;
}
foreach (var volume in volumes)
{
if (volume.Number == currentVolume.Number && volume.Chapters.Count > 1)
{
// Handle Chapters within current Volume
// In this case, we need 0 first because 0 represents a full volume file.
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting), currentChapter.Number);
if (chapterId > 0) return chapterId;
}
if (volume.Number != currentVolume.Number + 1) continue;
// Handle Chapters within next Volume
// ! When selecting the chapter for the next volume, we need to make sure a c0 comes before a c1+
var chapters = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).ToList();
if (currentChapter.Number.Equals("0") && chapters.Last().Number.Equals("0"))
{
return chapters.Last().Id;
}
var firstChapter = chapters.FirstOrDefault();
if (firstChapter == null) return -1;
return firstChapter.Id;
}
return -1;
}
/// <summary>
/// Tries to find the prev logical Chapter
/// </summary>
/// <example>
/// V1 ← V2 ← V3 chapter 0 ← V3 chapter 10 ← SP 01 ← SP 02
/// </example>
/// <param name="seriesId"></param>
/// <param name="volumeId"></param>
/// <param name="currentChapterId"></param>
/// <param name="userId"></param>
/// <returns>-1 if nothing can be found</returns>
public async Task<int> GetPrevChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId)
{
var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).Reverse().ToList();
var currentVolume = volumes.Single(v => v.Id == volumeId);
var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId);
if (currentVolume.Number == 0)
{
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, new NaturalSortComparer()).Reverse(), currentChapter.Number);
if (chapterId > 0) return chapterId;
}
foreach (var volume in volumes)
{
if (volume.Number == currentVolume.Number)
{
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).Reverse(), currentChapter.Number);
if (chapterId > 0) return chapterId;
}
if (volume.Number == currentVolume.Number - 1)
{
var lastChapter = volume.Chapters
.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).LastOrDefault();
if (lastChapter == null) return -1;
return lastChapter.Id;
}
}
return -1;
}
private static int GetNextChapterId(IEnumerable<ChapterDto> chapters, string currentChapterNumber)
{
var next = false;
var chaptersList = chapters.ToList();
foreach (var chapter in chaptersList)
{
if (next)
{
return chapter.Id;
}
if (currentChapterNumber.Equals(chapter.Number)) next = true;
}
return -1;
}
}
}

160
API/Parser/DefaultParser.cs Normal file
View File

@ -0,0 +1,160 @@
using System.IO;
using System.Linq;
using API.Entities.Enums;
using API.Services;
namespace API.Parser;
/// <summary>
/// This is an implementation of the Parser that is the basis for everything
/// </summary>
public class DefaultParser
{
private readonly IDirectoryService _directoryService;
public DefaultParser(IDirectoryService directoryService)
{
_directoryService = directoryService;
}
/// <summary>
/// Parses information out of a file path. Will fall back to using the directory name if Series couldn't be parsed
/// from filename.
/// </summary>
/// <param name="filePath"></param>
/// <param name="rootPath">Root folder</param>
/// <param name="type">Defaults to Manga. Allows different Regex to be used for parsing.</param>
/// <returns><see cref="ParserInfo"/> or null if Series was empty</returns>
public ParserInfo Parse(string filePath, string rootPath, LibraryType type = LibraryType.Manga)
{
var fileName = _directoryService.FileSystem.Path.GetFileNameWithoutExtension(filePath);
ParserInfo ret;
if (Parser.IsEpub(filePath))
{
ret = new ParserInfo()
{
Chapters = Parser.ParseChapter(fileName) ?? Parser.ParseComicChapter(fileName),
Series = Parser.ParseSeries(fileName) ?? Parser.ParseComicSeries(fileName),
Volumes = Parser.ParseVolume(fileName) ?? Parser.ParseComicVolume(fileName),
Filename = Path.GetFileName(filePath),
Format = Parser.ParseFormat(filePath),
FullFilePath = filePath
};
}
else
{
ret = new ParserInfo()
{
Chapters = type == LibraryType.Manga ? Parser.ParseChapter(fileName) : Parser.ParseComicChapter(fileName),
Series = type == LibraryType.Manga ? Parser.ParseSeries(fileName) : Parser.ParseComicSeries(fileName),
Volumes = type == LibraryType.Manga ? Parser.ParseVolume(fileName) : Parser.ParseComicVolume(fileName),
Filename = Path.GetFileName(filePath),
Format = Parser.ParseFormat(filePath),
Title = Path.GetFileNameWithoutExtension(fileName),
FullFilePath = filePath
};
}
if (Parser.IsImage(filePath) && Parser.IsCoverImage(filePath)) return null;
if (Parser.IsImage(filePath))
{
// Reset Chapters, Volumes, and Series as images are not good to parse information out of. Better to use folders.
ret.Volumes = Parser.DefaultVolume;
ret.Chapters = Parser.DefaultChapter;
ret.Series = string.Empty;
}
if (ret.Series == string.Empty || Parser.IsImage(filePath))
{
// Try to parse information out of each folder all the way to rootPath
ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
}
var edition = Parser.ParseEdition(fileName);
if (!string.IsNullOrEmpty(edition))
{
ret.Series = Parser.CleanTitle(ret.Series.Replace(edition, ""), type is LibraryType.Comic);
ret.Edition = edition;
}
var isSpecial = type == LibraryType.Comic ? Parser.ParseComicSpecial(fileName) : Parser.ParseMangaSpecial(fileName);
// We must ensure that we can only parse a special out, as some files will have v20 c171-180+Omake and that
// could cause a problem as Omake is a special term, but there is valid volume/chapter information.
if (ret.Chapters == Parser.DefaultChapter && ret.Volumes == Parser.DefaultVolume && !string.IsNullOrEmpty(isSpecial))
{
ret.IsSpecial = true;
ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
}
// If we are a special with a marker, we need to ensure we use the correct series name. We can do this by falling back to the folder name
if (Parser.HasSpecialMarker(fileName))
{
ret.IsSpecial = true;
ret.Chapters = Parser.DefaultChapter;
ret.Volumes = Parser.DefaultVolume;
ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
}
if (string.IsNullOrEmpty(ret.Series))
{
ret.Series = Parser.CleanTitle(fileName, type is LibraryType.Comic);
}
// PDFs may have .pdf in the series name; remove that
if (Parser.IsPdf(filePath) && ret.Series.ToLower().EndsWith(".pdf"))
{
ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length);
}
return ret.Series == string.Empty ? null : ret;
}
/// <summary>
/// Fills out <see cref="ParserInfo"/> by trying to parse volume, chapters, and series from folders
/// </summary>
/// <param name="filePath"></param>
/// <param name="rootPath"></param>
/// <param name="type"></param>
/// <param name="ret">Expects a non-null ParserInfo which this method will populate</param>
public void ParseFromFallbackFolders(string filePath, string rootPath, LibraryType type, ref ParserInfo ret)
{
var fallbackFolders = _directoryService.GetFoldersTillRoot(rootPath, filePath).ToList();
for (var i = 0; i < fallbackFolders.Count; i++)
{
var folder = fallbackFolders[i];
if (!string.IsNullOrEmpty(Parser.ParseMangaSpecial(folder))) continue;
var parsedVolume = type is LibraryType.Manga ? Parser.ParseVolume(folder) : Parser.ParseComicVolume(folder);
var parsedChapter = type is LibraryType.Manga ? Parser.ParseChapter(folder) : Parser.ParseComicChapter(folder);
if (!parsedVolume.Equals(Parser.DefaultVolume) || !parsedChapter.Equals(Parser.DefaultChapter))
{
if ((ret.Volumes.Equals(Parser.DefaultVolume) || string.IsNullOrEmpty(ret.Volumes)) && !parsedVolume.Equals(Parser.DefaultVolume))
{
ret.Volumes = parsedVolume;
}
if ((ret.Chapters.Equals(Parser.DefaultChapter) || string.IsNullOrEmpty(ret.Chapters)) && !parsedChapter.Equals(Parser.DefaultChapter))
{
ret.Chapters = parsedChapter;
}
}
var series = Parser.ParseSeries(folder);
if ((string.IsNullOrEmpty(series) && i == fallbackFolders.Count - 1))
{
ret.Series = Parser.CleanTitle(folder, type is LibraryType.Comic);
break;
}
if (!string.IsNullOrEmpty(series))
{
ret.Series = series;
break;
}
}
}
}
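Since DefaultParser now takes IDirectoryService through its constructor, a minimal usage sketch (the mocked-filesystem wiring mirrors the test and benchmark setup in this PR; the library root and file path are hypothetical):

var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem());
var parser = new DefaultParser(directoryService);
var info = parser.Parse(@"/manga/Some Series/Some Series v01.cbz", @"/manga/", LibraryType.Manga);
// Returns null when no Series can be parsed; otherwise Series/Volumes/Chapters come from the filename,
// with ParseFromFallbackFolders filling any gaps from the folders between rootPath and the file.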

View File

@ -491,146 +491,146 @@ namespace API.Parser
); );
/// <summary> // /// <summary>
/// Parses information out of a file path. Will fallback to using directory name if Series couldn't be parsed // /// Parses information out of a file path. Will fallback to using directory name if Series couldn't be parsed
/// from filename. // /// from filename.
/// </summary> // /// </summary>
/// <param name="filePath"></param> // /// <param name="filePath"></param>
/// <param name="rootPath">Root folder</param> // /// <param name="rootPath">Root folder</param>
/// <param name="type">Defaults to Manga. Allows different Regex to be used for parsing.</param> // /// <param name="type">Defaults to Manga. Allows different Regex to be used for parsing.</param>
/// <returns><see cref="ParserInfo"/> or null if Series was empty</returns> // /// <returns><see cref="ParserInfo"/> or null if Series was empty</returns>
public static ParserInfo Parse(string filePath, string rootPath, LibraryType type = LibraryType.Manga) // public static ParserInfo Parse(string filePath, string rootPath, IDirectoryService directoryService, LibraryType type = LibraryType.Manga)
{ // {
var fileName = Path.GetFileNameWithoutExtension(filePath); // var fileName = directoryService.FileSystem.Path.GetFileNameWithoutExtension(filePath);
ParserInfo ret; // ParserInfo ret;
//
if (IsEpub(filePath)) // if (IsEpub(filePath))
{ // {
ret = new ParserInfo() // ret = new ParserInfo()
{ // {
Chapters = ParseChapter(fileName) ?? ParseComicChapter(fileName), // Chapters = ParseChapter(fileName) ?? ParseComicChapter(fileName),
Series = ParseSeries(fileName) ?? ParseComicSeries(fileName), // Series = ParseSeries(fileName) ?? ParseComicSeries(fileName),
Volumes = ParseVolume(fileName) ?? ParseComicVolume(fileName), // Volumes = ParseVolume(fileName) ?? ParseComicVolume(fileName),
Filename = Path.GetFileName(filePath), // Filename = Path.GetFileName(filePath),
Format = ParseFormat(filePath), // Format = ParseFormat(filePath),
FullFilePath = filePath // FullFilePath = filePath
}; // };
} // }
else // else
{ // {
ret = new ParserInfo() // ret = new ParserInfo()
{ // {
Chapters = type == LibraryType.Manga ? ParseChapter(fileName) : ParseComicChapter(fileName), // Chapters = type == LibraryType.Manga ? ParseChapter(fileName) : ParseComicChapter(fileName),
Series = type == LibraryType.Manga ? ParseSeries(fileName) : ParseComicSeries(fileName), // Series = type == LibraryType.Manga ? ParseSeries(fileName) : ParseComicSeries(fileName),
Volumes = type == LibraryType.Manga ? ParseVolume(fileName) : ParseComicVolume(fileName), // Volumes = type == LibraryType.Manga ? ParseVolume(fileName) : ParseComicVolume(fileName),
Filename = Path.GetFileName(filePath), // Filename = Path.GetFileName(filePath),
Format = ParseFormat(filePath), // Format = ParseFormat(filePath),
Title = Path.GetFileNameWithoutExtension(fileName), // Title = Path.GetFileNameWithoutExtension(fileName),
FullFilePath = filePath // FullFilePath = filePath
}; // };
} // }
//
if (IsImage(filePath) && IsCoverImage(filePath)) return null; // if (IsImage(filePath) && IsCoverImage(filePath)) return null;
//
if (IsImage(filePath)) // if (IsImage(filePath))
{ // {
// Reset Chapters, Volumes, and Series as images are not good to parse information out of. Better to use folders. // // Reset Chapters, Volumes, and Series as images are not good to parse information out of. Better to use folders.
ret.Volumes = DefaultVolume; // ret.Volumes = DefaultVolume;
ret.Chapters = DefaultChapter; // ret.Chapters = DefaultChapter;
ret.Series = string.Empty; // ret.Series = string.Empty;
} // }
//
if (ret.Series == string.Empty || IsImage(filePath)) // if (ret.Series == string.Empty || IsImage(filePath))
{ // {
// Try to parse information out of each folder all the way to rootPath // // Try to parse information out of each folder all the way to rootPath
ParseFromFallbackFolders(filePath, rootPath, type, ref ret); // ParseFromFallbackFolders(filePath, rootPath, type, directoryService, ref ret);
} // }
//
var edition = ParseEdition(fileName); // var edition = ParseEdition(fileName);
if (!string.IsNullOrEmpty(edition)) // if (!string.IsNullOrEmpty(edition))
{ // {
ret.Series = CleanTitle(ret.Series.Replace(edition, ""), type is LibraryType.Comic); // ret.Series = CleanTitle(ret.Series.Replace(edition, ""), type is LibraryType.Comic);
ret.Edition = edition; // ret.Edition = edition;
} // }
//
var isSpecial = type == LibraryType.Comic ? ParseComicSpecial(fileName) : ParseMangaSpecial(fileName); // var isSpecial = type == LibraryType.Comic ? ParseComicSpecial(fileName) : ParseMangaSpecial(fileName);
// We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that // // We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that
// could cause a problem as Omake is a special term, but there is valid volume/chapter information. // // could cause a problem as Omake is a special term, but there is valid volume/chapter information.
if (ret.Chapters == DefaultChapter && ret.Volumes == DefaultVolume && !string.IsNullOrEmpty(isSpecial)) // if (ret.Chapters == DefaultChapter && ret.Volumes == DefaultVolume && !string.IsNullOrEmpty(isSpecial))
{ // {
ret.IsSpecial = true; // ret.IsSpecial = true;
ParseFromFallbackFolders(filePath, rootPath, type, ref ret); // ParseFromFallbackFolders(filePath, rootPath, type, directoryService, ref ret);
} // }
//
// If we are a special with marker, we need to ensure we use the correct series name. we can do this by falling back to Folder name // // If we are a special with marker, we need to ensure we use the correct series name. we can do this by falling back to Folder name
if (HasSpecialMarker(fileName)) // if (HasSpecialMarker(fileName))
{ // {
ret.IsSpecial = true; // ret.IsSpecial = true;
ret.Chapters = DefaultChapter; // ret.Chapters = DefaultChapter;
ret.Volumes = DefaultVolume; // ret.Volumes = DefaultVolume;
//
ParseFromFallbackFolders(filePath, rootPath, type, ref ret); // ParseFromFallbackFolders(filePath, rootPath, type, directoryService, ref ret);
} // }
//
if (string.IsNullOrEmpty(ret.Series)) // if (string.IsNullOrEmpty(ret.Series))
{ // {
ret.Series = CleanTitle(fileName, type is LibraryType.Comic); // ret.Series = CleanTitle(fileName, type is LibraryType.Comic);
} // }
//
// Pdfs may have .pdf in the series name, remove that // // Pdfs may have .pdf in the series name, remove that
if (IsPdf(filePath) && ret.Series.ToLower().EndsWith(".pdf")) // if (IsPdf(filePath) && ret.Series.ToLower().EndsWith(".pdf"))
{ // {
ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length); // ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length);
} // }
//
return ret.Series == string.Empty ? null : ret; // return ret.Series == string.Empty ? null : ret;
} // }
//
/// <summary> // /// <summary>
/// // ///
/// </summary> // /// </summary>
/// <param name="filePath"></param> // /// <param name="filePath"></param>
/// <param name="rootPath"></param> // /// <param name="rootPath"></param>
/// <param name="type"></param> // /// <param name="type"></param>
/// <param name="ret">Expects a non-null ParserInfo which this method will populate</param> // /// <param name="ret">Expects a non-null ParserInfo which this method will populate</param>
public static void ParseFromFallbackFolders(string filePath, string rootPath, LibraryType type, ref ParserInfo ret) // public static void ParseFromFallbackFolders(string filePath, string rootPath, LibraryType type, IDirectoryService directoryService, ref ParserInfo ret)
{ // {
var fallbackFolders = DirectoryService.GetFoldersTillRoot(rootPath, filePath).ToList(); // var fallbackFolders = directoryService.GetFoldersTillRoot(rootPath, filePath).ToList();
for (var i = 0; i < fallbackFolders.Count; i++) // for (var i = 0; i < fallbackFolders.Count; i++)
{ // {
var folder = fallbackFolders[i]; // var folder = fallbackFolders[i];
if (!string.IsNullOrEmpty(ParseMangaSpecial(folder))) continue; // if (!string.IsNullOrEmpty(ParseMangaSpecial(folder))) continue;
//
var parsedVolume = type is LibraryType.Manga ? ParseVolume(folder) : ParseComicVolume(folder); // var parsedVolume = type is LibraryType.Manga ? ParseVolume(folder) : ParseComicVolume(folder);
var parsedChapter = type is LibraryType.Manga ? ParseChapter(folder) : ParseComicChapter(folder); // var parsedChapter = type is LibraryType.Manga ? ParseChapter(folder) : ParseComicChapter(folder);
//
if (!parsedVolume.Equals(DefaultVolume) || !parsedChapter.Equals(DefaultChapter)) // if (!parsedVolume.Equals(DefaultVolume) || !parsedChapter.Equals(DefaultChapter))
{ // {
if ((ret.Volumes.Equals(DefaultVolume) || string.IsNullOrEmpty(ret.Volumes)) && !parsedVolume.Equals(DefaultVolume)) // if ((ret.Volumes.Equals(DefaultVolume) || string.IsNullOrEmpty(ret.Volumes)) && !parsedVolume.Equals(DefaultVolume))
{ // {
ret.Volumes = parsedVolume; // ret.Volumes = parsedVolume;
} // }
if ((ret.Chapters.Equals(DefaultChapter) || string.IsNullOrEmpty(ret.Chapters)) && !parsedChapter.Equals(DefaultChapter)) // if ((ret.Chapters.Equals(DefaultChapter) || string.IsNullOrEmpty(ret.Chapters)) && !parsedChapter.Equals(DefaultChapter))
{ // {
ret.Chapters = parsedChapter; // ret.Chapters = parsedChapter;
} // }
} // }
//
var series = ParseSeries(folder); // var series = ParseSeries(folder);
//
if ((string.IsNullOrEmpty(series) && i == fallbackFolders.Count - 1)) // if ((string.IsNullOrEmpty(series) && i == fallbackFolders.Count - 1))
{ // {
ret.Series = CleanTitle(folder, type is LibraryType.Comic); // ret.Series = CleanTitle(folder, type is LibraryType.Comic);
break; // break;
} // }
//
if (!string.IsNullOrEmpty(series)) // if (!string.IsNullOrEmpty(series))
{ // {
ret.Series = series; // ret.Series = series;
break; // break;
} // }
} // }
} // }
public static MangaFormat ParseFormat(string filePath) public static MangaFormat ParseFormat(string filePath)
{ {

View File

@ -1,5 +1,6 @@
using System; using System;
using System.IO; using System.IO;
using System.IO.Abstractions;
using System.Security.Cryptography; using System.Security.Cryptography;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data; using API.Data;
@ -15,6 +16,7 @@ using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using NetVips;
namespace API namespace API
{ {
@ -31,7 +33,19 @@ namespace API
Console.OutputEncoding = System.Text.Encoding.UTF8; Console.OutputEncoding = System.Text.Encoding.UTF8;
var isDocker = new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker; var isDocker = new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker;
MigrateConfigFiles.Migrate(isDocker); var migrateLogger = LoggerFactory.Create(builder =>
{
builder
//.AddConfiguration(Configuration.GetSection("Logging"))
.AddFilter("Microsoft", LogLevel.Warning)
.AddFilter("System", LogLevel.Warning)
.AddFilter("SampleApp.Program", LogLevel.Debug)
.AddConsole()
.AddEventLog();
});
var mLogger = migrateLogger.CreateLogger<DirectoryService>();
MigrateConfigFiles.Migrate(isDocker, new DirectoryService(mLogger, new FileSystem()));
// Before anything, check if JWT has been generated properly or if user still has default // Before anything, check if JWT has been generated properly or if user still has default
if (!Configuration.CheckIfJwtTokenSet() && if (!Configuration.CheckIfJwtTokenSet() &&
@ -60,14 +74,16 @@ namespace API
return; return;
} }
var directoryService = services.GetRequiredService<DirectoryService>();
var requiresCoverImageMigration = !Directory.Exists(DirectoryService.CoverImageDirectory);
var requiresCoverImageMigration = !Directory.Exists(directoryService.CoverImageDirectory);
try try
{ {
// If this is a new install, tables won't exist yet // If this is a new install, tables won't exist yet
if (requiresCoverImageMigration) if (requiresCoverImageMigration)
{ {
MigrateCoverImages.ExtractToImages(context); MigrateCoverImages.ExtractToImages(context, directoryService, services.GetRequiredService<ImageService>());
} }
} }
catch (Exception) catch (Exception)
@ -80,11 +96,11 @@ namespace API
if (requiresCoverImageMigration) if (requiresCoverImageMigration)
{ {
await MigrateCoverImages.UpdateDatabaseWithImages(context); await MigrateCoverImages.UpdateDatabaseWithImages(context, directoryService);
} }
await Seed.SeedRoles(roleManager); await Seed.SeedRoles(roleManager);
await Seed.SeedSettings(context); await Seed.SeedSettings(context, directoryService);
await Seed.SeedUserApiKeys(context); await Seed.SeedUserApiKeys(context);
} }
catch (Exception ex) catch (Exception ex)

View File

@ -3,12 +3,16 @@ using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Entities; using API.Entities;
using API.Errors; using API.Errors;
using API.Interfaces.Services;
using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
namespace API.Services namespace API.Services
{ {
public interface IAccountService
{
Task<IEnumerable<ApiException>> ChangeUserPassword(AppUser user, string newPassword);
}
public class AccountService : IAccountService public class AccountService : IAccountService
{ {
private readonly UserManager<AppUser> _userManager; private readonly UserManager<AppUser> _userManager;

View File

@ -10,7 +10,6 @@ using API.Archive;
using API.Comparators; using API.Comparators;
using API.Data.Metadata; using API.Data.Metadata;
using API.Extensions; using API.Extensions;
using API.Interfaces.Services;
using API.Services.Tasks; using API.Services.Tasks;
using Kavita.Common; using Kavita.Common;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
@ -19,6 +18,18 @@ using SharpCompress.Common;
namespace API.Services namespace API.Services
{ {
public interface IArchiveService
{
void ExtractArchive(string archivePath, string extractPath);
int GetNumberOfPagesFromArchive(string archivePath);
string GetCoverImage(string archivePath, string fileName);
bool IsValidArchive(string archivePath);
ComicInfo GetComicInfo(string archivePath);
ArchiveLibrary CanOpen(string archivePath);
bool ArchiveNeedsFlattening(ZipArchive archive);
Task<Tuple<byte[], string>> CreateZipForDownload(IEnumerable<string> files, string tempFolder);
}
/// <summary> /// <summary>
/// Responsible for manipulating Archive files. Used by <see cref="CacheService"/> and <see cref="ScannerService"/> /// Responsible for manipulating Archive files. Used by <see cref="CacheService"/> and <see cref="ScannerService"/>
/// </summary> /// </summary>
@ -27,12 +38,14 @@ namespace API.Services
{ {
private readonly ILogger<ArchiveService> _logger; private readonly ILogger<ArchiveService> _logger;
private readonly IDirectoryService _directoryService; private readonly IDirectoryService _directoryService;
private readonly IImageService _imageService;
private const string ComicInfoFilename = "comicinfo"; private const string ComicInfoFilename = "comicinfo";
public ArchiveService(ILogger<ArchiveService> logger, IDirectoryService directoryService) public ArchiveService(ILogger<ArchiveService> logger, IDirectoryService directoryService, IImageService imageService)
{ {
_logger = logger; _logger = logger;
_directoryService = directoryService; _directoryService = directoryService;
_imageService = imageService;
} }
/// <summary> /// <summary>
@ -42,7 +55,7 @@ namespace API.Services
/// <returns></returns> /// <returns></returns>
public virtual ArchiveLibrary CanOpen(string archivePath) public virtual ArchiveLibrary CanOpen(string archivePath)
{ {
if (!(File.Exists(archivePath) && Parser.Parser.IsArchive(archivePath) || Parser.Parser.IsEpub(archivePath))) return ArchiveLibrary.NotSupported; if (string.IsNullOrEmpty(archivePath) || !(File.Exists(archivePath) && Parser.Parser.IsArchive(archivePath) || Parser.Parser.IsEpub(archivePath))) return ArchiveLibrary.NotSupported;
try try
{ {
@ -239,14 +252,14 @@ namespace API.Services
{ {
var dateString = DateTime.Now.ToShortDateString().Replace("/", "_"); var dateString = DateTime.Now.ToShortDateString().Replace("/", "_");
var tempLocation = Path.Join(DirectoryService.TempDirectory, $"{tempFolder}_{dateString}"); var tempLocation = Path.Join(_directoryService.TempDirectory, $"{tempFolder}_{dateString}");
DirectoryService.ExistOrCreate(tempLocation); _directoryService.ExistOrCreate(tempLocation);
if (!_directoryService.CopyFilesToDirectory(files, tempLocation)) if (!_directoryService.CopyFilesToDirectory(files, tempLocation))
{ {
throw new KavitaException("Unable to copy files to temp directory archive download."); throw new KavitaException("Unable to copy files to temp directory archive download.");
} }
var zipPath = Path.Join(DirectoryService.TempDirectory, $"kavita_{tempFolder}_{dateString}.zip"); var zipPath = Path.Join(_directoryService.TempDirectory, $"kavita_{tempFolder}_{dateString}.zip");
try try
{ {
ZipFile.CreateFromDirectory(tempLocation, zipPath); ZipFile.CreateFromDirectory(tempLocation, zipPath);
@ -260,7 +273,7 @@ namespace API.Services
var fileBytes = await _directoryService.ReadFileAsync(zipPath); var fileBytes = await _directoryService.ReadFileAsync(zipPath);
DirectoryService.ClearAndDeleteDirectory(tempLocation); _directoryService.ClearAndDeleteDirectory(tempLocation); // NOTE: For sending back just zip, just schedule this to be called after the file is returned or let next temp storage cleanup take care of it
(new FileInfo(zipPath)).Delete(); (new FileInfo(zipPath)).Delete();
return Tuple.Create(fileBytes, zipPath); return Tuple.Create(fileBytes, zipPath);
@ -270,7 +283,7 @@ namespace API.Services
{ {
try try
{ {
return ImageService.WriteCoverThumbnail(stream, fileName); return _imageService.WriteCoverThumbnail(stream, fileName);
} }
catch (Exception ex) catch (Exception ex)
{ {
@ -413,9 +426,9 @@ namespace API.Services
} }
private static void ExtractArchiveEntities(IEnumerable<IArchiveEntry> entries, string extractPath) private void ExtractArchiveEntities(IEnumerable<IArchiveEntry> entries, string extractPath)
{ {
DirectoryService.ExistOrCreate(extractPath); _directoryService.ExistOrCreate(extractPath);
foreach (var entry in entries) foreach (var entry in entries)
{ {
entry.WriteToDirectory(extractPath, new ExtractionOptions() entry.WriteToDirectory(extractPath, new ExtractionOptions()
@ -428,7 +441,7 @@ namespace API.Services
private void ExtractArchiveEntries(ZipArchive archive, string extractPath) private void ExtractArchiveEntries(ZipArchive archive, string extractPath)
{ {
// NOTE: In cases where we try to extract, but there are InvalidPathChars, we need to inform the user // TODO: In cases where we try to extract, but there are InvalidPathChars, we need to inform the user
var needsFlattening = ArchiveNeedsFlattening(archive); var needsFlattening = ArchiveNeedsFlattening(archive);
if (!archive.HasFiles() && !needsFlattening) return; if (!archive.HasFiles() && !needsFlattening) return;
@ -436,7 +449,7 @@ namespace API.Services
if (!needsFlattening) return; if (!needsFlattening) return;
_logger.LogDebug("Extracted archive is nested in root folder, flattening..."); _logger.LogDebug("Extracted archive is nested in root folder, flattening...");
new DirectoryInfo(extractPath).Flatten(); _directoryService.Flatten(extractPath);
} }
/// <summary> /// <summary>

View File

@ -11,7 +11,6 @@ using System.Threading.Tasks;
using System.Web; using System.Web;
using API.Data.Metadata; using API.Data.Metadata;
using API.Entities.Enums; using API.Entities.Enums;
using API.Interfaces.Services;
using API.Parser; using API.Parser;
using Docnet.Core; using Docnet.Core;
using Docnet.Core.Converters; using Docnet.Core.Converters;
@ -25,17 +24,45 @@ using VersOne.Epub;
namespace API.Services namespace API.Services
{ {
public interface IBookService
{
int GetNumberOfPages(string filePath);
string GetCoverImage(string fileFilePath, string fileName);
Task<Dictionary<string, int>> CreateKeyToPageMappingAsync(EpubBookRef book);
/// <summary>
/// Scopes styles to .reading-section and replaces img src to the passed apiBase
/// </summary>
/// <param name="stylesheetHtml"></param>
/// <param name="apiBase"></param>
/// <param name="filename">If the stylesheetHtml contains Import statements, when scoping the filename, scope needs to be wrt filepath.</param>
/// <param name="book">Book Reference, needed for if you expect Import statements</param>
/// <returns></returns>
Task<string> ScopeStyles(string stylesheetHtml, string apiBase, string filename, EpubBookRef book);
ComicInfo GetComicInfo(string filePath);
ParserInfo ParseInfo(string filePath);
/// <summary>
/// Extracts a PDF file's pages as images to an target directory
/// </summary>
/// <param name="fileFilePath"></param>
/// <param name="targetDirectory">Where the files will be extracted to. If doesn't exist, will be created.</param>
void ExtractPdfImages(string fileFilePath, string targetDirectory);
}
public class BookService : IBookService public class BookService : IBookService
{ {
private readonly ILogger<BookService> _logger; private readonly ILogger<BookService> _logger;
private readonly IDirectoryService _directoryService;
private readonly IImageService _imageService;
private readonly StylesheetParser _cssParser = new (); private readonly StylesheetParser _cssParser = new ();
private static readonly RecyclableMemoryStreamManager StreamManager = new (); private static readonly RecyclableMemoryStreamManager StreamManager = new ();
private const string CssScopeClass = ".book-content"; private const string CssScopeClass = ".book-content";
public BookService(ILogger<BookService> logger) public BookService(ILogger<BookService> logger, IDirectoryService directoryService, IImageService imageService)
{ {
_logger = logger; _logger = logger;
_directoryService = directoryService;
_imageService = imageService;
} }
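Because the service no longer leans on static helpers, standing it up in a test is plain constructor wiring. A minimal sketch, assuming NSubstitute and System.IO.Abstractions.TestingHelpers with the usual usings; the variable names are illustrative only:

    var fileSystem = new MockFileSystem();
    var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
    var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService);
    var bookService = new BookService(Substitute.For<ILogger<BookService>>(), directoryService, imageService);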
private static bool HasClickableHrefPart(HtmlNode anchor) private static bool HasClickableHrefPart(HtmlNode anchor)
@ -431,7 +458,7 @@ namespace API.Services
public void ExtractPdfImages(string fileFilePath, string targetDirectory) public void ExtractPdfImages(string fileFilePath, string targetDirectory)
{ {
DirectoryService.ExistOrCreate(targetDirectory); _directoryService.ExistOrCreate(targetDirectory);
using var docReader = DocLib.Instance.GetDocReader(fileFilePath, new PageDimensions(1080, 1920)); using var docReader = DocLib.Instance.GetDocReader(fileFilePath, new PageDimensions(1080, 1920));
var pages = docReader.GetPageCount(); var pages = docReader.GetPageCount();
@ -473,7 +500,7 @@ namespace API.Services
if (coverImageContent == null) return string.Empty; if (coverImageContent == null) return string.Empty;
using var stream = coverImageContent.GetContentStream(); using var stream = coverImageContent.GetContentStream();
return ImageService.WriteCoverThumbnail(stream, fileName); return _imageService.WriteCoverThumbnail(stream, fileName);
} }
catch (Exception ex) catch (Exception ex)
{ {
@ -494,7 +521,7 @@ namespace API.Services
using var stream = StreamManager.GetStream("BookService.GetPdfPage"); using var stream = StreamManager.GetStream("BookService.GetPdfPage");
GetPdfPage(docReader, 0, stream); GetPdfPage(docReader, 0, stream);
return ImageService.WriteCoverThumbnail(stream, fileName); return _imageService.WriteCoverThumbnail(stream, fileName);
} }
catch (Exception ex) catch (Exception ex)


@ -4,43 +4,50 @@ using System.IO;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Comparators; using API.Comparators;
using API.Data;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
using API.Extensions; using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
namespace API.Services namespace API.Services
{ {
public interface ICacheService
{
/// <summary>
/// Ensures the cache is created for the given chapter and if not, will create it. Should be called before any other
/// cache operations (except cleanup).
/// </summary>
/// <param name="chapterId"></param>
/// <returns>Chapter for the passed chapterId. Side-effect from ensuring cache.</returns>
Task<Chapter> Ensure(int chapterId);
/// <summary>
/// Clears cache directory of all volumes. This can be invoked from deleting a library or a series.
/// </summary>
/// <param name="chapterIds">Volumes that belong to that library. Assume the library might have been deleted before this invocation.</param>
void CleanupChapters(IEnumerable<int> chapterIds);
string GetCachedPagePath(Chapter chapter, int page);
string GetCachedEpubFile(int chapterId, Chapter chapter);
public void ExtractChapterFiles(string extractPath, IReadOnlyList<MangaFile> files);
}
public class CacheService : ICacheService public class CacheService : ICacheService
{ {
private readonly ILogger<CacheService> _logger; private readonly ILogger<CacheService> _logger;
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
private readonly IArchiveService _archiveService;
private readonly IDirectoryService _directoryService; private readonly IDirectoryService _directoryService;
private readonly IBookService _bookService; private readonly IReadingItemService _readingItemService;
private readonly NumericComparer _numericComparer; private readonly NumericComparer _numericComparer;
public CacheService(ILogger<CacheService> logger, IUnitOfWork unitOfWork, IArchiveService archiveService, public CacheService(ILogger<CacheService> logger, IUnitOfWork unitOfWork,
IDirectoryService directoryService, IBookService bookService) IDirectoryService directoryService, IReadingItemService readingItemService)
{ {
_logger = logger; _logger = logger;
_unitOfWork = unitOfWork; _unitOfWork = unitOfWork;
_archiveService = archiveService;
_directoryService = directoryService; _directoryService = directoryService;
_bookService = bookService; _readingItemService = readingItemService;
_numericComparer = new NumericComparer(); _numericComparer = new NumericComparer();
} }
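The constructor now spells out the full dependency surface, so a test can substitute everything except the pieces under test. A hedged fixture sketch (assuming NSubstitute, not the actual test code):

    var fileSystem = new MockFileSystem();
    var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
    var cacheService = new CacheService(
        Substitute.For<ILogger<CacheService>>(),
        Substitute.For<IUnitOfWork>(),
        directoryService,
        Substitute.For<IReadingItemService>());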
public void EnsureCacheDirectory()
{
if (!DirectoryService.ExistOrCreate(DirectoryService.CacheDirectory))
{
_logger.LogError("Cache directory {CacheDirectory} is not accessible or does not exist. Creating...", DirectoryService.CacheDirectory);
}
}
/// <summary> /// <summary>
/// Returns the full path to the cached epub file. If the file does not exist, will fallback to the original. /// Returns the full path to the cached epub file. If the file does not exist, will fallback to the original.
/// </summary> /// </summary>
@ -50,8 +57,8 @@ namespace API.Services
public string GetCachedEpubFile(int chapterId, Chapter chapter) public string GetCachedEpubFile(int chapterId, Chapter chapter)
{ {
var extractPath = GetCachePath(chapterId); var extractPath = GetCachePath(chapterId);
var path = Path.Join(extractPath, Path.GetFileName(chapter.Files.First().FilePath)); var path = Path.Join(extractPath, _directoryService.FileSystem.Path.GetFileName(chapter.Files.First().FilePath));
if (!(new FileInfo(path).Exists)) if (!(_directoryService.FileSystem.FileInfo.FromFileName(path).Exists))
{ {
path = chapter.Files.First().FilePath; path = chapter.Files.First().FilePath;
} }
@ -62,14 +69,14 @@ namespace API.Services
/// Caches the files for the given chapter to CacheDirectory /// Caches the files for the given chapter to CacheDirectory
/// </summary> /// </summary>
/// <param name="chapterId"></param> /// <param name="chapterId"></param>
/// <returns>This will always return the Chapter for the chpaterId</returns> /// <returns>This will always return the Chapter for the chapterId</returns>
public async Task<Chapter> Ensure(int chapterId) public async Task<Chapter> Ensure(int chapterId)
{ {
EnsureCacheDirectory(); _directoryService.ExistOrCreate(_directoryService.CacheDirectory);
var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId); var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId);
var extractPath = GetCachePath(chapterId); var extractPath = GetCachePath(chapterId);
if (!Directory.Exists(extractPath)) if (!_directoryService.Exists(extractPath))
{ {
var files = chapter.Files.ToList(); var files = chapter.Files.ToList();
ExtractChapterFiles(extractPath, files); ExtractChapterFiles(extractPath, files);
@ -90,22 +97,12 @@ namespace API.Services
var removeNonImages = true; var removeNonImages = true;
var fileCount = files.Count; var fileCount = files.Count;
var extraPath = ""; var extraPath = "";
var extractDi = new DirectoryInfo(extractPath); var extractDi = _directoryService.FileSystem.DirectoryInfo.FromDirectoryName(extractPath);
if (files.Count > 0 && files[0].Format == MangaFormat.Image) if (files.Count > 0 && files[0].Format == MangaFormat.Image)
{ {
DirectoryService.ExistOrCreate(extractPath); _readingItemService.Extract(files[0].FilePath, extractPath, MangaFormat.Image, files.Count);
if (files.Count == 1) _directoryService.Flatten(extractDi.FullName);
{
_directoryService.CopyFileToDirectory(files[0].FilePath, extractPath);
}
else
{
DirectoryService.CopyDirectoryToDirectory(Path.GetDirectoryName(files[0].FilePath), extractPath,
Parser.Parser.ImageFileExtensions);
}
extractDi.Flatten();
} }
foreach (var file in files) foreach (var file in files)
@ -117,63 +114,37 @@ namespace API.Services
if (file.Format == MangaFormat.Archive) if (file.Format == MangaFormat.Archive)
{ {
_archiveService.ExtractArchive(file.FilePath, Path.Join(extractPath, extraPath)); _readingItemService.Extract(file.FilePath, Path.Join(extractPath, extraPath), file.Format);
} }
else if (file.Format == MangaFormat.Pdf) else if (file.Format == MangaFormat.Pdf)
{ {
_bookService.ExtractPdfImages(file.FilePath, Path.Join(extractPath, extraPath)); _readingItemService.Extract(file.FilePath, Path.Join(extractPath, extraPath), file.Format);
} }
else if (file.Format == MangaFormat.Epub) else if (file.Format == MangaFormat.Epub)
{ {
removeNonImages = false; removeNonImages = false;
DirectoryService.ExistOrCreate(extractPath); _directoryService.ExistOrCreate(extractPath);
_directoryService.CopyFileToDirectory(files[0].FilePath, extractPath); _directoryService.CopyFileToDirectory(files[0].FilePath, extractPath);
} }
} }
extractDi.Flatten(); _directoryService.Flatten(extractDi.FullName);
if (removeNonImages) if (removeNonImages)
{ {
extractDi.RemoveNonImages(); _directoryService.RemoveNonImages(extractDi.FullName);
} }
} }
public void Cleanup()
{
_logger.LogInformation("Performing cleanup of Cache directory");
EnsureCacheDirectory();
try
{
DirectoryService.ClearDirectory(DirectoryService.CacheDirectory);
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup");
}
_logger.LogInformation("Cache directory purged");
}
/// <summary> /// <summary>
/// Removes the cached files and folders for a set of chapterIds /// Removes the cached files and folders for a set of chapterIds
/// </summary> /// </summary>
/// <param name="chapterIds"></param> /// <param name="chapterIds"></param>
public void CleanupChapters(IEnumerable<int> chapterIds) public void CleanupChapters(IEnumerable<int> chapterIds)
{ {
_logger.LogInformation("Running Cache cleanup on Chapters");
foreach (var chapter in chapterIds) foreach (var chapter in chapterIds)
{ {
var di = new DirectoryInfo(GetCachePath(chapter)); _directoryService.ClearDirectory(GetCachePath(chapter));
if (di.Exists)
{
di.Delete(true);
} }
}
_logger.LogInformation("Cache directory purged");
} }
@ -184,46 +155,29 @@ namespace API.Services
/// <returns></returns> /// <returns></returns>
private string GetCachePath(int chapterId) private string GetCachePath(int chapterId)
{ {
return Path.GetFullPath(Path.Join(DirectoryService.CacheDirectory, $"{chapterId}/")); return _directoryService.FileSystem.Path.GetFullPath(_directoryService.FileSystem.Path.Join(_directoryService.CacheDirectory, $"{chapterId}/"));
} }
public async Task<(string path, MangaFile file)> GetCachedPagePath(Chapter chapter, int page) /// <summary>
/// Returns the absolute path of a cached page.
/// </summary>
/// <param name="chapter">Chapter entity with Files populated.</param>
/// <param name="page">Page number to look for</param>
/// <returns>Page filepath or empty if no files found.</returns>
public string GetCachedPagePath(Chapter chapter, int page)
{ {
// Calculate what chapter the page belongs to // Calculate what chapter the page belongs to
var pagesSoFar = 0;
var chapterFiles = chapter.Files ?? await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapter.Id);
foreach (var mangaFile in chapterFiles)
{
if (page <= (mangaFile.Pages + pagesSoFar))
{
var path = GetCachePath(chapter.Id); var path = GetCachePath(chapter.Id);
var files = DirectoryService.GetFilesWithExtension(path, Parser.Parser.ImageFileExtensions); var files = _directoryService.GetFilesWithExtension(path, Parser.Parser.ImageFileExtensions);
Array.Sort(files, _numericComparer); Array.Sort(files, _numericComparer);
if (files.Length == 0) if (files.Length == 0)
{ {
return (files.ElementAt(0), mangaFile); return string.Empty;
} }
// Since array is 0 based, we need to keep that in account (only affects last image) // Since array is 0 based, we need to keep that in account (only affects last image)
if (page == files.Length) return page == files.Length ? files.ElementAt(page - 1) : files.ElementAt(page);
{
return (files.ElementAt(page - 1 - pagesSoFar), mangaFile);
}
if (mangaFile.Format == MangaFormat.Image && mangaFile.Pages == 1)
{
// Each file is one page, meaning we should just get element at page
return (files.ElementAt(page), mangaFile);
}
return (files.ElementAt(page - pagesSoFar), mangaFile);
}
pagesSoFar += mangaFile.Pages;
}
return (string.Empty, null);
} }
} }
} }
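To make the rewritten lookup concrete: the page number acts as a 0-based index into the naturally sorted cache files, with a single guard for the last page. An illustration, assuming chapter.Id is 1 and the cache folder already holds four extracted images:

    // Files after Array.Sort with the NumericComparer: 0.png, 1.png, 2.png, 3.png
    cacheService.GetCachedPagePath(chapter, 0);  // .../config/cache/1/0.png
    cacheService.GetCachedPagePath(chapter, 3);  // .../config/cache/1/3.png
    cacheService.GetCachedPagePath(chapter, 4);  // page == files.Length, so the last file: .../config/cache/1/3.png
    // An empty cache folder returns string.Empty instead of throwing.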


@ -1,4 +1,5 @@
using System; using System;
using System.Collections;
using System.Collections.Generic; using System.Collections.Generic;
using System.Collections.Immutable; using System.Collections.Immutable;
using System.IO; using System.IO;
@ -6,30 +7,74 @@ using System.IO.Abstractions;
using System.Linq; using System.Linq;
using System.Text.RegularExpressions; using System.Text.RegularExpressions;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Interfaces.Services; using API.Comparators;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
namespace API.Services namespace API.Services
{ {
public interface IDirectoryService
{
IFileSystem FileSystem { get; }
string CacheDirectory { get; }
string CoverImageDirectory { get; }
string LogDirectory { get; }
string TempDirectory { get; }
string ConfigDirectory { get; }
/// <summary>
/// Lists out top-level folders for a given directory. Filters out System and Hidden folders.
/// </summary>
/// <param name="rootPath">Absolute path of directory to scan.</param>
/// <returns>List of folder names</returns>
IEnumerable<string> ListDirectory(string rootPath);
Task<byte[]> ReadFileAsync(string path);
bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath, string prepend = "");
bool Exists(string directory);
void CopyFileToDirectory(string fullFilePath, string targetDirectory);
int TraverseTreeParallelForEach(string root, Action<string> action, string searchPattern, ILogger logger);
bool IsDriveMounted(string path);
long GetTotalSize(IEnumerable<string> paths);
void ClearDirectory(string directoryPath);
void ClearAndDeleteDirectory(string directoryPath);
string[] GetFilesWithExtension(string path, string searchPatternExpression = "");
bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = "");
Dictionary<string, string> FindHighestDirectoriesFromFiles(IEnumerable<string> libraryFolders,
IList<string> filePaths);
IEnumerable<string> GetFoldersTillRoot(string rootPath, string fullPath);
IEnumerable<string> GetFiles(string path, string fileNameRegex = "", SearchOption searchOption = SearchOption.TopDirectoryOnly);
bool ExistOrCreate(string directoryPath);
void DeleteFiles(IEnumerable<string> files);
void RemoveNonImages(string directoryName);
void Flatten(string directoryName);
}
public class DirectoryService : IDirectoryService public class DirectoryService : IDirectoryService
{ {
public IFileSystem FileSystem { get; }
public string CacheDirectory { get; }
public string CoverImageDirectory { get; }
public string LogDirectory { get; }
public string TempDirectory { get; }
public string ConfigDirectory { get; }
private readonly ILogger<DirectoryService> _logger; private readonly ILogger<DirectoryService> _logger;
private readonly IFileSystem _fileSystem;
private static readonly Regex ExcludeDirectories = new Regex( private static readonly Regex ExcludeDirectories = new Regex(
@"@eaDir|\.DS_Store", @"@eaDir|\.DS_Store",
RegexOptions.Compiled | RegexOptions.IgnoreCase); RegexOptions.Compiled | RegexOptions.IgnoreCase);
public static readonly string TempDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "temp");
public static readonly string LogDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "logs");
public static readonly string CacheDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "cache");
public static readonly string CoverImageDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "covers");
public static readonly string BackupDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "backups"); public static readonly string BackupDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "backups");
public static readonly string ConfigDirectory = Path.Join(Directory.GetCurrentDirectory(), "config");
public DirectoryService(ILogger<DirectoryService> logger, IFileSystem fileSystem) public DirectoryService(ILogger<DirectoryService> logger, IFileSystem fileSystem)
{ {
_logger = logger; _logger = logger;
_fileSystem = fileSystem; FileSystem = fileSystem;
CoverImageDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "covers");
CacheDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "cache");
LogDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "logs");
TempDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config", "temp");
ConfigDirectory = FileSystem.Path.Join(FileSystem.Directory.GetCurrentDirectory(), "config");
} }
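With every path and filesystem call routed through the injected IFileSystem, the whole service can run against an in-memory filesystem in tests. A minimal sketch, assuming System.IO.Abstractions.TestingHelpers and NSubstitute; the paths are invented for illustration:

    var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
    {
        { "/manga/Series A/Vol 1/001.cbz", new MockFileData(string.Empty) }
    });
    var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);

    ds.Exists("/manga/Series A/Vol 1");                            // true
    ds.ExistOrCreate(ds.CacheDirectory);                           // creates config/cache under the mock's current directory
    ds.GetFilesWithExtension("/manga/Series A/Vol 1", @"\.cbz");   // the one archive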
/// <summary> /// <summary>
@ -40,16 +85,16 @@ namespace API.Services
/// <param name="searchPatternExpression">Regex version of search pattern (ie \.mp3|\.mp4). Defaults to * meaning all files.</param> /// <param name="searchPatternExpression">Regex version of search pattern (ie \.mp3|\.mp4). Defaults to * meaning all files.</param>
/// <param name="searchOption">SearchOption to use, defaults to TopDirectoryOnly</param> /// <param name="searchOption">SearchOption to use, defaults to TopDirectoryOnly</param>
/// <returns>List of file paths</returns> /// <returns>List of file paths</returns>
private static IEnumerable<string> GetFilesWithCertainExtensions(string path, private IEnumerable<string> GetFilesWithCertainExtensions(string path,
string searchPatternExpression = "", string searchPatternExpression = "",
SearchOption searchOption = SearchOption.TopDirectoryOnly) SearchOption searchOption = SearchOption.TopDirectoryOnly)
{ {
if (!Directory.Exists(path)) return ImmutableList<string>.Empty; if (!FileSystem.Directory.Exists(path)) return ImmutableList<string>.Empty;
var reSearchPattern = new Regex(searchPatternExpression, RegexOptions.IgnoreCase); var reSearchPattern = new Regex(searchPatternExpression, RegexOptions.IgnoreCase);
return Directory.EnumerateFiles(path, "*", searchOption) return FileSystem.Directory.EnumerateFiles(path, "*", searchOption)
.Where(file => .Where(file =>
reSearchPattern.IsMatch(Path.GetExtension(file)) && !Path.GetFileName(file).StartsWith(Parser.Parser.MacOsMetadataFileStartsWith)); reSearchPattern.IsMatch(FileSystem.Path.GetExtension(file)) && !FileSystem.Path.GetFileName(file).StartsWith(Parser.Parser.MacOsMetadataFileStartsWith));
} }
@ -61,17 +106,17 @@ namespace API.Services
/// <param name="rootPath"></param> /// <param name="rootPath"></param>
/// <param name="fullPath"></param> /// <param name="fullPath"></param>
/// <returns></returns> /// <returns></returns>
public static IEnumerable<string> GetFoldersTillRoot(string rootPath, string fullPath) public IEnumerable<string> GetFoldersTillRoot(string rootPath, string fullPath)
{ {
var separator = Path.AltDirectorySeparatorChar; var separator = FileSystem.Path.AltDirectorySeparatorChar;
if (fullPath.Contains(Path.DirectorySeparatorChar)) if (fullPath.Contains(FileSystem.Path.DirectorySeparatorChar))
{ {
fullPath = fullPath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); fullPath = fullPath.Replace(FileSystem.Path.DirectorySeparatorChar, FileSystem.Path.AltDirectorySeparatorChar);
} }
if (rootPath.Contains(Path.DirectorySeparatorChar)) if (rootPath.Contains(Path.DirectorySeparatorChar))
{ {
rootPath = rootPath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); rootPath = rootPath.Replace(FileSystem.Path.DirectorySeparatorChar, FileSystem.Path.AltDirectorySeparatorChar);
} }
@ -80,14 +125,15 @@ namespace API.Services
var root = rootPath.EndsWith(separator) ? rootPath.Substring(0, rootPath.Length - 1) : rootPath; var root = rootPath.EndsWith(separator) ? rootPath.Substring(0, rootPath.Length - 1) : rootPath;
var paths = new List<string>(); var paths = new List<string>();
// If a file is at the end of the path, remove it before we start processing folders // If a file is at the end of the path, remove it before we start processing folders
if (Path.GetExtension(path) != string.Empty) if (FileSystem.Path.GetExtension(path) != string.Empty)
{ {
path = path.Substring(0, path.LastIndexOf(separator)); path = path.Substring(0, path.LastIndexOf(separator));
} }
while (Path.GetDirectoryName(path) != Path.GetDirectoryName(root)) while (FileSystem.Path.GetDirectoryName(path) != Path.GetDirectoryName(root))
{ {
var folder = new DirectoryInfo(path).Name; //var folder = new DirectoryInfo(path).Name;
var folder = FileSystem.DirectoryInfo.FromDirectoryName(path).Name;
paths.Add(folder); paths.Add(folder);
path = path.Substring(0, path.LastIndexOf(separator)); path = path.Substring(0, path.LastIndexOf(separator));
} }
@ -102,33 +148,54 @@ namespace API.Services
/// <returns></returns> /// <returns></returns>
public bool Exists(string directory) public bool Exists(string directory)
{ {
var di = new DirectoryInfo(directory); var di = FileSystem.DirectoryInfo.FromDirectoryName(directory);
return di.Exists; return di.Exists;
} }
public static IEnumerable<string> GetFiles(string path, string searchPatternExpression = "", /// <summary>
SearchOption searchOption = SearchOption.TopDirectoryOnly) /// Get files given a path.
/// </summary>
/// <remarks>This will automatically filter out restricted files, like MacOsMetadata files</remarks>
/// <param name="path"></param>
/// <param name="fileNameRegex">An optional regex string to search against. Will use file path to match against.</param>
/// <param name="searchOption">Defaults to top level directory only, can be given all to provide recursive searching</param>
/// <returns></returns>
public IEnumerable<string> GetFiles(string path, string fileNameRegex = "", SearchOption searchOption = SearchOption.TopDirectoryOnly)
{ {
if (searchPatternExpression != string.Empty) // TODO: Refactor this and GetFilesWithCertainExtensions to use same implementation
if (!FileSystem.Directory.Exists(path)) return ImmutableList<string>.Empty;
if (fileNameRegex != string.Empty)
{ {
if (!Directory.Exists(path)) return ImmutableList<string>.Empty; var reSearchPattern = new Regex(fileNameRegex, RegexOptions.IgnoreCase);
var reSearchPattern = new Regex(searchPatternExpression, RegexOptions.IgnoreCase); return FileSystem.Directory.EnumerateFiles(path, "*", searchOption)
return Directory.EnumerateFiles(path, "*", searchOption)
.Where(file => .Where(file =>
reSearchPattern.IsMatch(file) && !file.StartsWith(Parser.Parser.MacOsMetadataFileStartsWith)); {
var fileName = FileSystem.Path.GetFileName(file);
return reSearchPattern.IsMatch(fileName) &&
!fileName.StartsWith(Parser.Parser.MacOsMetadataFileStartsWith);
});
} }
return !Directory.Exists(path) ? Array.Empty<string>() : Directory.GetFiles(path); return FileSystem.Directory.EnumerateFiles(path, "*", searchOption).Where(file =>
!FileSystem.Path.GetFileName(file).StartsWith(Parser.Parser.MacOsMetadataFileStartsWith));
} }
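The regex is now matched against the file name rather than the full path. A usage sketch, given a DirectoryService instance ds; the patterns are examples only:

    // Archives directly under the folder
    var archives = ds.GetFiles("/manga/Series A", @"\.(cbz|zip)$");

    // Every file in the tree, minus MacOS metadata files
    var everything = ds.GetFiles("/manga", searchOption: SearchOption.AllDirectories);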
/// <summary>
/// Copies a file into a directory. Does not maintain parent folder of file.
/// Will create target directory if doesn't exist. Automatically overwrites what is there.
/// </summary>
/// <param name="fullFilePath"></param>
/// <param name="targetDirectory"></param>
public void CopyFileToDirectory(string fullFilePath, string targetDirectory) public void CopyFileToDirectory(string fullFilePath, string targetDirectory)
{ {
try try
{ {
var fileInfo = new FileInfo(fullFilePath); var fileInfo = FileSystem.FileInfo.FromFileName(fullFilePath);
if (fileInfo.Exists) if (fileInfo.Exists)
{ {
fileInfo.CopyTo(Path.Join(targetDirectory, fileInfo.Name), true); ExistOrCreate(targetDirectory);
fileInfo.CopyTo(FileSystem.Path.Join(targetDirectory, fileInfo.Name), true);
} }
} }
catch (Exception ex) catch (Exception ex)
@ -138,19 +205,19 @@ namespace API.Services
} }
/// <summary> /// <summary>
/// Copies a Directory with all files and subdirectories to a target location /// Copies all files and subdirectories within a directory to a target location
/// </summary> /// </summary>
/// <param name="sourceDirName"></param> /// <param name="sourceDirName">Directory to copy from. Does not copy the parent folder</param>
/// <param name="destDirName"></param> /// <param name="destDirName">Destination to copy to. Will be created if doesn't exist</param>
/// <param name="searchPattern">Defaults to *, meaning all files</param> /// <param name="searchPattern">Defaults to all files</param>
/// <returns></returns> /// <returns>If was successful</returns>
/// <exception cref="DirectoryNotFoundException"></exception> /// <exception cref="DirectoryNotFoundException">Thrown when source directory does not exist</exception>
public static bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = "") public bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = "")
{ {
if (string.IsNullOrEmpty(sourceDirName)) return false; if (string.IsNullOrEmpty(sourceDirName)) return false;
// Get the subdirectories for the specified directory. // Get the subdirectories for the specified directory.
var dir = new DirectoryInfo(sourceDirName); var dir = FileSystem.DirectoryInfo.FromDirectoryName(sourceDirName);
if (!dir.Exists) if (!dir.Exists)
{ {
@ -165,17 +232,17 @@ namespace API.Services
ExistOrCreate(destDirName); ExistOrCreate(destDirName);
// Get the files in the directory and copy them to the new location. // Get the files in the directory and copy them to the new location.
var files = GetFilesWithExtension(dir.FullName, searchPattern).Select(n => new FileInfo(n)); var files = GetFilesWithExtension(dir.FullName, searchPattern).Select(n => FileSystem.FileInfo.FromFileName(n));
foreach (var file in files) foreach (var file in files)
{ {
var tempPath = Path.Combine(destDirName, file.Name); var tempPath = FileSystem.Path.Combine(destDirName, file.Name);
file.CopyTo(tempPath, false); file.CopyTo(tempPath, false);
} }
// If copying subdirectories, copy them and their contents to new location. // If copying subdirectories, copy them and their contents to new location.
foreach (var subDir in dirs) foreach (var subDir in dirs)
{ {
var tempPath = Path.Combine(destDirName, subDir.Name); var tempPath = FileSystem.Path.Combine(destDirName, subDir.Name);
CopyDirectoryToDirectory(subDir.FullName, tempPath); CopyDirectoryToDirectory(subDir.FullName, tempPath);
} }
@ -187,19 +254,20 @@ namespace API.Services
/// </summary> /// </summary>
/// <param name="path"></param> /// <param name="path"></param>
/// <returns></returns> /// <returns></returns>
public static bool IsDriveMounted(string path) public bool IsDriveMounted(string path)
{ {
return new DirectoryInfo(Path.GetPathRoot(path) ?? string.Empty).Exists; return FileSystem.DirectoryInfo.FromDirectoryName(FileSystem.Path.GetPathRoot(path) ?? string.Empty).Exists;
} }
public static string[] GetFilesWithExtension(string path, string searchPatternExpression = "") public string[] GetFilesWithExtension(string path, string searchPatternExpression = "")
{ {
// TODO: Use GetFiles instead
if (searchPatternExpression != string.Empty) if (searchPatternExpression != string.Empty)
{ {
return GetFilesWithCertainExtensions(path, searchPatternExpression).ToArray(); return GetFilesWithCertainExtensions(path, searchPatternExpression).ToArray();
} }
return !Directory.Exists(path) ? Array.Empty<string>() : Directory.GetFiles(path); return !FileSystem.Directory.Exists(path) ? Array.Empty<string>() : FileSystem.Directory.GetFiles(path);
} }
/// <summary> /// <summary>
@ -207,9 +275,9 @@ namespace API.Services
/// </summary> /// </summary>
/// <param name="paths"></param> /// <param name="paths"></param>
/// <returns>Total bytes</returns> /// <returns>Total bytes</returns>
public static long GetTotalSize(IEnumerable<string> paths) public long GetTotalSize(IEnumerable<string> paths)
{ {
return paths.Sum(path => new FileInfo(path).Length); return paths.Sum(path => FileSystem.FileInfo.FromFileName(path).Length);
} }
/// <summary> /// <summary>
@ -217,13 +285,13 @@ namespace API.Services
/// </summary> /// </summary>
/// <param name="directoryPath"></param> /// <param name="directoryPath"></param>
/// <returns></returns> /// <returns></returns>
public static bool ExistOrCreate(string directoryPath) public bool ExistOrCreate(string directoryPath)
{ {
var di = new DirectoryInfo(directoryPath); var di = FileSystem.DirectoryInfo.FromDirectoryName(directoryPath);
if (di.Exists) return true; if (di.Exists) return true;
try try
{ {
Directory.CreateDirectory(directoryPath); FileSystem.Directory.CreateDirectory(directoryPath);
} }
catch (Exception) catch (Exception)
{ {
@ -236,11 +304,11 @@ namespace API.Services
/// Deletes all files within the directory, then the directory itself. /// Deletes all files within the directory, then the directory itself.
/// </summary> /// </summary>
/// <param name="directoryPath"></param> /// <param name="directoryPath"></param>
public static void ClearAndDeleteDirectory(string directoryPath) public void ClearAndDeleteDirectory(string directoryPath)
{ {
if (!Directory.Exists(directoryPath)) return; if (!FileSystem.Directory.Exists(directoryPath)) return;
DirectoryInfo di = new DirectoryInfo(directoryPath); var di = FileSystem.DirectoryInfo.FromDirectoryName(directoryPath);
ClearDirectory(directoryPath); ClearDirectory(directoryPath);
@ -248,13 +316,13 @@ namespace API.Services
} }
/// <summary> /// <summary>
/// Deletes all files within the directory. /// Deletes all files and folders within the directory path
/// </summary> /// </summary>
/// <param name="directoryPath"></param> /// <param name="directoryPath"></param>
/// <returns></returns> /// <returns></returns>
public static void ClearDirectory(string directoryPath) public void ClearDirectory(string directoryPath)
{ {
var di = new DirectoryInfo(directoryPath); var di = FileSystem.DirectoryInfo.FromDirectoryName(directoryPath);
if (!di.Exists) return; if (!di.Exists) return;
foreach (var file in di.EnumerateFiles()) foreach (var file in di.EnumerateFiles())
@ -274,7 +342,7 @@ namespace API.Services
/// <param name="directoryPath"></param> /// <param name="directoryPath"></param>
/// <param name="prepend">An optional string to prepend to the target file's name</param> /// <param name="prepend">An optional string to prepend to the target file's name</param>
/// <returns></returns> /// <returns></returns>
public static bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath, string prepend = "", ILogger logger = null) public bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath, string prepend = "")
{ {
ExistOrCreate(directoryPath); ExistOrCreate(directoryPath);
string currentFile = null; string currentFile = null;
@ -283,36 +351,36 @@ namespace API.Services
foreach (var file in filePaths) foreach (var file in filePaths)
{ {
currentFile = file; currentFile = file;
var fileInfo = new FileInfo(file); var fileInfo = FileSystem.FileInfo.FromFileName(file);
if (fileInfo.Exists) if (fileInfo.Exists)
{ {
fileInfo.CopyTo(Path.Join(directoryPath, prepend + fileInfo.Name)); fileInfo.CopyTo(FileSystem.Path.Join(directoryPath, prepend + fileInfo.Name));
} }
else else
{ {
logger?.LogWarning("Tried to copy {File} but it doesn't exist", file); _logger.LogWarning("Tried to copy {File} but it doesn't exist", file);
} }
} }
} }
catch (Exception ex) catch (Exception ex)
{ {
logger?.LogError(ex, "Unable to copy {File} to {DirectoryPath}", currentFile, directoryPath); _logger.LogError(ex, "Unable to copy {File} to {DirectoryPath}", currentFile, directoryPath);
return false; return false;
} }
return true; return true;
} }
public bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath, string prepend = "") /// <summary>
{ /// Lists all directories in a root path. Will exclude Hidden or System directories.
return CopyFilesToDirectory(filePaths, directoryPath, prepend, _logger); /// </summary>
} /// <param name="rootPath"></param>
/// <returns></returns>
public IEnumerable<string> ListDirectory(string rootPath) public IEnumerable<string> ListDirectory(string rootPath)
{ {
if (!Directory.Exists(rootPath)) return ImmutableList<string>.Empty; if (!FileSystem.Directory.Exists(rootPath)) return ImmutableList<string>.Empty;
var di = new DirectoryInfo(rootPath); var di = FileSystem.DirectoryInfo.FromDirectoryName(rootPath);
var dirs = di.GetDirectories() var dirs = di.GetDirectories()
.Where(dir => !(dir.Attributes.HasFlag(FileAttributes.Hidden) || dir.Attributes.HasFlag(FileAttributes.System))) .Where(dir => !(dir.Attributes.HasFlag(FileAttributes.Hidden) || dir.Attributes.HasFlag(FileAttributes.System)))
.Select(d => d.Name).ToImmutableList(); .Select(d => d.Name).ToImmutableList();
@ -320,20 +388,26 @@ namespace API.Services
return dirs; return dirs;
} }
/// <summary>
/// Reads a file's contents into byte[]. Returns empty array if file doesn't exist.
/// </summary>
/// <param name="path"></param>
/// <returns></returns>
public async Task<byte[]> ReadFileAsync(string path) public async Task<byte[]> ReadFileAsync(string path)
{ {
if (!File.Exists(path)) return Array.Empty<byte>(); if (!FileSystem.File.Exists(path)) return Array.Empty<byte>();
return await File.ReadAllBytesAsync(path); return await FileSystem.File.ReadAllBytesAsync(path);
} }
/// <summary> /// <summary>
/// Finds the highest directories from a set of MangaFiles /// Finds the highest directories from a set of file paths. Does not return the root path, will always select the highest non-root path.
/// </summary> /// </summary>
/// <remarks>If the file paths do not contain anything from libraryFolders, this returns an empty dictionary back</remarks>
/// <param name="libraryFolders">List of top level folders which files belong to</param> /// <param name="libraryFolders">List of top level folders which files belong to</param>
/// <param name="filePaths">List of file paths that belong to libraryFolders</param> /// <param name="filePaths">List of file paths that belong to libraryFolders</param>
/// <returns></returns> /// <returns></returns>
public static Dictionary<string, string> FindHighestDirectoriesFromFiles(IEnumerable<string> libraryFolders, IList<string> filePaths) public Dictionary<string, string> FindHighestDirectoriesFromFiles(IEnumerable<string> libraryFolders, IList<string> filePaths)
{ {
var stopLookingForDirectories = false; var stopLookingForDirectories = false;
var dirs = new Dictionary<string, string>(); var dirs = new Dictionary<string, string>();
@ -385,9 +459,10 @@ namespace API.Services
// Data structure to hold names of subfolders to be examined for files. // Data structure to hold names of subfolders to be examined for files.
var dirs = new Stack<string>(); var dirs = new Stack<string>();
if (!Directory.Exists(root)) { if (!FileSystem.Directory.Exists(root)) {
throw new ArgumentException("The directory doesn't exist"); throw new ArgumentException("The directory doesn't exist");
} }
dirs.Push(root); dirs.Push(root);
while (dirs.Count > 0) { while (dirs.Count > 0) {
@ -396,7 +471,7 @@ namespace API.Services
string[] files; string[] files;
try { try {
subDirs = Directory.GetDirectories(currentDir).Where(path => ExcludeDirectories.Matches(path).Count == 0); subDirs = FileSystem.Directory.GetDirectories(currentDir).Where(path => ExcludeDirectories.Matches(path).Count == 0);
} }
// Thrown if we do not have discovery permission on the directory. // Thrown if we do not have discovery permission on the directory.
catch (UnauthorizedAccessException e) { catch (UnauthorizedAccessException e) {
@ -412,6 +487,7 @@ namespace API.Services
} }
try { try {
// TODO: Replace this with GetFiles - It's the same code
files = GetFilesWithCertainExtensions(currentDir, searchPattern) files = GetFilesWithCertainExtensions(currentDir, searchPattern)
.ToArray(); .ToArray();
} }
@ -457,6 +533,7 @@ namespace API.Services
if (ex is UnauthorizedAccessException) { if (ex is UnauthorizedAccessException) {
// Here we just output a message and go on. // Here we just output a message and go on.
Console.WriteLine(ex.Message); Console.WriteLine(ex.Message);
_logger.LogError(ex, "Unauthorized access on file");
return true; return true;
} }
// Handle other exceptions here if necessary... // Handle other exceptions here if necessary...
@ -478,13 +555,13 @@ namespace API.Services
/// Attempts to delete the files passed to it. Swallows exceptions. /// Attempts to delete the files passed to it. Swallows exceptions.
/// </summary> /// </summary>
/// <param name="files">Full path of files to delete</param> /// <param name="files">Full path of files to delete</param>
public static void DeleteFiles(IEnumerable<string> files) public void DeleteFiles(IEnumerable<string> files)
{ {
foreach (var file in files) foreach (var file in files)
{ {
try try
{ {
new FileInfo(file).Delete(); FileSystem.FileInfo.FromFileName(file).Delete();
} }
catch (Exception) catch (Exception)
{ {
@ -547,5 +624,78 @@ namespace API.Services
// Return formatted number with suffix // Return formatted number with suffix
return readable.ToString("0.## ") + suffix; return readable.ToString("0.## ") + suffix;
} }
/// <summary>
/// Removes all files except images from the directory. Includes sub directories.
/// </summary>
/// <param name="directoryName">Fully qualified directory</param>
public void RemoveNonImages(string directoryName)
{
DeleteFiles(GetFiles(directoryName, searchOption:SearchOption.AllDirectories).Where(file => !Parser.Parser.IsImage(file)));
}
/// <summary>
/// Flattens all files in subfolders to the passed directory recursively.
///
///
/// foo<para />
/// ├── 1.txt<para />
/// ├── 2.txt<para />
/// ├── 3.txt<para />
/// ├── 4.txt<para />
/// └── bar<para />
/// ├── 1.txt<para />
/// ├── 2.txt<para />
/// └── 5.txt<para />
///
/// becomes:<para />
/// foo<para />
/// ├── 1.txt<para />
/// ├── 2.txt<para />
/// ├── 3.txt<para />
/// ├── 4.txt<para />
/// ├── bar_1.txt<para />
/// ├── bar_2.txt<para />
/// └── bar_5.txt<para />
/// </summary>
/// <param name="directoryName">Fully qualified Directory name</param>
public void Flatten(string directoryName)
{
if (string.IsNullOrEmpty(directoryName) || !FileSystem.Directory.Exists(directoryName)) return;
var directory = FileSystem.DirectoryInfo.FromDirectoryName(directoryName);
var index = 0;
FlattenDirectory(directory, directory, ref index);
}
private void FlattenDirectory(IDirectoryInfo root, IDirectoryInfo directory, ref int directoryIndex)
{
if (!root.FullName.Equals(directory.FullName))
{
var fileIndex = 1;
foreach (var file in directory.EnumerateFiles().OrderBy(file => file.FullName, new NaturalSortComparer()))
{
if (file.Directory == null) continue;
var paddedIndex = Parser.Parser.PadZeros(directoryIndex + "");
// We need to rename the files so that after flattening, they are in the order we found them
var newName = $"{paddedIndex}_{Parser.Parser.PadZeros(fileIndex + "")}{file.Extension}";
var newPath = Path.Join(root.FullName, newName);
if (!File.Exists(newPath)) file.MoveTo(newPath);
fileIndex++;
}
directoryIndex++;
}
var sort = new NaturalSortComparer();
foreach (var subDirectory in directory.EnumerateDirectories().OrderBy(d => d.FullName, sort))
{
FlattenDirectory(root, subDirectory, ref directoryIndex);
}
}
} }
} }
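Having Flatten and RemoveNonImages on the instance is what allows the CacheService extraction flow to be exercised against a mock filesystem. A hedged, xUnit-style sketch with an invented layout; only the final file count is asserted, since the flattened names are derived from padded indexes:

    var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
    {
        { "/cache/1/001.jpg", new MockFileData(string.Empty) },
        { "/cache/1/subdir/002.jpg", new MockFileData(string.Empty) },
        { "/cache/1/subdir/info.txt", new MockFileData(string.Empty) }
    });
    var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);

    ds.Flatten("/cache/1");          // subdir's files are renamed and moved up to /cache/1
    ds.RemoveNonImages("/cache/1");  // the flattened .txt file is deleted

    Assert.Equal(2, fileSystem.Directory.GetFiles("/cache/1").Length);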


@ -3,18 +3,17 @@ using System.IO;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Entities; using API.Entities;
using API.Interfaces.Services;
using Microsoft.AspNetCore.StaticFiles; using Microsoft.AspNetCore.StaticFiles;
namespace API.Services namespace API.Services;
public interface IDownloadService
{ {
public interface IDownloadService
{
Task<(byte[], string, string)> GetFirstFileDownload(IEnumerable<MangaFile> files); Task<(byte[], string, string)> GetFirstFileDownload(IEnumerable<MangaFile> files);
string GetContentTypeFromFile(string filepath); string GetContentTypeFromFile(string filepath);
} }
public class DownloadService : IDownloadService public class DownloadService : IDownloadService
{ {
private readonly IDirectoryService _directoryService; private readonly IDirectoryService _directoryService;
private readonly FileExtensionContentTypeProvider _fileTypeProvider = new FileExtensionContentTypeProvider(); private readonly FileExtensionContentTypeProvider _fileTypeProvider = new FileExtensionContentTypeProvider();
@ -54,5 +53,4 @@ namespace API.Services
return contentType; return contentType;
} }
}
} }


@ -1,7 +1,6 @@
using System; using System;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Interfaces;
using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Hosting;


@ -3,19 +3,40 @@ using System.IO;
using System.Linq; using System.Linq;
using API.Comparators; using API.Comparators;
using API.Entities; using API.Entities;
using API.Interfaces.Services;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using NetVips; using NetVips;
namespace API.Services namespace API.Services;
{
public class ImageService : IImageService public interface IImageService
{ {
void ExtractImages(string fileFilePath, string targetDirectory, int fileCount);
string GetCoverImage(string path, string fileName);
string GetCoverFile(MangaFile file);
/// <summary>
/// Creates a Thumbnail version of an image
/// </summary>
/// <param name="path">Path to the image file</param>
/// <returns>File name with extension of the file. This will always write to <see cref="DirectoryService.CoverImageDirectory"/></returns>
//string CreateThumbnail(string path, string fileName);
/// <summary>
/// Creates a Thumbnail version of a base64 image
/// </summary>
/// <param name="encodedImage">base64 encoded image</param>
/// <returns>File name with extension of the file. This will always write to <see cref="DirectoryService.CoverImageDirectory"/></returns>
string CreateThumbnailFromBase64(string encodedImage, string fileName);
string WriteCoverThumbnail(Stream stream, string fileName);
}
public class ImageService : IImageService
{
private readonly ILogger<ImageService> _logger; private readonly ILogger<ImageService> _logger;
private readonly IDirectoryService _directoryService;
public const string ChapterCoverImageRegex = @"v\d+_c\d+"; public const string ChapterCoverImageRegex = @"v\d+_c\d+";
public const string SeriesCoverImageRegex = @"seres\d+"; public const string SeriesCoverImageRegex = @"series_\d+";
public const string CollectionTagCoverImageRegex = @"tag\d+"; public const string CollectionTagCoverImageRegex = @"tag_\d+";
/// <summary> /// <summary>
@ -23,9 +44,24 @@ namespace API.Services
/// </summary> /// </summary>
private const int ThumbnailWidth = 320; private const int ThumbnailWidth = 320;
public ImageService(ILogger<ImageService> logger) public ImageService(ILogger<ImageService> logger, IDirectoryService directoryService)
{ {
_logger = logger; _logger = logger;
_directoryService = directoryService;
}
public void ExtractImages(string fileFilePath, string targetDirectory, int fileCount = 1)
{
_directoryService.ExistOrCreate(targetDirectory);
if (fileCount == 1)
{
_directoryService.CopyFileToDirectory(fileFilePath, targetDirectory);
}
else
{
_directoryService.CopyDirectoryToDirectory(Path.GetDirectoryName(fileFilePath), targetDirectory,
Parser.Parser.ImageFileExtensions);
}
} }
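ExtractImages centralizes what CacheService previously did inline: a single loose image is copied on its own, while a chapter made of many loose images copies the whole parent folder of images. A usage sketch with illustrative paths:

    // Single-file chapter: only the one image lands in the target folder
    imageService.ExtractImages("/manga/One Shot/page.jpg", "/cache/5", fileCount: 1);

    // Multi-file chapter: the parent folder's images are copied over
    imageService.ExtractImages("/manga/Series A/Vol 1/001.jpg", "/cache/6", fileCount: 12);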
/// <summary> /// <summary>
@ -42,7 +78,7 @@ namespace API.Services
return null; return null;
} }
var firstImage = DirectoryService.GetFilesWithExtension(directory, Parser.Parser.ImageFileExtensions) var firstImage = _directoryService.GetFilesWithExtension(directory, Parser.Parser.ImageFileExtensions)
.OrderBy(f => f, new NaturalSortComparer()).FirstOrDefault(); .OrderBy(f => f, new NaturalSortComparer()).FirstOrDefault();
return firstImage; return firstImage;
@ -54,7 +90,11 @@ namespace API.Services
try try
{ {
return CreateThumbnail(path, fileName); //return CreateThumbnail(path, fileName);
using var thumbnail = Image.Thumbnail(path, ThumbnailWidth);
var filename = fileName + ".png";
thumbnail.WriteToFile(_directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, filename));
return filename;
} }
catch (Exception ex) catch (Exception ex)
{ {
@ -65,22 +105,22 @@ namespace API.Services
} }
/// <inheritdoc /> /// <inheritdoc />
public string CreateThumbnail(string path, string fileName) // public string CreateThumbnail(string path, string fileName)
{ // {
try // try
{ // {
using var thumbnail = Image.Thumbnail(path, ThumbnailWidth); // using var thumbnail = Image.Thumbnail(path, ThumbnailWidth);
var filename = fileName + ".png"; // var filename = fileName + ".png";
thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, filename)); // thumbnail.WriteToFile(_directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, filename));
return filename; // return filename;
} // }
catch (Exception e) // catch (Exception e)
{ // {
_logger.LogError(e, "Error creating thumbnail from url"); // _logger.LogError(e, "Error creating thumbnail from url");
} // }
//
return string.Empty; // return string.Empty;
} // }
/// <summary> /// <summary>
/// Creates a thumbnail out of a memory stream and saves to <see cref="DirectoryService.CoverImageDirectory"/> with the passed /// Creates a thumbnail out of a memory stream and saves to <see cref="DirectoryService.CoverImageDirectory"/> with the passed
@ -89,11 +129,11 @@ namespace API.Services
/// <param name="stream">Stream to write to disk. Ensure this is rewinded.</param> /// <param name="stream">Stream to write to disk. Ensure this is rewinded.</param>
/// <param name="fileName">filename to save as without extension</param> /// <param name="fileName">filename to save as without extension</param>
/// <returns>File name with extension of the file. This will always write to <see cref="DirectoryService.CoverImageDirectory"/></returns> /// <returns>File name with extension of the file. This will always write to <see cref="DirectoryService.CoverImageDirectory"/></returns>
public static string WriteCoverThumbnail(Stream stream, string fileName) public string WriteCoverThumbnail(Stream stream, string fileName)
{ {
using var thumbnail = Image.ThumbnailStream(stream, ThumbnailWidth); using var thumbnail = Image.ThumbnailStream(stream, ThumbnailWidth);
var filename = fileName + ".png"; var filename = fileName + ".png";
thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, fileName + ".png")); thumbnail.WriteToFile(_directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, fileName + ".png"));
return filename; return filename;
} }
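WriteCoverThumbnail is now an instance method, so the generated file always lands under the injected DirectoryService's CoverImageDirectory. A short usage sketch; the stream source and file name are illustrative:

    using var stream = File.OpenRead("/manga/Series A/Vol 1/cover.jpg");
    var coverFile = imageService.WriteCoverThumbnail(stream, "v01_c01");
    // coverFile == "v01_c01.png", written into directoryService.CoverImageDirectory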
@ -105,7 +145,7 @@ namespace API.Services
{ {
using var thumbnail = Image.ThumbnailBuffer(Convert.FromBase64String(encodedImage), ThumbnailWidth); using var thumbnail = Image.ThumbnailBuffer(Convert.FromBase64String(encodedImage), ThumbnailWidth);
var filename = fileName + ".png"; var filename = fileName + ".png";
thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, fileName + ".png")); thumbnail.WriteToFile(_directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, fileName + ".png"));
return filename; return filename;
} }
catch (Exception e) catch (Exception e)
@ -146,5 +186,4 @@ namespace API.Services
{ {
return $"tag{tagId}"; return $"tag{tagId}";
} }
}
} }
