Kavita/API.Tests/Services/ScannerServiceTests.cs
Joseph Milazzo a01613f80f
EPUB Support (#178)
* Added book filetype detection and reorganized tests due to size of file

* Added ability to get basic Parse Info from Book and Pages.

* We can now scan books and get them in a library with cover images.

* Take the first image in the epub if the cover isn't set.

* Implemented the ability to unzip the epub to cache. Implemented a test api to load html files.

* Just some test code to figure out how to approach this.

* Fixed some merge conflicts

* Removed some dead code from merge

* Snapshot: I can now load everything properly into the UI by rewriting the urls before I send them back. I don't notice any lag from this method. It can be optimized further.

* Implemented a way to load the content in the browser not via an iframe.

* Added a note

* Anchor mappings is complete. New anchors are updated so references now resolve to javascript:void() for UI to take care of internally loading and the appropriate page is mapped to it. Anchors that are external have target="_blank" added so they don't force you out of the app and styles are of course inlined.

* Oops i need this

* Table of contents api implemented (rough) and some small enhancements to codebase for books.

* GetBookPageResources now only loads files from within the book. Nested chapter list support and images now use html parsing instead of string parsing.

* Fonts now are remapped to load from endpoint.

* book-resources now uses a key, ensuring the file is in proper format for lookup. Changed chapter list based on structure with one HEADER and nested chapters.

* Properly handle svg resource requests and when there are part anchors that are clickable, make sure we handle them in the UI by adding a kavita-page handler.

* Add Chapter group page even if one isn't set by using first page (without part) from nestedChildren.

* Added extra debug code for issue #163.

* Added new user preferences for books and updated the css so we scope it to our reading section.

* Cleaned up style code

* Implemented ability to save book preferences and some cleanup on existing apis.

* Added an api for checking if a user has read something in a library type before.

* Forgot to make sure the has reading progress is against a user lol.

* Remove cacheservice code for books, since we use an in-memory method

* Handle svg images as well

* Enhanced cover image extraction to check for a "cover" image if the cover image wasn't set in OPF before falling back to the first image.

* Fixed an issue with special books not properly generating metadata due to not having filename set.

* Cleanup, removed warmup task code from startup/program and changed taskscheduler to schedule tasks on startup only (or if tasks are changed from UI).

* Code cleanup

* Code cleanup

* So much code. Lots of refactors to try to test scanner service. Moved a lot of the queries into Extensions to allow to easier test, even though it's hacky. Support @font-face src:url swaps with ' and ". Source summary information from epubs.

* Well...baseURL needs to come from BE and not from UI lol.

* Adjusted migrations so default values match Entity

* Removed comment

* I think I finally fixed #163! The issue was that when i checked if it had a parserInfo, i wasn't considering that the chapter range might have a - in it (0-6) and so when the code to check if range could parse out a number failed, it treated it like a special and checked range against info's filename.

* Some bugfixes

* Lots of testing, extracting code to make it easier to test. This code is buggy, but fixed a bug where 1) If we changed the normalization code, we would remove the whole db during a scan and 2) We weren't actually removing series properly.

Other than that, code is being extracted to remove duplication and centralize logic.

* More code cleanup and test cleanup to ensure scan loop is working as expected and matches expectations from tests.

* Cleaned up the code and made it so if I change normalization, which I do in this branch, it won't break existing DBs.

* Some comic parser changes for partial chapter support.

* Added some code for directory service and scanner service along with python code to generate test files (not used yet). Fixed up all the tests.

* Code smells
2021-04-28 16:16:22 -05:00

274 lines
11 KiB
C#

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Data.Common;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
using API.Services;
using API.Services.Tasks;
using API.Tests.Helpers;
using AutoMapper;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services
{
    /// <summary>
    /// Unit tests for <see cref="ScannerService"/>, backed by an in-memory SQLite
    /// database so repository calls hit a real (migrated and seeded) EF Core context.
    /// Collaborating services are NSubstitute mocks.
    /// </summary>
    public class ScannerServiceTests : IDisposable
    {
        private readonly ITestOutputHelper _testOutputHelper;
        private readonly ScannerService _scannerService;
        private readonly ILogger<ScannerService> _logger = Substitute.For<ILogger<ScannerService>>();
        private readonly IUnitOfWork _unitOfWork;
        private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
        private readonly IBookService _bookService = Substitute.For<IBookService>();
        private readonly IMetadataService _metadataService;
        private readonly ILogger<MetadataService> _metadataLogger = Substitute.For<ILogger<MetadataService>>();
        private readonly DbConnection _connection;
        private readonly DataContext _context;

        public ScannerServiceTests(ITestOutputHelper testOutputHelper)
        {
            var contextOptions = new DbContextOptionsBuilder()
                .UseSqlite(CreateInMemoryDatabase())
                .Options;
            // Keep a handle on the SQLite connection: an in-memory database lives only
            // as long as this connection stays open, so it is disposed last.
            _connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
            _context = new DataContext(contextOptions);
            // xUnit constructors cannot be async, so block on the seed here.
            Task.Run(SeedDb).GetAwaiter().GetResult();

            _unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null,
                Substitute.For<ILogger<UnitOfWork>>());
            _testOutputHelper = testOutputHelper;
            // NOTE(review): substituting the concrete MetadataService (not the interface)
            // keeps its real behavior unless members are virtual — confirm this is intended.
            _metadataService = Substitute.For<MetadataService>(_unitOfWork, _metadataLogger, _archiveService, _bookService);
            _scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService, _bookService);
        }

        /// <summary>
        /// Applies migrations, seeds server settings, and registers a single "Manga"
        /// library pointing at the on-disk test data folder.
        /// </summary>
        /// <returns>True when at least one row was written.</returns>
        private async Task<bool> SeedDb()
        {
            await _context.Database.MigrateAsync();
            await Seed.SeedSettings(_context);
            _context.Library.Add(new Library()
            {
                Name = "Manga",
                Folders = new List<FolderPath>()
                {
                    new FolderPath()
                    {
                        Path = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Manga")
                    }
                }
            });
            return await _context.SaveChangesAsync() > 0;
        }

        [Fact]
        public void FindSeriesNotOnDisk_Should_RemoveNothing_Test()
        {
            // Every existing series has a matching ParserInfo (name comparison is
            // normalized/case-insensitive), so nothing should be flagged for removal.
            var infos = new Dictionary<string, List<ParserInfo>>();
            AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black"});
            AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1"});
            AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10"});

            var existingSeries = new List<Series>();
            existingSeries.Add(new Series()
            {
                Name = "Cage of Eden",
                LocalizedName = "Cage of Eden",
                OriginalName = "Cage of Eden",
                NormalizedName = API.Parser.Parser.Normalize("Cage of Eden")
            });
            existingSeries.Add(new Series()
            {
                Name = "Darker Than Black",
                LocalizedName = "Darker Than Black",
                OriginalName = "Darker Than Black",
                NormalizedName = API.Parser.Parser.Normalize("Darker Than Black")
            });

            Assert.Empty(_scannerService.FindSeriesNotOnDisk(existingSeries, infos));
        }

        [Theory]
        [InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")]
        [InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")]
        [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker than Black")]
        [InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")]
        public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected)
        {
            // MergeName should prefer the name already collected over the newly
            // parsed variant when they normalize to the same series.
            var collectedSeries = new ConcurrentDictionary<string, List<ParserInfo>>();
            foreach (var seriesName in existingSeriesNames)
            {
                AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName});
            }

            var actualName = _scannerService.MergeName(collectedSeries, new ParserInfo()
            {
                Series = parsedInfoName
            });

            Assert.Equal(expected, actualName);
        }

        [Fact]
        public void RemoveMissingSeries_Should_RemoveSeries()
        {
            var existingSeries = new List<Series>()
            {
                EntityFactory.CreateSeries("Darker than Black Vol 1"),
                EntityFactory.CreateSeries("Darker than Black"),
                EntityFactory.CreateSeries("Beastars"),
            };
            var missingSeries = new List<Series>()
            {
                EntityFactory.CreateSeries("Darker than Black Vol 1"),
            };

            existingSeries = ScannerService.RemoveMissingSeries(existingSeries, missingSeries, out var removeCount).ToList();

            Assert.DoesNotContain(missingSeries[0].Name, existingSeries.Select(s => s.Name));
            Assert.Equal(missingSeries.Count, removeCount);
        }

        /// <summary>
        /// Adds <paramref name="info"/> to the list keyed by its Series name, creating
        /// the list when absent and skipping duplicates. When the dictionary is a
        /// ConcurrentDictionary, the thread-safe AddOrUpdate path is used instead.
        /// </summary>
        private void AddToParsedInfo(IDictionary<string, List<ParserInfo>> collectedSeries, ParserInfo info)
        {
            // BUGFIX: the original compared GetType() against the *open* generic
            // typeof(ConcurrentDictionary<,>), which never equals a closed constructed
            // runtime type, so the concurrent branch was unreachable. A pattern match
            // against the closed generic type takes the intended path.
            if (collectedSeries is ConcurrentDictionary<string, List<ParserInfo>> concurrentMap)
            {
                concurrentMap.AddOrUpdate(info.Series, new List<ParserInfo>() {info}, (_, oldValue) =>
                {
                    oldValue ??= new List<ParserInfo>();
                    if (!oldValue.Contains(info))
                    {
                        oldValue.Add(info);
                    }
                    return oldValue;
                });
            }
            else if (!collectedSeries.TryGetValue(info.Series, out var list))
            {
                collectedSeries.Add(info.Series, new List<ParserInfo>() {info});
            }
            else if (!list.Contains(info))
            {
                list.Add(info);
            }
        }

        /// <summary>
        /// Placeholder retained from an earlier refactor; the original assertions
        /// depended on a library mock that no longer exists. TODO: reimplement against
        /// the seeded in-memory library.
        /// </summary>
        [Fact]
        public void Should_CreateSeries_Test()
        {
            Assert.True(true);
        }

        /// <summary>
        /// Opens a shared in-memory SQLite connection; the database persists only while
        /// this connection remains open.
        /// </summary>
        private static DbConnection CreateInMemoryDatabase()
        {
            var connection = new SqliteConnection("Filename=:memory:");
            connection.Open();
            return connection;
        }

        /// <summary>
        /// Disposes the EF context before the underlying connection; the original
        /// leaked the DataContext by disposing only the connection.
        /// </summary>
        public void Dispose()
        {
            _context.Dispose();
            _connection.Dispose();
        }
    }
}