Kavita/API.Tests/Services/CacheServiceTests.cs

using System;
using System.Collections.Generic;
using System.Data.Common;
using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Metadata;
using API.Entities;
using API.Entities.Enums;
using API.Parser;
using API.Services;
using API.SignalR;
using AutoMapper;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Services;
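// Lightweight IReadingItemService stub for CacheService tests that never need real parsing or extraction.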
internal class MockReadingItemServiceForCacheService : IReadingItemService
{
private readonly DirectoryService _directoryService;
public MockReadingItemServiceForCacheService(DirectoryService directoryService)
{
_directoryService = directoryService;
}
public ComicInfo GetComicInfo(string filePath)
{
return null;
}
public int GetNumberOfPages(string filePath, MangaFormat format)
{
return 1;
}
public string GetCoverImage(string fileFilePath, string fileName, MangaFormat format, bool saveAsWebP)
{
return string.Empty;
}
public void Extract(string fileFilePath, string targetDirectory, MangaFormat format, int imageCount = 1)
{
throw new System.NotImplementedException();
}
public ParserInfo Parse(string path, string rootPath, LibraryType type)
{
throw new System.NotImplementedException();
}
public ParserInfo ParseFile(string path, string rootPath, LibraryType type)
{
throw new System.NotImplementedException();
}
}
public class CacheServiceTests : IDisposable
{
private readonly ILogger<CacheService> _logger = Substitute.For<ILogger<CacheService>>();
private readonly IUnitOfWork _unitOfWork;
private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();
private readonly DbConnection _connection;
private readonly DataContext _context;
private const string CacheDirectory = "C:/kavita/config/cache/";
private const string CoverImageDirectory = "C:/kavita/config/covers/";
private const string BackupDirectory = "C:/kavita/config/backups/";
private const string DataDirectory = "C:/data/";
public CacheServiceTests()
{
var contextOptions = new DbContextOptionsBuilder()
.UseSqlite(CreateInMemoryDatabase())
.Options;
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
_context = new DataContext(contextOptions);
Task.Run(SeedDb).GetAwaiter().GetResult();
_unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
}
#region Setup
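// A SQLite in-memory database only lives while its connection stays open, so a single open connection is shared by the test DataContext.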
private static DbConnection CreateInMemoryDatabase()
{
var connection = new SqliteConnection("Filename=:memory:");
connection.Open();
return connection;
}
public void Dispose() => _connection.Dispose();
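// Applies migrations, seeds server settings, points the cache/backup directories at the mock paths, and adds a single Manga library rooted at C:/data/.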
private async Task<bool> SeedDb()
{
await _context.Database.MigrateAsync();
var filesystem = CreateFileSystem();
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
setting.Value = CacheDirectory;
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting.Value = BackupDirectory;
_context.ServerSetting.Update(setting);
_context.Library.Add(new Library()
{
Name = "Manga",
Folders = new List<FolderPath>()
{
new FolderPath()
{
Path = "C:/data/"
}
}
});
return await _context.SaveChangesAsync() > 0;
}
private async Task ResetDB()
{
_context.Series.RemoveRange(_context.Series.ToList());
await _context.SaveChangesAsync();
}
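// Builds a mock filesystem with the config, cache, covers, backups, and data directories already created.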
private static MockFileSystem CreateFileSystem()
{
var fileSystem = new MockFileSystem();
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
fileSystem.AddDirectory("C:/kavita/config/");
fileSystem.AddDirectory(CacheDirectory);
fileSystem.AddDirectory(CoverImageDirectory);
fileSystem.AddDirectory(BackupDirectory);
fileSystem.AddDirectory(DataDirectory);
return fileSystem;
}
#endregion
#region Ensure
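// Ensure() should be a no-op when the chapter's cache directory already exists, extracting nothing.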
[Fact]
public async Task Ensure_DirectoryAlreadyExists_DontExtractAnything()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData(""));
filesystem.AddDirectory($"{CacheDirectory}1/");
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
await ResetDB();
var s = DbFactory.Series("Test");
var v = DbFactory.Volume("1");
var c = new Chapter()
{
Number = "1",
Range = "1",
Files = new List<MangaFile>()
{
new MangaFile()
{
Format = MangaFormat.Archive,
FilePath = $"{DataDirectory}Test v1.zip",
}
}
};
v.Chapters.Add(c);
s.Volumes.Add(v);
s.LibraryId = 1;
_context.Series.Add(s);
await _context.SaveChangesAsync();
await cleanupService.Ensure(1);
Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories));
}
// [Fact]
// public async Task Ensure_DirectoryAlreadyExists_ExtractsImages()
// {
// // TODO: Figure out a way to test this
// var filesystem = CreateFileSystem();
// filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData(""));
// filesystem.AddDirectory($"{CacheDirectory}1/");
// var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
// var archiveService = Substitute.For<IArchiveService>();
// archiveService.ExtractArchive($"{DataDirectory}Test v1.zip",
// filesystem.Path.Join(CacheDirectory, "1"));
// var cleanupService = new CacheService(_logger, _unitOfWork, ds,
// new ReadingItemService(archiveService, Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds));
//
// await ResetDB();
// var s = DbFactory.Series("Test");
// var v = DbFactory.Volume("1");
// var c = new Chapter()
// {
// Number = "1",
// Files = new List<MangaFile>()
// {
// new MangaFile()
// {
// Format = MangaFormat.Archive,
// FilePath = $"{DataDirectory}Test v1.zip",
// }
// }
// };
// v.Chapters.Add(c);
// s.Volumes.Add(v);
// s.LibraryId = 1;
// _context.Series.Add(s);
//
// await _context.SaveChangesAsync();
//
// await cleanupService.Ensure(1);
// Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories));
// }
#endregion
#region CleanupChapters
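// CleanupChapters() should delete every cached file for the given chapter ids.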
[Fact]
public void CleanupChapters_AllFilesShouldBeDeleted()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{CacheDirectory}1/001.jpg", new MockFileData(""));
filesystem.AddFile($"{CacheDirectory}1/002.jpg", new MockFileData(""));
filesystem.AddFile($"{CacheDirectory}3/003.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
cleanupService.CleanupChapters(new []{1, 3});
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption:SearchOption.AllDirectories));
}
#endregion
#region GetCachedEpubFile
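// For epub-backed chapters, GetCachedFile() should hand back the path of the chapter's first file.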
[Fact]
public void GetCachedEpubFile_ShouldReturnFirstEpub()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.epub", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.epub", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
var c = new Chapter()
{
Number = "1",
Range = "1",
Files = new List<MangaFile>()
{
new MangaFile()
{
FilePath = $"{DataDirectory}1.epub"
},
new MangaFile()
{
FilePath = $"{DataDirectory}2.epub"
}
}
};
cs.GetCachedFile(c);
Assert.Same($"{DataDirectory}1.epub", cs.GetCachedFile(c));
}
#endregion
#region GetCachedPagePath
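// These tests rely on DirectoryService.Flatten renaming cached pages into an indexed 000_001.jpg style layout, which is what GetCachedPagePath resolves page numbers against.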
[Fact]
public void GetCachedPagePath_ReturnsEmptyIfNoFiles()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));
var c = new Chapter()
{
Id = 1,
Number = "1",
Range = "1",
Files = new List<MangaFile>()
};
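// Files is intentionally empty, so the loop below adds no cached pages and the lookup has nothing to return.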
var fileIndex = 0;
foreach (var file in c.Files)
{
for (var i = 0; i < file.Pages - 1; i++)
{
filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData(""));
}
fileIndex++;
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
// Flatten the cache directory into the indexed layout the page lookup expects
ds.Flatten($"{CacheDirectory}1/");
var path = cs.GetCachedPagePath(c.Id, 11);
Assert.Equal(string.Empty, path);
}
[Fact]
public void GetCachedPagePath_GetFileFromFirstFile()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));
var c = new Chapter()
{
Id = 1,
Number = "1",
Range = "1",
Files = new List<MangaFile>()
{
new MangaFile()
{
Id = 1,
FilePath = $"{DataDirectory}1.zip",
Pages = 10
},
new MangaFile()
{
Id = 2,
FilePath = $"{DataDirectory}2.zip",
Pages = 5
}
}
};
var fileIndex = 0;
foreach (var file in c.Files)
{
for (var i = 0; i < file.Pages; i++)
{
filesystem.AddFile($"{CacheDirectory}1/00{fileIndex}_00{i+1}.jpg", new MockFileData(""));
}
fileIndex++;
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
// Flatten the cache directory into the indexed layout the page lookup expects
ds.Flatten($"{CacheDirectory}1/");
Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_001.jpg"), ds.FileSystem.Path.GetFullPath(cs.GetCachedPagePath(c.Id, 0)));
}
[Fact]
public void GetCachedPagePath_GetLastPageFromSingleFile()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
var c = new Chapter()
{
Id = 1,
Number = "1",
Range = "1",
Files = new List<MangaFile>()
{
new MangaFile()
{
Id = 1,
FilePath = $"{DataDirectory}1.zip",
Pages = 10
}
}
};
c.Pages = c.Files.Sum(f => f.Pages);
var fileIndex = 0;
foreach (var file in c.Files)
{
for (var i = 0; i < file.Pages; i++)
{
filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData(""));
}
fileIndex++;
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
// Flatten the cache directory into the indexed layout the page lookup expects
ds.Flatten($"{CacheDirectory}1/");
// Pages are 0-indexed, so requesting page c.Pages (10) resolves to the 10th and last cached image
var path = cs.GetCachedPagePath(c.Id, c.Pages);
Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/000_0{c.Pages}.jpg"), ds.FileSystem.Path.GetFullPath(path));
}
[Fact]
public void GetCachedPagePath_GetFileFromSecondFile()
{
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.zip", new MockFileData(""));
var c = new Chapter()
{
Id = 1,
Number = "1",
Range = "1",
Files = new List<MangaFile>()
{
new MangaFile()
{
Id = 1,
FilePath = $"{DataDirectory}1.zip",
Pages = 10
},
new MangaFile()
{
Id = 2,
FilePath = $"{DataDirectory}2.zip",
Pages = 5
}
}
};
var fileIndex = 0;
foreach (var file in c.Files)
{
for (var i = 0; i < file.Pages; i++)
{
filesystem.AddFile($"{CacheDirectory}1/{fileIndex}/{i+1}.jpg", new MockFileData(""));
}
fileIndex++;
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds), Substitute.For<IBookmarkService>());
// Flatten the cache directory into the indexed layout the page lookup expects
ds.Flatten($"{CacheDirectory}1/");
// Pages are 0-indexed, so page 10 falls past the first file's 10 pages and resolves to the first page of the second file
var path = cs.GetCachedPagePath(c.Id, 10);
Assert.Equal(ds.FileSystem.Path.GetFullPath($"{CacheDirectory}/1/001_001.jpg"), ds.FileSystem.Path.GetFullPath(path));
}
#endregion
#region ExtractChapterFiles
// [Fact]
// public void ExtractChapterFiles_ShouldExtractOnlyImages()
// {
// const string testDirectory = "/manga/";
// var fileSystem = new MockFileSystem();
// for (var i = 0; i < 10; i++)
// {
// fileSystem.AddFile($"{testDirectory}file_{i}.zip", new MockFileData(""));
// }
//
// fileSystem.AddDirectory(CacheDirectory);
//
// var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
// var cs = new CacheService(_logger, _unitOfWork, ds,
// new MockReadingItemServiceForCacheService(ds));
//
//
// cs.ExtractChapterFiles(CacheDirectory, new List<MangaFile>()
// {
// new MangaFile()
// {
// ChapterId = 1,
// Format = MangaFormat.Archive,
// Pages = 2,
// FilePath =
// }
// })
// }
#endregion
}