Merge branch 'develop' into bugfix/duplicate-volume-markers

# Conflicts:
#	API.Tests/Parsing/MangaParsingTests.cs
#	API/Services/Tasks/Scanner/Parser/Parser.cs
Amelia 2025-08-26 17:40:27 +02:00
commit 99f1c7225a
GPG Key ID: 9E3C6B5837A7B0BA
199 changed files with 23159 additions and 4680 deletions


@ -28,7 +28,7 @@ body:
label: Kavita Version Number - If you don't see your version number listed, please update Kavita and see if your issue still persists.
multiple: false
options:
- 0.8.6.2 - Stable
- 0.8.7 - Stable
- Nightly Testing Branch
validations:
required: true

.github/copilot-instructions.md (vendored, new file, 19 lines)

@ -0,0 +1,19 @@
## Coding Guidelines
- Any time you don't use {} on an if statement, it must be on one line and MUST be a jump operation (return/continue/break). All other times, you must use {} and put the statement on two lines (see the sketch after this list).
- Use var whenever possible
- return statements should generally have a blank line above them
Examples:
```csharp
// Case when okay - simple logic flow
var a = 2 + 3;
return a;

// Case when a newline is needed - complex logic is grouped together
var a = b + c;
_imageService.Resize(...);

return;
```
- Operators (+, -, *, etc.) should always have spaces around them; i.e. `a + b`, not `a+b`.
- Commas `,` should always be followed by a space
- When setting href directly (not using Angular's routing), it should always be prefixed with baseURL
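
To make the brace rule concrete, here is a minimal, hypothetical sketch in the same style as the example above (the `series` variable and `_logger` field are placeholders for illustration, not taken from the Kavita codebase):

```csharp
// Allowed without braces: the whole statement is on one line and the body is a jump operation
if (series == null) return;

// A non-jump body must use braces and span more than one line
if (series.Pages > 0)
{
    _logger.LogInformation("Series has {PageCount} pages", series.Pages);
}
```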


@ -10,8 +10,8 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="BenchmarkDotNet" Version="0.15.1" />
<PackageReference Include="BenchmarkDotNet.Annotations" Version="0.15.1" />
<PackageReference Include="BenchmarkDotNet" Version="0.15.2" />
<PackageReference Include="BenchmarkDotNet.Annotations" Version="0.15.2" />
<PackageReference Include="NSubstitute" Version="5.3.0" />
</ItemGroup>


@ -6,13 +6,13 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="9.0.6" />
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="9.0.7" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="NSubstitute" Version="5.3.0" />
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="22.0.14" />
<PackageReference Include="TestableIO.System.IO.Abstractions.Wrappers" Version="22.0.14" />
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="22.0.15" />
<PackageReference Include="TestableIO.System.IO.Abstractions.Wrappers" Version="22.0.15" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.1">
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.3">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>


@ -1,4 +1,4 @@
using System;
using System;
using System.Data.Common;
using System.Linq;
using System.Threading.Tasks;
@ -15,37 +15,36 @@ using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Polly;
using Xunit.Abstractions;
namespace API.Tests;
public abstract class AbstractDbTest : AbstractFsTest , IDisposable
public abstract class AbstractDbTest(ITestOutputHelper testOutputHelper): AbstractFsTest
{
protected readonly DataContext Context;
protected readonly IUnitOfWork UnitOfWork;
protected readonly IMapper Mapper;
private readonly DbConnection _connection;
private bool _disposed;
protected AbstractDbTest()
protected async Task<(IUnitOfWork, DataContext, IMapper)> CreateDatabase()
{
var contextOptions = new DbContextOptionsBuilder<DataContext>()
.UseSqlite(CreateInMemoryDatabase())
.EnableSensitiveDataLogging()
.Options;
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
var connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
var context = new DataContext(contextOptions);
Context = new DataContext(contextOptions);
await context.Database.EnsureCreatedAsync();
Context.Database.EnsureCreated(); // Ensure DB schema is created
await SeedDb(context);
Task.Run(SeedDb).GetAwaiter().GetResult();
var config = new MapperConfiguration(cfg => cfg.AddProfile<AutoMapperProfiles>());
Mapper = config.CreateMapper();
var mapper = config.CreateMapper();
GlobalConfiguration.Configuration.UseInMemoryStorage();
UnitOfWork = new UnitOfWork(Context, Mapper, null);
var unitOfWork = new UnitOfWork(context, mapper, null);
return (unitOfWork, context, mapper);
}
private static DbConnection CreateInMemoryDatabase()
@ -56,81 +55,61 @@ public abstract class AbstractDbTest : AbstractFsTest , IDisposable
return connection;
}
private async Task<bool> SeedDb()
private async Task<bool> SeedDb(DataContext context)
{
try
{
await Context.Database.EnsureCreatedAsync();
await context.Database.EnsureCreatedAsync();
var filesystem = CreateFileSystem();
await Seed.SeedSettings(Context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
await Seed.SeedSettings(context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
var setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
var setting = await context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
setting.Value = CacheDirectory;
setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting = await context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting.Value = BackupDirectory;
setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
setting = await context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
setting.Value = BookmarkDirectory;
setting = await Context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync();
setting = await context.ServerSetting.Where(s => s.Key == ServerSettingKey.TotalLogs).SingleAsync();
setting.Value = "10";
Context.ServerSetting.Update(setting);
context.ServerSetting.Update(setting);
Context.Library.Add(new LibraryBuilder("Manga")
context.Library.Add(new LibraryBuilder("Manga")
.WithAllowMetadataMatching(true)
.WithFolderPath(new FolderPathBuilder(DataDirectory).Build())
.Build());
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
await Seed.SeedMetadataSettings(Context);
await Seed.SeedMetadataSettings(context);
return true;
}
catch (Exception ex)
{
Console.WriteLine($"[SeedDb] Error: {ex.Message}");
testOutputHelper.WriteLine($"[SeedDb] Error: {ex.Message}");
return false;
}
}
protected abstract Task ResetDb();
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (_disposed) return;
if (disposing)
{
Context?.Dispose();
_connection?.Dispose();
}
_disposed = true;
}
/// <summary>
/// Add a role to an existing User. Commits.
/// </summary>
/// <param name="userId"></param>
/// <param name="roleName"></param>
protected async Task AddUserWithRole(int userId, string roleName)
protected async Task AddUserWithRole(DataContext context, int userId, string roleName)
{
var role = new AppRole { Id = userId, Name = roleName, NormalizedName = roleName.ToUpper() };
await Context.Roles.AddAsync(role);
await Context.UserRoles.AddAsync(new AppUserRole { UserId = userId, RoleId = userId });
await context.Roles.AddAsync(role);
await context.UserRoles.AddAsync(new AppUserRole { UserId = userId, RoleId = userId });
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
}
}


@ -0,0 +1,22 @@
#nullable enable
using System;
using API.Entities.Enums;
using API.Extensions;
using Xunit;
namespace API.Tests.Extensions;
public class EnumExtensionTests
{
[Theory]
[InlineData("Early Childhood", AgeRating.EarlyChildhood, true)]
[InlineData("M", AgeRating.Mature, true)]
[InlineData("ThisIsNotAnAgeRating", default(AgeRating), false)]
public void TryParse<TEnum>(string? value, TEnum expected, bool success) where TEnum : struct, Enum
{
Assert.Equal(EnumExtensions.TryParse(value, out TEnum got), success);
Assert.Equal(expected, got);
}
}

File diff suppressed because it is too large.


@ -0,0 +1,178 @@
using API.Helpers;
using Xunit;
namespace API.Tests.Helpers;
public class BookSortTitlePrefixHelperTests
{
[Theory]
[InlineData("The Avengers", "Avengers")]
[InlineData("A Game of Thrones", "Game of Thrones")]
[InlineData("An American Tragedy", "American Tragedy")]
public void TestEnglishPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("El Quijote", "Quijote")]
[InlineData("La Casa de Papel", "Casa de Papel")]
[InlineData("Los Miserables", "Miserables")]
[InlineData("Las Vegas", "Vegas")]
[InlineData("Un Mundo Feliz", "Mundo Feliz")]
[InlineData("Una Historia", "Historia")]
public void TestSpanishPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("Le Petit Prince", "Petit Prince")]
[InlineData("La Belle et la Bête", "Belle et la Bête")]
[InlineData("Les Misérables", "Misérables")]
[InlineData("Un Amour de Swann", "Amour de Swann")]
[InlineData("Une Vie", "Vie")]
[InlineData("Des Souris et des Hommes", "Souris et des Hommes")]
public void TestFrenchPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("Der Herr der Ringe", "Herr der Ringe")]
[InlineData("Die Verwandlung", "Verwandlung")]
[InlineData("Das Kapital", "Kapital")]
[InlineData("Ein Sommernachtstraum", "Sommernachtstraum")]
[InlineData("Eine Geschichte", "Geschichte")]
public void TestGermanPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("Il Nome della Rosa", "Nome della Rosa")]
[InlineData("La Divina Commedia", "Divina Commedia")]
[InlineData("Lo Hobbit", "Hobbit")]
[InlineData("Gli Ultimi", "Ultimi")]
[InlineData("Le Città Invisibili", "Città Invisibili")]
[InlineData("Un Giorno", "Giorno")]
[InlineData("Una Notte", "Notte")]
public void TestItalianPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("O Alquimista", "Alquimista")]
[InlineData("A Moreninha", "Moreninha")]
[InlineData("Os Lusíadas", "Lusíadas")]
[InlineData("As Meninas", "Meninas")]
[InlineData("Um Defeito de Cor", "Defeito de Cor")]
[InlineData("Uma História", "História")]
public void TestPortuguesePrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("", "")] // Empty string returns empty
[InlineData("Book", "Book")] // Single word, no change
[InlineData("Avengers", "Avengers")] // No prefix, no change
public void TestNoPrefixCases(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("The", "The")] // Just a prefix word alone
[InlineData("A", "A")] // Just single letter prefix alone
[InlineData("Le", "Le")] // French prefix alone
public void TestPrefixWordAlone(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("THE AVENGERS", "AVENGERS")] // All caps
[InlineData("the avengers", "avengers")] // All lowercase
[InlineData("The AVENGERS", "AVENGERS")] // Mixed case
[InlineData("tHe AvEnGeRs", "AvEnGeRs")] // Random case
public void TestCaseInsensitivity(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("Then Came You", "Then Came You")] // "The" + "n" = not a prefix
[InlineData("And Then There Were None", "And Then There Were None")] // "An" + "d" = not a prefix
[InlineData("Elsewhere", "Elsewhere")] // "El" + "sewhere" = not a prefix (no space)
[InlineData("Lesson Plans", "Lesson Plans")] // "Les" + "son" = not a prefix (no space)
[InlineData("Theory of Everything", "Theory of Everything")] // "The" + "ory" = not a prefix
public void TestFalsePositivePrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("The ", "The ")] // Prefix with only space after - returns original
[InlineData("La ", "La ")] // Same for other languages
[InlineData("El ", "El ")] // Same for Spanish
public void TestPrefixWithOnlySpaceAfter(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("The Multiple Spaces", " Multiple Spaces")] // Doesn't trim extra spaces from remainder
[InlineData("Le Petit Prince", " Petit Prince")] // Leading space preserved in remainder
public void TestSpaceHandling(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("The The Matrix", "The Matrix")] // Removes first "The", leaves second
[InlineData("A A Clockwork Orange", "A Clockwork Orange")] // Removes first "A", leaves second
[InlineData("El El Cid", "El Cid")] // Spanish version
public void TestRepeatedPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("L'Étranger", "L'Étranger")] // French contraction - no space, no change
[InlineData("D'Artagnan", "D'Artagnan")] // Contraction - no space, no change
[InlineData("The-Matrix", "The-Matrix")] // Hyphen instead of space - no change
[InlineData("The.Avengers", "The.Avengers")] // Period instead of space - no change
public void TestNonSpaceSeparators(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("三国演义", "三国演义")] // Chinese - no processing due to CJK detection
[InlineData("한국어", "한국어")] // Korean - not in CJK range, would be processed normally
public void TestCjkLanguages(string inputString, string expected)
{
// NOTE: These don't do anything, I am waiting for user input on if these are needed
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("नमस्ते दुनिया", "नमस्ते दुनिया")] // Hindi - not CJK, processed normally
[InlineData("مرحبا بالعالم", "مرحبا بالعالم")] // Arabic - not CJK, processed normally
[InlineData("שלום עולם", "שלום עולם")] // Hebrew - not CJK, processed normally
public void TestNonLatinNonCjkScripts(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
[Theory]
[InlineData("в мире", "мире")] // Russian "в" (in) - should be removed
[InlineData("на столе", "столе")] // Russian "на" (on) - should be removed
[InlineData("с друзьями", "друзьями")] // Russian "с" (with) - should be removed
public void TestRussianPrefixes(string inputString, string expected)
{
Assert.Equal(expected, BookSortTitlePrefixHelper.GetSortTitle(inputString));
}
}


@ -1,35 +1,30 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Entities.Enums;
using API.Helpers;
using API.Helpers.Builders;
using Polly;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Helpers;
public class PersonHelperTests : AbstractDbTest
public class PersonHelperTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
protected override async Task ResetDb()
{
Context.Series.RemoveRange(Context.Series.ToList());
Context.Person.RemoveRange(Context.Person.ToList());
Context.Library.RemoveRange(Context.Library.ToList());
Context.Series.RemoveRange(Context.Series.ToList());
await Context.SaveChangesAsync();
}
// 1. Test adding new people and keeping existing ones
[Fact]
public async Task UpdateChapterPeopleAsync_AddNewPeople_ExistingPersonRetained()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var library = new LibraryBuilder("My Library")
.Build();
UnitOfWork.LibraryRepository.Add(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Add(library);
await unitOfWork.CommitAsync();
var existingPerson = new PersonBuilder("Joe Shmo").Build();
var chapter = new ChapterBuilder("1").Build();
@ -44,14 +39,14 @@ public class PersonHelperTests : AbstractDbTest
.WithVolume(new VolumeBuilder("1").WithChapter(chapter).Build())
.Build();
UnitOfWork.SeriesRepository.Add(series);
await UnitOfWork.CommitAsync();
unitOfWork.SeriesRepository.Add(series);
await unitOfWork.CommitAsync();
// Call UpdateChapterPeopleAsync with one existing and one new person
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo", "New Person" }, PersonRole.Editor, UnitOfWork);
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo", "New Person" }, PersonRole.Editor, unitOfWork);
// Assert existing person retained and new person added
var people = await UnitOfWork.PersonRepository.GetAllPeople();
var people = await unitOfWork.PersonRepository.GetAllPeople();
Assert.Contains(people, p => p.Name == "Joe Shmo");
Assert.Contains(people, p => p.Name == "New Person");
@ -64,13 +59,13 @@ public class PersonHelperTests : AbstractDbTest
[Fact]
public async Task UpdateChapterPeopleAsync_RemovePeople()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var library = new LibraryBuilder("My Library")
.Build();
UnitOfWork.LibraryRepository.Add(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Add(library);
await unitOfWork.CommitAsync();
var existingPerson1 = new PersonBuilder("Joe Shmo").Build();
var existingPerson2 = new PersonBuilder("Jane Doe").Build();
@ -86,16 +81,16 @@ public class PersonHelperTests : AbstractDbTest
.Build())
.Build();
UnitOfWork.SeriesRepository.Add(series);
await UnitOfWork.CommitAsync();
unitOfWork.SeriesRepository.Add(series);
await unitOfWork.CommitAsync();
// Call UpdateChapterPeopleAsync with only one person
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, UnitOfWork);
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, unitOfWork);
// PersonHelper does not remove the Person from the global DbSet itself
await UnitOfWork.PersonRepository.RemoveAllPeopleNoLongerAssociated();
await unitOfWork.PersonRepository.RemoveAllPeopleNoLongerAssociated();
var people = await UnitOfWork.PersonRepository.GetAllPeople();
var people = await unitOfWork.PersonRepository.GetAllPeople();
Assert.DoesNotContain(people, p => p.Name == "Jane Doe");
var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
@ -107,13 +102,13 @@ public class PersonHelperTests : AbstractDbTest
[Fact]
public async Task UpdateChapterPeopleAsync_NoChanges()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var library = new LibraryBuilder("My Library")
.Build();
UnitOfWork.LibraryRepository.Add(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Add(library);
await unitOfWork.CommitAsync();
var existingPerson = new PersonBuilder("Joe Shmo").Build();
var chapter = new ChapterBuilder("1").WithPerson(existingPerson, PersonRole.Editor).Build();
@ -125,13 +120,13 @@ public class PersonHelperTests : AbstractDbTest
.Build())
.Build();
UnitOfWork.SeriesRepository.Add(series);
await UnitOfWork.CommitAsync();
unitOfWork.SeriesRepository.Add(series);
await unitOfWork.CommitAsync();
// Call UpdateChapterPeopleAsync with the same list
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, UnitOfWork);
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, unitOfWork);
var people = await UnitOfWork.PersonRepository.GetAllPeople();
var people = await unitOfWork.PersonRepository.GetAllPeople();
Assert.Contains(people, p => p.Name == "Joe Shmo");
var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
@ -143,13 +138,13 @@ public class PersonHelperTests : AbstractDbTest
[Fact]
public async Task UpdateChapterPeopleAsync_MultipleRoles()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var library = new LibraryBuilder("My Library")
.Build();
UnitOfWork.LibraryRepository.Add(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Add(library);
await unitOfWork.CommitAsync();
var person = new PersonBuilder("Joe Shmo").Build();
var chapter = new ChapterBuilder("1").WithPerson(person, PersonRole.Writer).Build();
@ -161,11 +156,11 @@ public class PersonHelperTests : AbstractDbTest
.Build())
.Build();
UnitOfWork.SeriesRepository.Add(series);
await UnitOfWork.CommitAsync();
unitOfWork.SeriesRepository.Add(series);
await unitOfWork.CommitAsync();
// Add same person as Editor
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, UnitOfWork);
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, unitOfWork);
// Ensure that the same person is assigned with two roles
var chapterPeople = chapter
@ -181,13 +176,13 @@ public class PersonHelperTests : AbstractDbTest
[Fact]
public async Task UpdateChapterPeopleAsync_MatchOnAlias_NoChanges()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var library = new LibraryBuilder("My Library")
.Build();
UnitOfWork.LibraryRepository.Add(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Add(library);
await unitOfWork.CommitAsync();
var person = new PersonBuilder("Joe Doe")
.WithAlias("Jonny Doe")
@ -204,21 +199,21 @@ public class PersonHelperTests : AbstractDbTest
.Build())
.Build();
UnitOfWork.SeriesRepository.Add(series);
await UnitOfWork.CommitAsync();
unitOfWork.SeriesRepository.Add(series);
await unitOfWork.CommitAsync();
// Add on Name
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Doe" }, PersonRole.Editor, UnitOfWork);
await UnitOfWork.CommitAsync();
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Doe" }, PersonRole.Editor, unitOfWork);
await unitOfWork.CommitAsync();
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
var allPeople = await unitOfWork.PersonRepository.GetAllPeople();
Assert.Single(allPeople);
// Add on alias
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Jonny Doe" }, PersonRole.Editor, UnitOfWork);
await UnitOfWork.CommitAsync();
await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Jonny Doe" }, PersonRole.Editor, unitOfWork);
await unitOfWork.CommitAsync();
allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
allPeople = await unitOfWork.PersonRepository.GetAllPeople();
Assert.Single(allPeople);
}


@ -46,8 +46,8 @@ public class DefaultParserTests
[Theory]
[InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", new [] {"Btooom!", "1", "1"})]
[InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", new [] {"Btooom!", "1", "2"})]
[InlineData("/manga/Monster/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", new [] {"Monster", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, "1"})]
[InlineData("/manga/Hajime no Ippo/Artbook/Hajime no Ippo - Artbook.cbz", new [] {"Hajime no Ippo", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter})]
[InlineData("/manga/Monster/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg", new [] {"Monster", Parser.LooseLeafVolume, "1"})]
[InlineData("/manga/Hajime no Ippo/Artbook/Hajime no Ippo - Artbook.cbz", new [] {"Hajime no Ippo", Parser.LooseLeafVolume, Parser.DefaultChapter})]
public void ParseFromFallbackFolders_ShouldParseSeriesVolumeAndChapter(string inputFile, string[] expectedParseInfo)
{
const string rootDirectory = "/manga/";
@ -119,7 +119,7 @@ public class DefaultParserTests
expected.Add(filepath, new ParserInfo
{
Series = "Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen", Volumes = "1",
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Vol 1.cbz", Format = MangaFormat.Archive,
Chapters = Parser.DefaultChapter, Filename = "Vol 1.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
@ -144,7 +144,7 @@ public class DefaultParserTests
expected.Add(filepath, new ParserInfo
{
Series = "Tenjo Tenge {Full Contact Edition}", Volumes = "1", Edition = "",
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
Chapters = Parser.DefaultChapter, Filename = "Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
@ -152,7 +152,7 @@ public class DefaultParserTests
expected.Add(filepath, new ParserInfo
{
Series = "Akame ga KILL! ZERO", Volumes = "1", Edition = "",
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
Chapters = Parser.DefaultChapter, Filename = "Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
@ -160,14 +160,14 @@ public class DefaultParserTests
expected.Add(filepath, new ParserInfo
{
Series = "Dorohedoro", Volumes = "1", Edition = "",
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive,
Chapters = Parser.DefaultChapter, Filename = "Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
filepath = @"E:/Manga/APOSIMZ/APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "APOSIMZ", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Series = "APOSIMZ", Volumes = Parser.LooseLeafVolume, Edition = "",
Chapters = "40", Filename = "APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
@ -175,7 +175,7 @@ public class DefaultParserTests
filepath = @"E:/Manga/Corpse Party Musume/Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Kedouin Makoto - Corpse Party Musume", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Series = "Kedouin Makoto - Corpse Party Musume", Volumes = Parser.LooseLeafVolume, Edition = "",
Chapters = "9", Filename = "Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
@ -183,7 +183,7 @@ public class DefaultParserTests
filepath = @"E:/Manga/Goblin Slayer/Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Goblin Slayer - Brand New Day", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Series = "Goblin Slayer - Brand New Day", Volumes = Parser.LooseLeafVolume, Edition = "",
Chapters = "6.5", Filename = "Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
});
@ -191,15 +191,15 @@ public class DefaultParserTests
filepath = @"E:/Manga/Summer Time Rendering/Specials/Record 014 (between chapter 083 and ch084) SP11.cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Summer Time Rendering", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, Edition = "",
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive,
Series = "Summer Time Rendering", Volumes = Parser.SpecialVolume, Edition = "",
Chapters = Parser.DefaultChapter, Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = true
});
filepath = @"E:/Manga/Seraph of the End/Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Seraph of the End - Vampire Reign", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Series = "Seraph of the End - Vampire Reign", Volumes = Parser.LooseLeafVolume, Edition = "",
Chapters = "93", Filename = "Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
@ -227,7 +227,7 @@ public class DefaultParserTests
filepath = @"E:/Manga/The Beginning After the End/Chapter 001.cbz";
expected.Add(filepath, new ParserInfo
{
Series = "The Beginning After the End", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Series = "The Beginning After the End", Volumes = Parser.LooseLeafVolume, Edition = "",
Chapters = "1", Filename = "Chapter 001.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
@ -236,7 +236,7 @@ public class DefaultParserTests
expected.Add(filepath, new ParserInfo
{
Series = "Air Gear", Volumes = "1", Edition = "Omnibus",
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz", Format = MangaFormat.Archive,
Chapters = Parser.DefaultChapter, Filename = "Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
@ -244,7 +244,7 @@ public class DefaultParserTests
expected.Add(filepath, new ParserInfo
{
Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "",
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
Chapters = Parser.DefaultChapter, Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
FullFilePath = filepath, IsSpecial = false
});
@ -285,7 +285,7 @@ public class DefaultParserTests
var filepath = @"E:/Manga/Monster #8/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg";
var expectedInfo2 = new ParserInfo
{
Series = "Monster #8", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Series = "Monster #8", Volumes = Parser.LooseLeafVolume, Edition = "",
Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
FullFilePath = filepath, IsSpecial = false
};
@ -407,7 +407,7 @@ public class DefaultParserTests
filepath = @"E:/Manga/Foo 50/Specials/Foo 50 SP01.cbz";
expected = new ParserInfo
{
Series = "Foo 50", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, IsSpecial = true,
Series = "Foo 50", Volumes = Parser.SpecialVolume, IsSpecial = true,
Chapters = Parser.DefaultChapter, Filename = "Foo 50 SP01.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
};
@ -442,8 +442,8 @@ public class DefaultParserTests
var filepath = @"E:/Comics/Teen Titans/Teen Titans v1 Annual 01 (1967) SP01.cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Teen Titans", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume,
Chapters = API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter, Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive,
Series = "Teen Titans", Volumes = Parser.SpecialVolume,
Chapters = Parser.DefaultChapter, Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath
});
@ -451,7 +451,7 @@ public class DefaultParserTests
filepath = @"E:/Comics/Comics/Babe/Babe Vol.1 #1-4/Babe 01.cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Babe", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Series = "Babe", Volumes = Parser.LooseLeafVolume, Edition = "",
Chapters = "1", Filename = "Babe 01.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
@ -467,7 +467,7 @@ public class DefaultParserTests
filepath = @"E:/Comics/Comics/Batman - The Man Who Laughs #1 (2005)/Batman - The Man Who Laughs #1 (2005).cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Batman - The Man Who Laughs", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
Series = "Batman - The Man Who Laughs", Volumes = Parser.LooseLeafVolume, Edition = "",
Chapters = "1", Filename = "Batman - The Man Who Laughs #1 (2005).cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});


@ -78,6 +78,16 @@ public class MangaParsingTests
[InlineData("Accel World Volume 2", "2")]
[InlineData("Nagasarete Airantou - Vol. 30 Ch. 187.5 - Vol.31 Omake", "30")]
[InlineData("Zom 100 - Bucket List of the Dead v01", "1")]
// Tome Tests
[InlineData("Daredevil - t6 - 10 - (2019)", "6")]
[InlineData("Batgirl T2000 #57", "2000")]
[InlineData("Teen Titans t1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")]
[InlineData("Conquistador_Tome_2", "2")]
[InlineData("Max_l_explorateur-_Tome_0", "0")]
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "3")]
[InlineData("Adventure Time (2012)/Adventure Time Ch 1 (2012)", Parser.LooseLeafVolume)]
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "1")]
[InlineData("Monster Ch. 001 [MangaPlus] [Digital] [amit34521]", Parser.LooseLeafVolume)]
public void ParseVolumeTest(string filename, string expected)
{
Assert.Equal(expected, Parser.ParseVolume(filename, LibraryType.Manga));
@ -248,6 +258,11 @@ public class MangaParsingTests
[InlineData("[218565]-(C92) [BRIO (Puyocha)] Mika-nee no Tanryoku Shidou - Mika s Guide to Self-Confidence (THE IDOLM@STE", "")]
[InlineData("Monster #8 Ch. 001", "Monster #8")]
[InlineData("Zom 100 - Bucket List of the Dead v01", "Zom 100 - Bucket List of the Dead")]
[InlineData("Zom 100 - Tome 2", "Zom 100")]
[InlineData("Max_l_explorateur Tome 0", "Max l explorateur")]
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "Chevaliers d'Héliopolis")]
[InlineData("Bd Fr-Aldebaran-Antares-t6", "Bd Fr-Aldebaran-Antares")]
[InlineData("Monster Ch. 001 [MangaPlus] [Digital] [amit34521]", "Monster")]
public void ParseSeriesTest(string filename, string expected)
{
Assert.Equal(expected, Parser.ParseSeries(filename, LibraryType.Manga));
@ -341,6 +356,7 @@ public class MangaParsingTests
[InlineData("Max Level Returner ตอนที่ 5", "5")]
[InlineData("หนึ่งความคิด นิจนิรันดร์ บทที่ 112", "112")]
[InlineData("Monster #8 Ch. 001", "1")]
[InlineData("Monster Ch. 001 [MangaPlus] [Digital] [amit34521]", "1")]
public void ParseChaptersTest(string filename, string expected)
{
Assert.Equal(expected, Parser.ParseChapter(filename, LibraryType.Manga));
@ -391,6 +407,7 @@ public class MangaParsingTests
{
Assert.Equal(expected, Parser.ParseEdition(input));
}
[Theory]
[InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", false)]
[InlineData("Beelzebub_Omake_June_2012_RHS", false)]


@ -2,28 +2,21 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.DTOs.Metadata.Browse;
using API.Entities;
using API.Entities.Enums;
using API.Entities.Metadata;
using API.Helpers;
using API.Helpers.Builders;
using Polly;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Repository;
public class GenreRepositoryTests : AbstractDbTest
public class GenreRepositoryTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private AppUser _fullAccess;
private AppUser _restrictedAccess;
private AppUser _restrictedAgeAccess;
protected override async Task ResetDb()
{
Context.Genre.RemoveRange(Context.Genre);
Context.Library.RemoveRange(Context.Library);
await Context.SaveChangesAsync();
}
private TestGenreSet CreateTestGenres()
{
@ -42,36 +35,35 @@ public class GenreRepositoryTests : AbstractDbTest
};
}
private async Task SeedDbWithGenres(TestGenreSet genres)
private async Task<(AppUser, AppUser, AppUser)> Setup(DataContext context, TestGenreSet genres)
{
await CreateTestUsers();
await AddGenresToContext(genres);
await CreateLibrariesWithGenres(genres);
await AssignLibrariesToUsers();
var fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
var restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
var restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
context.Users.Add(fullAccess);
context.Users.Add(restrictedAccess);
context.Users.Add(restrictedAgeAccess);
await context.SaveChangesAsync();
await AddGenresToContext(context, genres);
await CreateLibrariesWithGenres(context, genres);
await AssignLibrariesToUsers(context, fullAccess, restrictedAccess, restrictedAgeAccess);
return (fullAccess, restrictedAccess, restrictedAgeAccess);
}
private async Task CreateTestUsers()
{
_fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
_restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
_restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
_restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
_restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
Context.Users.Add(_fullAccess);
Context.Users.Add(_restrictedAccess);
Context.Users.Add(_restrictedAgeAccess);
await Context.SaveChangesAsync();
}
private async Task AddGenresToContext(TestGenreSet genres)
private async Task AddGenresToContext(DataContext context, TestGenreSet genres)
{
var allGenres = genres.GetAllGenres();
Context.Genre.AddRange(allGenres);
await Context.SaveChangesAsync();
context.Genre.AddRange(allGenres);
await context.SaveChangesAsync();
}
private async Task CreateLibrariesWithGenres(TestGenreSet genres)
private async Task CreateLibrariesWithGenres(DataContext context, TestGenreSet genres)
{
var lib0 = new LibraryBuilder("lib0")
.WithSeries(new SeriesBuilder("lib0-s0")
@ -120,22 +112,22 @@ public class GenreRepositoryTests : AbstractDbTest
.Build())
.Build();
Context.Library.Add(lib0);
Context.Library.Add(lib1);
await Context.SaveChangesAsync();
context.Library.Add(lib0);
context.Library.Add(lib1);
await context.SaveChangesAsync();
}
private async Task AssignLibrariesToUsers()
private async Task AssignLibrariesToUsers(DataContext context, AppUser fullAccess, AppUser restrictedAccess, AppUser restrictedAgeAccess)
{
var lib0 = Context.Library.First(l => l.Name == "lib0");
var lib1 = Context.Library.First(l => l.Name == "lib1");
var lib0 = context.Library.First(l => l.Name == "lib0");
var lib1 = context.Library.First(l => l.Name == "lib1");
_fullAccess.Libraries.Add(lib0);
_fullAccess.Libraries.Add(lib1);
_restrictedAccess.Libraries.Add(lib1);
_restrictedAgeAccess.Libraries.Add(lib1);
fullAccess.Libraries.Add(lib0);
fullAccess.Libraries.Add(lib1);
restrictedAccess.Libraries.Add(lib1);
restrictedAgeAccess.Libraries.Add(lib1);
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
}
private static Predicate<BrowseGenreDto> ContainsGenreCheck(Genre genre)
@ -159,15 +151,14 @@ public class GenreRepositoryTests : AbstractDbTest
}
[Fact]
public async Task GetBrowseableGenre_FullAccess_ReturnsAllGenresWithCorrectCounts()
public async Task GetBrowseableGenrefullAccess_ReturnsAllGenresWithCorrectCounts()
{
// Arrange
await ResetDb();
var genres = CreateTestGenres();
await SeedDbWithGenres(genres);
var (unitOfWork, context, mapper) = await CreateDatabase();
var (fullAccess, restrictedAccess, restrictedAgeAccess) = await Setup(context, genres);
// Act
var fullAccessGenres = await UnitOfWork.GenreRepository.GetBrowseableGenre(_fullAccess.Id, new UserParams());
var fullAccessGenres = await unitOfWork.GenreRepository.GetBrowseableGenre(fullAccess.Id, new UserParams());
// Assert
Assert.Equal(genres.GetAllGenres().Count, fullAccessGenres.TotalCount);
@ -186,13 +177,12 @@ public class GenreRepositoryTests : AbstractDbTest
[Fact]
public async Task GetBrowseableGenre_RestrictedAccess_ReturnsOnlyAccessibleGenres()
{
// Arrange
await ResetDb();
var genres = CreateTestGenres();
await SeedDbWithGenres(genres);
var (unitOfWork, context, mapper) = await CreateDatabase();
var (fullAccess, restrictedAccess, restrictedAgeAccess) = await Setup(context, genres);
// Act
var restrictedAccessGenres = await UnitOfWork.GenreRepository.GetBrowseableGenre(_restrictedAccess.Id, new UserParams());
var restrictedAccessGenres = await unitOfWork.GenreRepository.GetBrowseableGenre(restrictedAccess.Id, new UserParams());
// Assert - Should see: 3 shared + 4 library 1 specific = 7 genres
Assert.Equal(7, restrictedAccessGenres.TotalCount);
@ -222,13 +212,12 @@ public class GenreRepositoryTests : AbstractDbTest
[Fact]
public async Task GetBrowseableGenre_RestrictedAgeAccess_FiltersAgeRestrictedContent()
{
// Arrange
await ResetDb();
var genres = CreateTestGenres();
await SeedDbWithGenres(genres);
var (unitOfWork, context, mapper) = await CreateDatabase();
var (fullAccess, restrictedAccess, restrictedAgeAccess) = await Setup(context, genres);
// Act
var restrictedAgeAccessGenres = await UnitOfWork.GenreRepository.GetBrowseableGenre(_restrictedAgeAccess.Id, new UserParams());
var restrictedAgeAccessGenres = await unitOfWork.GenreRepository.GetBrowseableGenre(restrictedAgeAccess.Id, new UserParams());
// Assert - Should see: 3 shared + 3 lib1 specific = 6 genres (age-restricted genre filtered out)
Assert.Equal(6, restrictedAgeAccessGenres.TotalCount);


@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.DTOs.Metadata.Browse;
using API.DTOs.Metadata.Browse.Requests;
using API.Entities;
@ -9,51 +10,44 @@ using API.Entities.Enums;
using API.Entities.Person;
using API.Helpers;
using API.Helpers.Builders;
using Polly;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Repository;
public class PersonRepositoryTests : AbstractDbTest
public class PersonRepositoryTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private AppUser _fullAccess;
private AppUser _restrictedAccess;
private AppUser _restrictedAgeAccess;
protected override async Task ResetDb()
private async Task<(AppUser, AppUser, AppUser)> Setup(DataContext context)
{
Context.Person.RemoveRange(Context.Person.ToList());
Context.Library.RemoveRange(Context.Library.ToList());
Context.AppUser.RemoveRange(Context.AppUser.ToList());
await UnitOfWork.CommitAsync();
}
var fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
var restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
var restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
private async Task SeedDb()
{
_fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
_restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
_restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
_restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
_restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
Context.AppUser.Add(_fullAccess);
Context.AppUser.Add(_restrictedAccess);
Context.AppUser.Add(_restrictedAgeAccess);
await Context.SaveChangesAsync();
context.AppUser.Add(fullAccess);
context.AppUser.Add(restrictedAccess);
context.AppUser.Add(restrictedAgeAccess);
await context.SaveChangesAsync();
var people = CreateTestPeople();
Context.Person.AddRange(people);
await Context.SaveChangesAsync();
context.Person.AddRange(people);
await context.SaveChangesAsync();
var libraries = CreateTestLibraries(people);
Context.Library.AddRange(libraries);
await Context.SaveChangesAsync();
var libraries = CreateTestLibraries(context, people);
context.Library.AddRange(libraries);
await context.SaveChangesAsync();
_fullAccess.Libraries.Add(libraries[0]); // lib0
_fullAccess.Libraries.Add(libraries[1]); // lib1
_restrictedAccess.Libraries.Add(libraries[1]); // lib1 only
_restrictedAgeAccess.Libraries.Add(libraries[1]); // lib1 only
fullAccess.Libraries.Add(libraries[0]); // lib0
fullAccess.Libraries.Add(libraries[1]); // lib1
restrictedAccess.Libraries.Add(libraries[1]); // lib1 only
restrictedAgeAccess.Libraries.Add(libraries[1]); // lib1 only
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
return (fullAccess, restrictedAccess, restrictedAgeAccess);
}
private static List<Person> CreateTestPeople()
@ -73,7 +67,7 @@ public class PersonRepositoryTests : AbstractDbTest
};
}
private static List<Library> CreateTestLibraries(List<Person> people)
private static List<Library> CreateTestLibraries(DataContext context, List<Person> people)
{
var lib0 = new LibraryBuilder("lib0")
.WithSeries(new SeriesBuilder("lib0-s0")
@ -158,9 +152,9 @@ public class PersonRepositoryTests : AbstractDbTest
return people.First(p => p.Name == name);
}
private Person GetPersonByName(string name)
private Person GetPersonByName(DataContext context, string name)
{
return Context.Person.First(p => p.Name == name);
return context.Person.First(p => p.Name == name);
}
private static Predicate<BrowsePersonDto> ContainsPersonCheck(Person person)
@ -171,18 +165,18 @@ public class PersonRepositoryTests : AbstractDbTest
[Fact]
public async Task GetBrowsePersonDtos()
{
await ResetDb();
await SeedDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (fullAccess, restrictedAccess, restrictedAgeAccess) = await Setup(context);
// Get people from database for assertions
var sharedSeriesChaptersPerson = GetPersonByName("Shared Series Chapter Person");
var lib0SeriesPerson = GetPersonByName("Lib0 Series Person");
var lib1SeriesPerson = GetPersonByName("Lib1 Series Person");
var lib1ChapterAgePerson = GetPersonByName("Lib1 Chapter Age Person");
var allPeople = Context.Person.ToList();
var sharedSeriesChaptersPerson = GetPersonByName(context, "Shared Series Chapter Person");
var lib0SeriesPerson = GetPersonByName(context, "Lib0 Series Person");
var lib1SeriesPerson = GetPersonByName(context, "Lib1 Series Person");
var lib1ChapterAgePerson = GetPersonByName(context, "Lib1 Chapter Age Person");
var allPeople = context.Person.ToList();
var fullAccessPeople =
await UnitOfWork.PersonRepository.GetBrowsePersonDtos(_fullAccess.Id, new BrowsePersonFilterDto(),
await unitOfWork.PersonRepository.GetBrowsePersonDtos(fullAccess.Id, new BrowsePersonFilterDto(),
new UserParams());
Assert.Equal(allPeople.Count, fullAccessPeople.TotalCount);
@ -199,18 +193,18 @@ public class PersonRepositoryTests : AbstractDbTest
Assert.Equal(2, fullAccessPeople.First(dto => dto.Id == lib1SeriesPerson.Id).SeriesCount);
var restrictedAccessPeople =
await UnitOfWork.PersonRepository.GetBrowsePersonDtos(_restrictedAccess.Id, new BrowsePersonFilterDto(),
await unitOfWork.PersonRepository.GetBrowsePersonDtos(restrictedAccess.Id, new BrowsePersonFilterDto(),
new UserParams());
Assert.Equal(7, restrictedAccessPeople.TotalCount);
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Shared Series Chapter Person")));
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Shared Series Person")));
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Shared Chapters Person")));
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Series Chapter Person")));
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Series Person")));
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Chapters Person")));
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName("Lib1 Chapter Age Person")));
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName(context, "Shared Series Chapter Person")));
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName(context, "Shared Series Person")));
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName(context, "Shared Chapters Person")));
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName(context, "Lib1 Series Chapter Person")));
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName(context, "Lib1 Series Person")));
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName(context, "Lib1 Chapters Person")));
Assert.Contains(restrictedAccessPeople, ContainsPersonCheck(GetPersonByName(context, "Lib1 Chapter Age Person")));
// 2 series in lib1, no series in lib0
Assert.Equal(2, restrictedAccessPeople.First(dto => dto.Id == sharedSeriesChaptersPerson.Id).SeriesCount);
@ -219,7 +213,7 @@ public class PersonRepositoryTests : AbstractDbTest
// 2 series in lib1
Assert.Equal(2, restrictedAccessPeople.First(dto => dto.Id == lib1SeriesPerson.Id).SeriesCount);
var restrictedAgeAccessPeople = await UnitOfWork.PersonRepository.GetBrowsePersonDtos(_restrictedAgeAccess.Id,
var restrictedAgeAccessPeople = await unitOfWork.PersonRepository.GetBrowsePersonDtos(restrictedAgeAccess.Id,
new BrowsePersonFilterDto(), new UserParams());
// Note: There is a potential bug here where a person in a different chapter of an age restricted series will show up
@ -232,30 +226,30 @@ public class PersonRepositoryTests : AbstractDbTest
[Fact]
public async Task GetRolesForPersonByName()
{
await ResetDb();
await SeedDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (fullAccess, restrictedAccess, restrictedAgeAccess) = await Setup(context);
var sharedSeriesPerson = GetPersonByName("Shared Series Person");
var sharedChaptersPerson = GetPersonByName("Shared Chapters Person");
var lib1ChapterAgePerson = GetPersonByName("Lib1 Chapter Age Person");
var sharedSeriesPerson = GetPersonByName(context, "Shared Series Person");
var sharedChaptersPerson = GetPersonByName(context, "Shared Chapters Person");
var lib1ChapterAgePerson = GetPersonByName(context, "Lib1 Chapter Age Person");
var sharedSeriesRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, _fullAccess.Id);
var chapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, _fullAccess.Id);
var ageChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, _fullAccess.Id);
var sharedSeriesRoles = await unitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, fullAccess.Id);
var chapterRoles = await unitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, fullAccess.Id);
var ageChapterRoles = await unitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, fullAccess.Id);
Assert.Equal(3, sharedSeriesRoles.Count());
Assert.Equal(6, chapterRoles.Count());
Assert.Single(ageChapterRoles);
var restrictedRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, _restrictedAccess.Id);
var restrictedChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, _restrictedAccess.Id);
var restrictedAgePersonChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, _restrictedAccess.Id);
var restrictedRoles = await unitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, restrictedAccess.Id);
var restrictedChapterRoles = await unitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, restrictedAccess.Id);
var restrictedAgePersonChapterRoles = await unitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, restrictedAccess.Id);
Assert.Equal(2, restrictedRoles.Count());
Assert.Equal(4, restrictedChapterRoles.Count());
Assert.Single(restrictedAgePersonChapterRoles);
var restrictedAgeRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, _restrictedAgeAccess.Id);
var restrictedAgeChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, _restrictedAgeAccess.Id);
var restrictedAgeAgePersonChapterRoles = await UnitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, _restrictedAgeAccess.Id);
var restrictedAgeRoles = await unitOfWork.PersonRepository.GetRolesForPersonByName(sharedSeriesPerson.Id, restrictedAgeAccess.Id);
var restrictedAgeChapterRoles = await unitOfWork.PersonRepository.GetRolesForPersonByName(sharedChaptersPerson.Id, restrictedAgeAccess.Id);
var restrictedAgeAgePersonChapterRoles = await unitOfWork.PersonRepository.GetRolesForPersonByName(lib1ChapterAgePerson.Id, restrictedAgeAccess.Id);
Assert.Single(restrictedAgeRoles);
Assert.Equal(2, restrictedAgeChapterRoles.Count());
// Note: There is a potential bug here where a person in a different chapter of an age restricted series will show up
@ -265,76 +259,76 @@ public class PersonRepositoryTests : AbstractDbTest
[Fact]
public async Task GetPersonDtoByName()
{
await ResetDb();
await SeedDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (fullAccess, restrictedAccess, restrictedAgeAccess) = await Setup(context);
var allPeople = Context.Person.ToList();
var allPeople = context.Person.ToList();
foreach (var person in allPeople)
{
Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName(person.Name, _fullAccess.Id));
Assert.NotNull(await unitOfWork.PersonRepository.GetPersonDtoByName(person.Name, fullAccess.Id));
}
Assert.Null(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib0 Chapters Person", _restrictedAccess.Id));
Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName("Shared Series Person", _restrictedAccess.Id));
Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Series Person", _restrictedAccess.Id));
Assert.Null(await unitOfWork.PersonRepository.GetPersonDtoByName("Lib0 Chapters Person", restrictedAccess.Id));
Assert.NotNull(await unitOfWork.PersonRepository.GetPersonDtoByName("Shared Series Person", restrictedAccess.Id));
Assert.NotNull(await unitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Series Person", restrictedAccess.Id));
Assert.Null(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib0 Chapters Person", _restrictedAgeAccess.Id));
Assert.NotNull(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Series Person", _restrictedAgeAccess.Id));
Assert.Null(await unitOfWork.PersonRepository.GetPersonDtoByName("Lib0 Chapters Person", restrictedAgeAccess.Id));
Assert.NotNull(await unitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Series Person", restrictedAgeAccess.Id));
// Note: There is a potential bug here where a person in a different chapter of an age restricted series will show up
Assert.Null(await UnitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Chapter Age Person", _restrictedAgeAccess.Id));
Assert.Null(await unitOfWork.PersonRepository.GetPersonDtoByName("Lib1 Chapter Age Person", restrictedAgeAccess.Id));
}
[Fact]
public async Task GetSeriesKnownFor()
{
await ResetDb();
await SeedDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (fullAccess, restrictedAccess, restrictedAgeAccess) = await Setup(context);
var sharedSeriesPerson = GetPersonByName("Shared Series Person");
var lib1SeriesPerson = GetPersonByName("Lib1 Series Person");
var sharedSeriesPerson = GetPersonByName(context, "Shared Series Person");
var lib1SeriesPerson = GetPersonByName(context, "Lib1 Series Person");
var series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, _fullAccess.Id);
var series = await unitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, fullAccess.Id);
Assert.Equal(3, series.Count());
series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, _restrictedAccess.Id);
series = await unitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, restrictedAccess.Id);
Assert.Equal(2, series.Count());
series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, _restrictedAgeAccess.Id);
series = await unitOfWork.PersonRepository.GetSeriesKnownFor(sharedSeriesPerson.Id, restrictedAgeAccess.Id);
Assert.Single(series);
series = await UnitOfWork.PersonRepository.GetSeriesKnownFor(lib1SeriesPerson.Id, _restrictedAgeAccess.Id);
series = await unitOfWork.PersonRepository.GetSeriesKnownFor(lib1SeriesPerson.Id, restrictedAgeAccess.Id);
Assert.Single(series);
}
[Fact]
public async Task GetChaptersForPersonByRole()
{
await ResetDb();
await SeedDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (fullAccess, restrictedAccess, restrictedAgeAccess) = await Setup(context);
var sharedChaptersPerson = GetPersonByName("Shared Chapters Person");
var sharedChaptersPerson = GetPersonByName(context, "Shared Chapters Person");
// Lib0
var chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _fullAccess.Id, PersonRole.Colorist);
var restrictedChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAccess.Id, PersonRole.Colorist);
var restrictedAgeChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAgeAccess.Id, PersonRole.Colorist);
var chapters = await unitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, fullAccess.Id, PersonRole.Colorist);
var restrictedChapters = await unitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, restrictedAccess.Id, PersonRole.Colorist);
var restrictedAgeChapters = await unitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, restrictedAgeAccess.Id, PersonRole.Colorist);
Assert.Single(chapters);
Assert.Empty(restrictedChapters);
Assert.Empty(restrictedAgeChapters);
// Lib1 - age restricted series
chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _fullAccess.Id, PersonRole.Imprint);
restrictedChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAccess.Id, PersonRole.Imprint);
restrictedAgeChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAgeAccess.Id, PersonRole.Imprint);
chapters = await unitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, fullAccess.Id, PersonRole.Imprint);
restrictedChapters = await unitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, restrictedAccess.Id, PersonRole.Imprint);
restrictedAgeChapters = await unitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, restrictedAgeAccess.Id, PersonRole.Imprint);
Assert.Single(chapters);
Assert.Single(restrictedChapters);
Assert.Empty(restrictedAgeChapters);
// Lib1 - not age restricted series
chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _fullAccess.Id, PersonRole.Team);
restrictedChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAccess.Id, PersonRole.Team);
restrictedAgeChapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, _restrictedAgeAccess.Id, PersonRole.Team);
chapters = await unitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, fullAccess.Id, PersonRole.Team);
restrictedChapters = await unitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, restrictedAccess.Id, PersonRole.Team);
restrictedAgeChapters = await unitOfWork.PersonRepository.GetChaptersForPersonByRole(sharedChaptersPerson.Id, restrictedAgeAccess.Id, PersonRole.Team);
Assert.Single(chapters);
Assert.Single(restrictedChapters);
Assert.Single(restrictedAgeChapters);
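The tests in this file now destructure a `(unitOfWork, context, mapper)` tuple from `CreateDatabase()` rather than sharing repository state between tests. The helper itself lives in `AbstractDbTest` and its body is not part of this excerpt; a minimal sketch of what such a per-test factory could look like, assuming the same in-memory SQLite and AutoMapper wiring the older test constructors used, is:

```csharp
// Hypothetical sketch only - the real AbstractDbTest.CreateDatabase() is not shown in this diff.
protected static async Task<(IUnitOfWork, DataContext, IMapper)> CreateDatabase()
{
    // Each test gets its own connection, so ResetDb()-style cleanup between tests is no longer needed
    var connection = new SqliteConnection("Filename=:memory:");
    connection.Open();

    var context = new DataContext(new DbContextOptionsBuilder().UseSqlite(connection).Options);
    await context.Database.MigrateAsync();

    var mapper = new MapperConfiguration(cfg => cfg.AddProfile<AutoMapperProfiles>()).CreateMapper();
    var unitOfWork = new UnitOfWork(context, mapper, null);

    return (unitOfWork, context, mapper);
}
```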

View File

@@ -2,28 +2,21 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.DTOs.Metadata.Browse;
using API.Entities;
using API.Entities.Enums;
using API.Entities.Metadata;
using API.Helpers;
using API.Helpers.Builders;
using Polly;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Repository;
public class TagRepositoryTests : AbstractDbTest
public class TagRepositoryTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private AppUser _fullAccess;
private AppUser _restrictedAccess;
private AppUser _restrictedAgeAccess;
protected override async Task ResetDb()
{
Context.Tag.RemoveRange(Context.Tag);
Context.Library.RemoveRange(Context.Library);
await Context.SaveChangesAsync();
}
private TestTagSet CreateTestTags()
{
@@ -42,36 +35,46 @@ public class TagRepositoryTests : AbstractDbTest
};
}
private async Task SeedDbWithTags(TestTagSet tags)
private async Task<(AppUser, AppUser, AppUser)> SeedDbWithTags(DataContext context, TestTagSet tags)
{
await CreateTestUsers();
await AddTagsToContext(tags);
await CreateLibrariesWithTags(tags);
await AssignLibrariesToUsers();
await AddTagsToContext(context, tags);
await CreateLibrariesWithTags(context, tags);
return await CreateTestUsers(context);
}
private async Task CreateTestUsers()
private async Task<(AppUser, AppUser, AppUser)> CreateTestUsers(DataContext context)
{
_fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
_restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
_restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
_restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
_restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
var fullAccess = new AppUserBuilder("amelia", "amelia@example.com").Build();
var restrictedAccess = new AppUserBuilder("mila", "mila@example.com").Build();
var restrictedAgeAccess = new AppUserBuilder("eva", "eva@example.com").Build();
restrictedAgeAccess.AgeRestriction = AgeRating.Teen;
restrictedAgeAccess.AgeRestrictionIncludeUnknowns = true;
Context.Users.Add(_fullAccess);
Context.Users.Add(_restrictedAccess);
Context.Users.Add(_restrictedAgeAccess);
await Context.SaveChangesAsync();
context.Users.Add(fullAccess);
context.Users.Add(restrictedAccess);
context.Users.Add(restrictedAgeAccess);
var lib0 = context.Library.First(l => l.Name == "lib0");
var lib1 = context.Library.First(l => l.Name == "lib1");
fullAccess.Libraries.Add(lib0);
fullAccess.Libraries.Add(lib1);
restrictedAccess.Libraries.Add(lib1);
restrictedAgeAccess.Libraries.Add(lib1);
await context.SaveChangesAsync();
return (fullAccess, restrictedAccess, restrictedAgeAccess);
}
private async Task AddTagsToContext(TestTagSet tags)
private async Task AddTagsToContext(DataContext context, TestTagSet tags)
{
var allTags = tags.GetAllTags();
Context.Tag.AddRange(allTags);
await Context.SaveChangesAsync();
context.Tag.AddRange(allTags);
await context.SaveChangesAsync();
}
private async Task CreateLibrariesWithTags(TestTagSet tags)
private async Task CreateLibrariesWithTags(DataContext context, TestTagSet tags)
{
var lib0 = new LibraryBuilder("lib0")
.WithSeries(new SeriesBuilder("lib0-s0")
@@ -122,22 +125,9 @@ public class TagRepositoryTests : AbstractDbTest
.Build())
.Build();
Context.Library.Add(lib0);
Context.Library.Add(lib1);
await Context.SaveChangesAsync();
}
private async Task AssignLibrariesToUsers()
{
var lib0 = Context.Library.First(l => l.Name == "lib0");
var lib1 = Context.Library.First(l => l.Name == "lib1");
_fullAccess.Libraries.Add(lib0);
_fullAccess.Libraries.Add(lib1);
_restrictedAccess.Libraries.Add(lib1);
_restrictedAgeAccess.Libraries.Add(lib1);
await Context.SaveChangesAsync();
context.Library.Add(lib0);
context.Library.Add(lib1);
await context.SaveChangesAsync();
}
private static Predicate<BrowseTagDto> ContainsTagCheck(Tag tag)
@@ -163,13 +153,12 @@ public class TagRepositoryTests : AbstractDbTest
[Fact]
public async Task GetBrowseableTag_FullAccess_ReturnsAllTagsWithCorrectCounts()
{
// Arrange
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var tags = CreateTestTags();
await SeedDbWithTags(tags);
var (fullAccess, _, _) = await SeedDbWithTags(context, tags);
// Act
var fullAccessTags = await UnitOfWork.TagRepository.GetBrowseableTag(_fullAccess.Id, new UserParams());
var fullAccessTags = await unitOfWork.TagRepository.GetBrowseableTag(fullAccess.Id, new UserParams());
// Assert
Assert.Equal(tags.GetAllTags().Count, fullAccessTags.TotalCount);
@@ -188,13 +177,12 @@ public class TagRepositoryTests : AbstractDbTest
[Fact]
public async Task GetBrowseableTag_RestrictedAccess_ReturnsOnlyAccessibleTags()
{
// Arrange
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var tags = CreateTestTags();
await SeedDbWithTags(tags);
var (_, restrictedAccess, _) = await SeedDbWithTags(context, tags);
// Act
var restrictedAccessTags = await UnitOfWork.TagRepository.GetBrowseableTag(_restrictedAccess.Id, new UserParams());
var restrictedAccessTags = await unitOfWork.TagRepository.GetBrowseableTag(restrictedAccess.Id, new UserParams());
// Assert - Should see: 3 shared + 4 library 1 specific = 7 tags
Assert.Equal(7, restrictedAccessTags.TotalCount);
@@ -223,13 +211,12 @@ public class TagRepositoryTests : AbstractDbTest
[Fact]
public async Task GetBrowseableTag_RestrictedAgeAccess_FiltersAgeRestrictedContent()
{
// Arrange
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var tags = CreateTestTags();
await SeedDbWithTags(tags);
var (_, _, restrictedAgeAccess) = await SeedDbWithTags(context, tags);
// Act
var restrictedAgeAccessTags = await UnitOfWork.TagRepository.GetBrowseableTag(_restrictedAgeAccess.Id, new UserParams());
var restrictedAgeAccessTags = await unitOfWork.TagRepository.GetBrowseableTag(restrictedAgeAccess.Id, new UserParams());
// Assert - Should see: 3 shared + 3 lib1 specific = 6 tags (age-restricted tag filtered out)
Assert.Equal(6, restrictedAgeAccessTags.TotalCount);

View File

@@ -0,0 +1,296 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Constants;
using API.Data;
using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers.Builders;
using API.Services;
using API.Services.Tasks.Scanner;
using AutoMapper;
using Kavita.Common;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Identity.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
public class AccountServiceTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
[Theory]
[InlineData("admin", true)]
[InlineData("^^$SomeBadChars", false)]
[InlineData("Lisa2003", true)]
[InlineData("Kraft Lawrance", false)]
public async Task ValidateUsername_Regex(string username, bool valid)
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (_, accountService, _, _) = await Setup(unitOfWork, context, mapper);
Assert.Equal(valid, !(await accountService.ValidateUsername(username)).Any());
}
[Fact]
public async Task ChangeIdentityProvider_Throws_WhenDefaultAdminUser()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (_, accountService, _, _) = await Setup(unitOfWork, context, mapper);
var defaultAdmin = await unitOfWork.UserRepository.GetDefaultAdminUser();
await Assert.ThrowsAsync<KavitaException>(() =>
accountService.ChangeIdentityProvider(defaultAdmin.Id, defaultAdmin, IdentityProvider.Kavita));
}
[Fact]
public async Task ChangeIdentityProvider_Succeeds_WhenSyncUserSettingsIsFalse()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (user, accountService, _, _) = await Setup(unitOfWork, context, mapper);
var result = await accountService.ChangeIdentityProvider(user.Id, user, IdentityProvider.Kavita);
Assert.False(result);
var updated = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(updated);
Assert.Equal(IdentityProvider.Kavita, updated.IdentityProvider);
}
[Fact]
public async Task ChangeIdentityProvider_Throws_WhenUserIsOidcManaged_AndNoChange()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (user, accountService, _, settingsService) = await Setup(unitOfWork, context, mapper);
user.IdentityProvider = IdentityProvider.OpenIdConnect;
await unitOfWork.CommitAsync();
var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync();
settings.OidcConfig.SyncUserSettings = true;
await settingsService.UpdateSettings(settings);
await Assert.ThrowsAsync<KavitaException>(() =>
accountService.ChangeIdentityProvider(user.Id, user, IdentityProvider.OpenIdConnect));
}
[Fact]
public async Task ChangeIdentityProvider_Succeeds_WhenSyncUserSettingsTrue_AndChangeIsAllowed()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (user, accountService, _, settingsService) = await Setup(unitOfWork, context, mapper);
user.IdentityProvider = IdentityProvider.OpenIdConnect;
await unitOfWork.CommitAsync();
var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync();
settings.OidcConfig.SyncUserSettings = true;
await settingsService.UpdateSettings(settings);
var result = await accountService.ChangeIdentityProvider(user.Id, user, IdentityProvider.Kavita);
Assert.False(result);
var updated = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(updated);
Assert.Equal(IdentityProvider.Kavita, updated.IdentityProvider);
}
[Fact]
public async Task ChangeIdentityProvider_ReturnsTrue_WhenChangedToOidc()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (user, accountService, _, settingsService) = await Setup(unitOfWork, context, mapper);
user.IdentityProvider = IdentityProvider.Kavita;
await unitOfWork.CommitAsync();
var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync();
settings.OidcConfig.SyncUserSettings = true;
await settingsService.UpdateSettings(settings);
var result = await accountService.ChangeIdentityProvider(user.Id, user, IdentityProvider.OpenIdConnect);
Assert.True(result);
var updated = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(updated);
Assert.Equal(IdentityProvider.OpenIdConnect, updated.IdentityProvider);
}
[Fact]
public async Task UpdateLibrariesForUser_GrantsAccessToAllLibraries_WhenAdmin()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (user, accountService, _, _) = await Setup(unitOfWork, context, mapper);
var mangaLib = new LibraryBuilder("Manga", LibraryType.Manga).Build();
var lightNovelsLib = new LibraryBuilder("Light Novels", LibraryType.LightNovel).Build();
unitOfWork.LibraryRepository.Add(mangaLib);
unitOfWork.LibraryRepository.Add(lightNovelsLib);
await unitOfWork.CommitAsync();
var allLibs = await unitOfWork.LibraryRepository.GetLibrariesAsync();
var maxCount = allLibs.Count();
await accountService.UpdateLibrariesForUser(user, new List<int>(), hasAdminRole: true);
await unitOfWork.CommitAsync();
var userLibs = await unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id);
Assert.Equal(maxCount, userLibs.Count());
}
[Fact]
public async Task UpdateLibrariesForUser_GrantsAccessToSelectedLibraries_WhenNotAdmin()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (user, accountService, _, _) = await Setup(unitOfWork, context, mapper);
var mangaLib = new LibraryBuilder("Manga", LibraryType.Manga).Build();
var lightNovelsLib = new LibraryBuilder("Light Novels", LibraryType.LightNovel).Build();
unitOfWork.LibraryRepository.Add(mangaLib);
unitOfWork.LibraryRepository.Add(lightNovelsLib);
await unitOfWork.CommitAsync();
await accountService.UpdateLibrariesForUser(user, new List<int> { mangaLib.Id }, hasAdminRole: false);
await unitOfWork.CommitAsync();
var userLibs = (await unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id)).ToList();
Assert.Single(userLibs);
Assert.Equal(mangaLib.Id, userLibs.First().Id);
}
[Fact]
public async Task UpdateLibrariesForUser_RemovesAccessFromUnselectedLibraries_WhenNotAdmin()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (user, accountService, _, _) = await Setup(unitOfWork, context, mapper);
var mangaLib = new LibraryBuilder("Manga", LibraryType.Manga).Build();
var lightNovelsLib = new LibraryBuilder("Light Novels", LibraryType.LightNovel).Build();
unitOfWork.LibraryRepository.Add(mangaLib);
unitOfWork.LibraryRepository.Add(lightNovelsLib);
await unitOfWork.CommitAsync();
// Grant access to both libraries
await accountService.UpdateLibrariesForUser(user, new List<int> { mangaLib.Id, lightNovelsLib.Id }, hasAdminRole: false);
await unitOfWork.CommitAsync();
var userLibs = (await unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id)).ToList();
Assert.Equal(2, userLibs.Count);
// Now restrict access to only light novels
await accountService.UpdateLibrariesForUser(user, new List<int> { lightNovelsLib.Id }, hasAdminRole: false);
await unitOfWork.CommitAsync();
userLibs = (await unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id)).ToList();
Assert.Single(userLibs);
Assert.Equal(lightNovelsLib.Id, userLibs.First().Id);
}
[Fact]
public async Task UpdateLibrariesForUser_GrantsNoLibraries_WhenNoneSelected_AndNotAdmin()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (user, accountService, _, _) = await Setup(unitOfWork, context, mapper);
var mangaLib = new LibraryBuilder("Manga", LibraryType.Manga).Build();
var lightNovelsLib = new LibraryBuilder("Light Novels", LibraryType.LightNovel).Build();
unitOfWork.LibraryRepository.Add(mangaLib);
unitOfWork.LibraryRepository.Add(lightNovelsLib);
await unitOfWork.CommitAsync();
// Initially grant access to both libraries
await accountService.UpdateLibrariesForUser(user, new List<int> { mangaLib.Id, lightNovelsLib.Id }, hasAdminRole: false);
await unitOfWork.CommitAsync();
var userLibs = (await unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id)).ToList();
Assert.Equal(2, userLibs.Count);
// Now revoke all access by passing empty list
await accountService.UpdateLibrariesForUser(user, new List<int>(), hasAdminRole: false);
await unitOfWork.CommitAsync();
userLibs = (await unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id)).ToList();
Assert.Empty(userLibs);
}
private static async Task<(AppUser, IAccountService, UserManager<AppUser>, SettingsService)> Setup(IUnitOfWork unitOfWork, DataContext context, IMapper mapper)
{
var defaultAdmin = new AppUserBuilder("defaultAdmin", "defaultAdmin@localhost")
.WithRole(PolicyConstants.AdminRole)
.Build();
var user = new AppUserBuilder("amelia", "amelia@localhost").Build();
var roleStore = new RoleStore<
AppRole,
DataContext,
int,
IdentityUserRole<int>,
IdentityRoleClaim<int>
>(context);
var roleManager = new RoleManager<AppRole>(
roleStore,
[new RoleValidator<AppRole>()],
new UpperInvariantLookupNormalizer(),
new IdentityErrorDescriber(),
Substitute.For<ILogger<RoleManager<AppRole>>>());
foreach (var role in PolicyConstants.ValidRoles)
{
if (!await roleManager.RoleExistsAsync(role))
{
await roleManager.CreateAsync(new AppRole
{
Name = role,
});
}
}
var userStore = new UserStore<
AppUser,
AppRole,
DataContext,
int,
IdentityUserClaim<int>,
AppUserRole,
IdentityUserLogin<int>,
IdentityUserToken<int>,
IdentityRoleClaim<int>
>(context);
var userManager = new UserManager<AppUser>(userStore,
new OptionsWrapper<IdentityOptions>(new IdentityOptions()),
new PasswordHasher<AppUser>(),
[new UserValidator<AppUser>()],
[new PasswordValidator<AppUser>()],
new UpperInvariantLookupNormalizer(),
new IdentityErrorDescriber(),
null!,
Substitute.For<ILogger<UserManager<AppUser>>>());
// Create users with the UserManager such that the SecurityStamp is set
await userManager.CreateAsync(user);
await userManager.CreateAsync(defaultAdmin);
var accountService = new AccountService(userManager, Substitute.For<ILogger<AccountService>>(), unitOfWork, mapper, Substitute.For<ILocalizationService>());
var settingsService = new SettingsService(unitOfWork, Substitute.For<IDirectoryService>(), Substitute.For<ILibraryWatcher>(), Substitute.For<ITaskScheduler>(), Substitute.For<ILogger<SettingsService>>(), Substitute.For<IOidcService>());
user = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id, AppUserIncludes.SideNavStreams);
return (user, accountService, userManager, settingsService);
}
}
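Each converted test class also forwards xUnit's `ITestOutputHelper` into the `AbstractDbTest(outputHelper)` base constructor. How the base class uses it is not visible in this excerpt; one plausible use, sketched here purely as an assumption, is routing EF Core diagnostics into the per-test output so failures are easier to debug:

```csharp
// Hypothetical sketch - assumes AbstractDbTest keeps the ITestOutputHelper from its primary constructor.
private static DbContextOptions BuildOptions(DbConnection connection, ITestOutputHelper output)
{
    return new DbContextOptionsBuilder()
        .UseSqlite(connection)
        .LogTo(output.WriteLine, LogLevel.Warning) // surface EF Core warnings in the test output
        .EnableSensitiveDataLogging()
        .Options;
}
```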

View File

@@ -16,90 +16,29 @@ using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
public class BackupServiceTests: AbstractFsTest
public class BackupServiceTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private readonly ILogger<BackupService> _logger = Substitute.For<ILogger<BackupService>>();
private readonly IUnitOfWork _unitOfWork;
private readonly IEventHub _messageHub = Substitute.For<IEventHub>();
private readonly IConfiguration _config;
private readonly DbConnection _connection;
private readonly DataContext _context;
public BackupServiceTests()
{
var contextOptions = new DbContextOptionsBuilder()
.UseSqlite(CreateInMemoryDatabase())
.Options;
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
_context = new DataContext(contextOptions);
Task.Run(SeedDb).GetAwaiter().GetResult();
_unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
_config = Substitute.For<IConfiguration>();
}
#region Setup
private static DbConnection CreateInMemoryDatabase()
{
var connection = new SqliteConnection("Filename=:memory:");
connection.Open();
return connection;
}
public void Dispose() => _connection.Dispose();
private async Task<bool> SeedDb()
{
await _context.Database.MigrateAsync();
var filesystem = CreateFileSystem();
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
setting.Value = CacheDirectory;
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting.Value = BackupDirectory;
_context.ServerSetting.Update(setting);
_context.Library.Add(new LibraryBuilder("Manga")
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
.Build());
return await _context.SaveChangesAsync() > 0;
}
private async Task ResetDB()
{
_context.Series.RemoveRange(_context.Series.ToList());
await _context.SaveChangesAsync();
}
#endregion
#region GetLogFiles
[Fact]
public void GetLogFiles_ExpectAllFiles_NoRollingFiles()
public async Task GetLogFiles_ExpectAllFiles_NoRollingFiles()
{
var (unitOfWork, context, _) = await CreateDatabase();
var filesystem = CreateFileSystem();
filesystem.AddFile($"{LogDirectory}kavita.log", new MockFileData(""));
filesystem.AddFile($"{LogDirectory}kavita1.log", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var backupService = new BackupService(_logger, _unitOfWork, ds, _messageHub);
var backupService = new BackupService(_logger, unitOfWork, ds, _messageHub);
var backupLogFiles = backupService.GetLogFiles(false).ToList();
Assert.Single(backupLogFiles);
@@ -107,17 +46,19 @@ public class BackupServiceTests: AbstractFsTest
}
[Fact]
public void GetLogFiles_ExpectAllFiles_WithRollingFiles()
public async Task GetLogFiles_ExpectAllFiles_WithRollingFiles()
{
var (unitOfWork, context, _) = await CreateDatabase();
var filesystem = CreateFileSystem();
filesystem.AddFile($"{LogDirectory}kavita.log", new MockFileData(""));
filesystem.AddFile($"{LogDirectory}kavita20200213.log", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var backupService = new BackupService(_logger, _unitOfWork, ds, _messageHub);
var backupService = new BackupService(_logger, unitOfWork, ds, _messageHub);
var backupLogFiles = backupService.GetLogFiles().Select(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath).ToList();
Assert.NotEmpty(backupLogFiles.Where(file => file.Equals(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath($"{LogDirectory}kavita.log")) || file.Equals(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath($"{LogDirectory}kavita1.log"))));
Assert.Contains(backupLogFiles, file => file.Equals(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath($"{LogDirectory}kavita.log")) || file.Equals(API.Services.Tasks.Scanner.Parser.Parser.NormalizePath($"{LogDirectory}kavita1.log")));
}

View File

@@ -19,95 +19,29 @@ using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
public class BookmarkServiceTests: AbstractFsTest
public class BookmarkServiceTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private readonly IUnitOfWork _unitOfWork;
private readonly DbConnection _connection;
private readonly DataContext _context;
public BookmarkServiceTests()
private BookmarkService Create(IDirectoryService ds, IUnitOfWork unitOfWork)
{
var contextOptions = new DbContextOptionsBuilder()
.UseSqlite(CreateInMemoryDatabase())
.Options;
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
_context = new DataContext(contextOptions);
Task.Run(SeedDb).GetAwaiter().GetResult();
var config = new MapperConfiguration(cfg => cfg.AddProfile<AutoMapperProfiles>());
var mapper = config.CreateMapper();
_unitOfWork = new UnitOfWork(_context, mapper, null);
}
private BookmarkService Create(IDirectoryService ds)
{
return new BookmarkService(Substitute.For<ILogger<BookmarkService>>(), _unitOfWork, ds,
return new BookmarkService(Substitute.For<ILogger<BookmarkService>>(), unitOfWork, ds,
Substitute.For<IMediaConversionService>());
}
#region Setup
private static DbConnection CreateInMemoryDatabase()
{
var connection = new SqliteConnection("Filename=:memory:");
connection.Open();
return connection;
}
private async Task<bool> SeedDb()
{
await _context.Database.MigrateAsync();
var filesystem = CreateFileSystem();
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
setting.Value = CacheDirectory;
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting.Value = BackupDirectory;
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BookmarkDirectory).SingleAsync();
setting.Value = BookmarkDirectory;
_context.ServerSetting.Update(setting);
_context.Library.Add(new LibraryBuilder("Manga")
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
.Build());
return await _context.SaveChangesAsync() > 0;
}
private async Task ResetDB()
{
_context.Series.RemoveRange(_context.Series.ToList());
_context.Users.RemoveRange(_context.Users.ToList());
_context.AppUserBookmark.RemoveRange(_context.AppUserBookmark.ToList());
await _context.SaveChangesAsync();
}
#endregion
#region BookmarkPage
[Fact]
public async Task BookmarkPage_ShouldCopyTheFileAndUpdateDB()
{
var (unitOfWork, context, _) = await CreateDatabase();
var filesystem = CreateFileSystem();
var file = $"{CacheDirectory}1/0001.jpg";
filesystem.AddFile(file, new MockFileData("123"));
// Delete all Series to reset state
await ResetDB();
var series = new SeriesBuilder("Test")
.WithFormat(MangaFormat.Epub)
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
@@ -116,20 +50,20 @@ Substitute.For<IMediaConversionService>());
.Build())
.Build();
series.Library = new LibraryBuilder("Test LIb").Build();
_context.Series.Add(series);
context.Series.Add(series);
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "Joe"
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var bookmarkService = Create(ds);
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Bookmarks);
var bookmarkService = Create(ds, unitOfWork);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Bookmarks);
var result = await bookmarkService.BookmarkPage(user, new BookmarkDto()
{
@@ -142,19 +76,18 @@ Substitute.For<IMediaConversionService>());
Assert.True(result);
Assert.Single(ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories));
Assert.NotNull(await _unitOfWork.UserRepository.GetBookmarkAsync(1));
Assert.NotNull(await unitOfWork.UserRepository.GetBookmarkAsync(1));
}
[Fact]
public async Task BookmarkPage_ShouldDeleteFileOnUnbookmark()
{
var (unitOfWork, context, _) = await CreateDatabase();
var filesystem = CreateFileSystem();
filesystem.AddFile($"{CacheDirectory}1/0001.jpg", new MockFileData("123"));
filesystem.AddFile($"{BookmarkDirectory}1/1/0001.jpg", new MockFileData("123"));
// Delete all Series to reset state
await ResetDB();
var series = new SeriesBuilder("Test")
.WithFormat(MangaFormat.Epub)
.WithVolume(new VolumeBuilder("1")
@@ -165,10 +98,10 @@ Substitute.For<IMediaConversionService>());
.Build();
series.Library = new LibraryBuilder("Test LIb").Build();
_context.Series.Add(series);
context.Series.Add(series);
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "Joe",
Bookmarks = new List<AppUserBookmark>()
@@ -184,12 +117,12 @@ Substitute.For<IMediaConversionService>());
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var bookmarkService = Create(ds);
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Bookmarks);
var bookmarkService = Create(ds, unitOfWork);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Bookmarks);
var result = await bookmarkService.RemoveBookmarkPage(user, new BookmarkDto()
{
@@ -202,7 +135,7 @@ Substitute.For<IMediaConversionService>());
Assert.True(result);
Assert.Empty(ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories));
Assert.Null(await _unitOfWork.UserRepository.GetBookmarkAsync(1));
Assert.Null(await unitOfWork.UserRepository.GetBookmarkAsync(1));
}
#endregion
@@ -212,15 +145,14 @@ Substitute.For<IMediaConversionService>());
[Fact]
public async Task DeleteBookmarkFiles_ShouldDeleteOnlyPassedFiles()
{
var (unitOfWork, context, _) = await CreateDatabase();
var filesystem = CreateFileSystem();
filesystem.AddFile($"{CacheDirectory}1/0001.jpg", new MockFileData("123"));
filesystem.AddFile($"{BookmarkDirectory}1/1/1/0001.jpg", new MockFileData("123"));
filesystem.AddFile($"{BookmarkDirectory}1/2/1/0002.jpg", new MockFileData("123"));
filesystem.AddFile($"{BookmarkDirectory}1/2/1/0001.jpg", new MockFileData("123"));
// Delete all Series to reset state
await ResetDB();
var series = new SeriesBuilder("Test")
.WithFormat(MangaFormat.Epub)
.WithVolume(new VolumeBuilder("1")
@@ -231,9 +163,9 @@ Substitute.For<IMediaConversionService>());
.Build();
series.Library = new LibraryBuilder("Test LIb").Build();
_context.Series.Add(series);
context.Series.Add(series);
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "Joe",
Bookmarks = new List<AppUserBookmark>()
@@ -265,20 +197,22 @@ Substitute.For<IMediaConversionService>());
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var bookmarkService = Create(ds);
var bookmarkService = Create(ds, unitOfWork);
await bookmarkService.DeleteBookmarkFiles(new [] {new AppUserBookmark()
{
await bookmarkService.DeleteBookmarkFiles([
new AppUserBookmark
{
Page = 1,
ChapterId = 1,
FileName = $"1/1/1/0001.jpg",
SeriesId = 1,
VolumeId = 1
}});
}
]);
Assert.Equal(2, ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories).Count());
@@ -294,8 +228,7 @@ Substitute.For<IMediaConversionService>());
var filesystem = CreateFileSystem();
filesystem.AddFile($"{CacheDirectory}1/0001.jpg", new MockFileData("123"));
// Delete all Series to reset state
await ResetDB();
var (unitOfWork, context, _) = await CreateDatabase();
var series = new SeriesBuilder("Test")
.WithFormat(MangaFormat.Epub)
@@ -307,20 +240,20 @@ Substitute.For<IMediaConversionService>());
.Build();
series.Library = new LibraryBuilder("Test LIb").Build();
_context.Series.Add(series);
context.Series.Add(series);
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "Joe"
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var bookmarkService = Create(ds);
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Bookmarks);
var bookmarkService = Create(ds, unitOfWork);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Bookmarks);
await bookmarkService.BookmarkPage(user, new BookmarkDto()
{
@@ -347,8 +280,7 @@ Substitute.For<IMediaConversionService>());
filesystem.AddFile($"{CacheDirectory}1/0001.jpg", new MockFileData("123"));
filesystem.AddFile($"{BookmarkDirectory}1/1/0001.jpg", new MockFileData("123"));
// Delete all Series to reset state
await ResetDB();
var (unitOfWork, context, _) = await CreateDatabase();
var series = new SeriesBuilder("Test")
.WithFormat(MangaFormat.Epub)
@@ -360,9 +292,9 @@ Substitute.For<IMediaConversionService>());
.Build();
series.Library = new LibraryBuilder("Test LIb").Build();
_context.Series.Add(series);
context.Series.Add(series);
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "Joe",
Bookmarks = new List<AppUserBookmark>()
@@ -378,19 +310,19 @@ Substitute.For<IMediaConversionService>());
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var vol = await _unitOfWork.VolumeRepository.GetVolumeAsync(1);
var vol = await unitOfWork.VolumeRepository.GetVolumeAsync(1);
vol.Chapters = new List<Chapter>();
_unitOfWork.VolumeRepository.Update(vol);
await _unitOfWork.CommitAsync();
unitOfWork.VolumeRepository.Update(vol);
await unitOfWork.CommitAsync();
Assert.Single(ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories));
Assert.NotNull(await _unitOfWork.UserRepository.GetBookmarkAsync(1));
Assert.NotNull(await unitOfWork.UserRepository.GetBookmarkAsync(1));
}
@@ -401,8 +333,7 @@ Substitute.For<IMediaConversionService>());
filesystem.AddFile($"{CacheDirectory}1/0001.jpg", new MockFileData("123"));
filesystem.AddFile($"{BookmarkDirectory}1/1/0001.jpg", new MockFileData("123"));
// Delete all Series to reset state
await ResetDB();
var (unitOfWork, context, _) = await CreateDatabase();
var series = new SeriesBuilder("Test")
.WithFormat(MangaFormat.Epub)
.WithVolume(new VolumeBuilder("1")
@@ -413,10 +344,10 @@ Substitute.For<IMediaConversionService>());
.Build();
series.Library = new LibraryBuilder("Test LIb").Build();
_context.Series.Add(series);
context.Series.Add(series);
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "Joe",
Bookmarks = new List<AppUserBookmark>()
@@ -432,19 +363,19 @@ Substitute.For<IMediaConversionService>());
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Bookmarks);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Bookmarks);
Assert.NotEmpty(user!.Bookmarks);
series.Volumes = new List<Volume>();
_unitOfWork.SeriesRepository.Update(series);
await _unitOfWork.CommitAsync();
unitOfWork.SeriesRepository.Update(series);
await unitOfWork.CommitAsync();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
Assert.Single(ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories));
Assert.NotNull(await _unitOfWork.UserRepository.GetBookmarkAsync(1));
Assert.NotNull(await unitOfWork.UserRepository.GetBookmarkAsync(1));
}
#endregion

View File

@@ -18,6 +18,7 @@ using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
@@ -60,87 +61,27 @@ internal class MockReadingItemServiceForCacheService : IReadingItemService
throw new System.NotImplementedException();
}
}
public class CacheServiceTests: AbstractFsTest
public class CacheServiceTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private readonly ILogger<CacheService> _logger = Substitute.For<ILogger<CacheService>>();
private readonly IUnitOfWork _unitOfWork;
private readonly IHubContext<MessageHub> _messageHub = Substitute.For<IHubContext<MessageHub>>();
private readonly DbConnection _connection;
private readonly DataContext _context;
public CacheServiceTests()
{
var contextOptions = new DbContextOptionsBuilder()
.UseSqlite(CreateInMemoryDatabase())
.Options;
_connection = RelationalOptionsExtension.Extract(contextOptions).Connection;
_context = new DataContext(contextOptions);
Task.Run(SeedDb).GetAwaiter().GetResult();
_unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
}
#region Setup
private static DbConnection CreateInMemoryDatabase()
{
var connection = new SqliteConnection("Filename=:memory:");
connection.Open();
return connection;
}
public void Dispose() => _connection.Dispose();
private async Task<bool> SeedDb()
{
await _context.Database.MigrateAsync();
var filesystem = CreateFileSystem();
await Seed.SeedSettings(_context, new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
setting.Value = CacheDirectory;
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting.Value = BackupDirectory;
_context.ServerSetting.Update(setting);
_context.Library.Add(new LibraryBuilder("Manga")
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
.Build());
return await _context.SaveChangesAsync() > 0;
}
private async Task ResetDB()
{
_context.Series.RemoveRange(_context.Series.ToList());
await _context.SaveChangesAsync();
}
#endregion
#region Ensure
[Fact]
public async Task Ensure_DirectoryAlreadyExists_DontExtractAnything()
{
var (unitOfWork, context, _) = await CreateDatabase();
var filesystem = CreateFileSystem();
filesystem.AddFile($"{DataDirectory}Test v1.zip", new MockFileData(""));
filesystem.AddDirectory($"{CacheDirectory}1/");
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
var cleanupService = new CacheService(_logger, unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(),
Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
Substitute.For<IBookmarkService>());
await ResetDB();
var s = new SeriesBuilder("Test").Build();
var v = new VolumeBuilder("1").Build();
var c = new ChapterBuilder("1")
@@ -149,9 +90,9 @@ public class CacheServiceTests: AbstractFsTest
v.Chapters.Add(c);
s.Volumes.Add(v);
s.LibraryId = 1;
_context.Series.Add(s);
context.Series.Add(s);
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
await cleanupService.Ensure(1);
Assert.Empty(ds.GetFiles(filesystem.Path.Join(CacheDirectory, "1"), searchOption:SearchOption.AllDirectories));
@@ -203,15 +144,17 @@ public class CacheServiceTests: AbstractFsTest
#region CleanupChapters
[Fact]
public void CleanupChapters_AllFilesShouldBeDeleted()
public async Task CleanupChapters_AllFilesShouldBeDeleted()
{
var (unitOfWork, context, _) = await CreateDatabase();
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{CacheDirectory}1/001.jpg", new MockFileData(""));
filesystem.AddFile($"{CacheDirectory}1/002.jpg", new MockFileData(""));
filesystem.AddFile($"{CacheDirectory}3/003.jpg", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CacheService(_logger, _unitOfWork, ds,
var cleanupService = new CacheService(_logger, unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
Substitute.For<IBookmarkService>());
@@ -226,14 +169,16 @@ public class CacheServiceTests: AbstractFsTest
#region GetCachedEpubFile
[Fact]
public void GetCachedEpubFile_ShouldReturnFirstEpub()
public async Task GetCachedEpubFile_ShouldReturnFirstEpub()
{
var (unitOfWork, context, _) = await CreateDatabase();
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.epub", new MockFileData(""));
filesystem.AddFile($"{DataDirectory}2.epub", new MockFileData(""));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
var cs = new CacheService(_logger, unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
Substitute.For<IBookmarkService>());
@@ -251,8 +196,10 @@ public class CacheServiceTests: AbstractFsTest
#region GetCachedPagePath
[Fact]
public void GetCachedPagePath_ReturnNullIfNoFiles()
public async Task GetCachedPagePath_ReturnNullIfNoFiles()
{
var (unitOfWork, context, _) = await CreateDatabase();
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
@@ -274,7 +221,7 @@ public class CacheServiceTests: AbstractFsTest
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
var cs = new CacheService(_logger, unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
Substitute.For<IBookmarkService>());
@@ -287,8 +234,10 @@ public class CacheServiceTests: AbstractFsTest
}
[Fact]
public void GetCachedPagePath_GetFileFromFirstFile()
public async Task GetCachedPagePath_GetFileFromFirstFile()
{
var (unitOfWork, context, _) = await CreateDatabase();
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
@@ -318,7 +267,7 @@ public class CacheServiceTests: AbstractFsTest
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
var cs = new CacheService(_logger, unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
Substitute.For<IBookmarkService>());
@@ -332,8 +281,10 @@ public class CacheServiceTests: AbstractFsTest
[Fact]
public void GetCachedPagePath_GetLastPageFromSingleFile()
public async Task GetCachedPagePath_GetLastPageFromSingleFile()
{
var (unitOfWork, context, _) = await CreateDatabase();
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
@@ -359,7 +310,7 @@ public class CacheServiceTests: AbstractFsTest
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
var cs = new CacheService(_logger, unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
Substitute.For<IBookmarkService>());
@@ -373,8 +324,10 @@ public class CacheServiceTests: AbstractFsTest
}
[Fact]
public void GetCachedPagePath_GetFileFromSecondFile()
public async Task GetCachedPagePath_GetFileFromSecondFile()
{
var (unitOfWork, context, _) = await CreateDatabase();
var filesystem = CreateFileSystem();
filesystem.AddDirectory($"{CacheDirectory}1/");
filesystem.AddFile($"{DataDirectory}1.zip", new MockFileData(""));
@@ -404,7 +357,7 @@ public class CacheServiceTests: AbstractFsTest
}
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cs = new CacheService(_logger, _unitOfWork, ds,
var cs = new CacheService(_logger, unitOfWork, ds,
new ReadingItemService(Substitute.For<IArchiveService>(),
Substitute.For<IBookService>(), Substitute.For<IImageService>(), ds, Substitute.For<ILogger<ReadingItemService>>()),
Substitute.For<IBookmarkService>());

View File

@@ -4,6 +4,7 @@ using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.DTOs.Filtering;
using API.Entities;
@@ -18,36 +19,28 @@ using API.SignalR;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
public class CleanupServiceTests : AbstractDbTest
public class CleanupServiceTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private readonly ILogger<CleanupService> _logger = Substitute.For<ILogger<CleanupService>>();
private readonly IEventHub _messageHub = Substitute.For<IEventHub>();
private readonly IReaderService _readerService;
public CleanupServiceTests() : base()
{
Context.Library.Add(new LibraryBuilder("Manga")
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
.Build());
_readerService = new ReaderService(UnitOfWork, Substitute.For<ILogger<ReaderService>>(), Substitute.For<IEventHub>(),
Substitute.For<IImageService>(),
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()), Substitute.For<IScrobblingService>());
}
#region Setup
protected override async Task ResetDb()
private async Task<(ILogger<CleanupService>, IEventHub, IReaderService)> Setup(IUnitOfWork unitOfWork, DataContext context)
{
Context.Series.RemoveRange(Context.Series.ToList());
Context.Users.RemoveRange(Context.Users.ToList());
Context.AppUserBookmark.RemoveRange(Context.AppUserBookmark.ToList());
context.Library.Add(new LibraryBuilder("Manga")
.WithFolderPath(new FolderPathBuilder(Root + "data/").Build())
.Build());
await Context.SaveChangesAsync();
var logger = Substitute.For<ILogger<CleanupService>>();
var messageHub = Substitute.For<IEventHub>();
var readerService = new ReaderService(unitOfWork, Substitute.For<ILogger<ReaderService>>(), Substitute.For<IEventHub>(),
Substitute.For<IImageService>(),
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()), Substitute.For<IScrobblingService>());
return (logger, messageHub, readerService);
}
#endregion
@@ -63,23 +56,24 @@ public class CleanupServiceTests : AbstractDbTest
filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetSeriesFormat(1000)}.jpg", new MockFileData(""));
// Delete all Series to reset state
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
var s = new SeriesBuilder("Test 1").Build();
s.CoverImage = $"{ImageService.GetSeriesFormat(1)}.jpg";
s.LibraryId = 1;
Context.Series.Add(s);
context.Series.Add(s);
s = new SeriesBuilder("Test 2").Build();
s.CoverImage = $"{ImageService.GetSeriesFormat(3)}.jpg";
s.LibraryId = 1;
Context.Series.Add(s);
context.Series.Add(s);
s = new SeriesBuilder("Test 3").Build();
s.CoverImage = $"{ImageService.GetSeriesFormat(1000)}.jpg";
s.LibraryId = 1;
Context.Series.Add(s);
context.Series.Add(s);
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
var cleanupService = new CleanupService(logger, unitOfWork, messageHub,
ds);
await cleanupService.DeleteSeriesCoverImages();
@@ -96,22 +90,23 @@ public class CleanupServiceTests : AbstractDbTest
filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetSeriesFormat(1000)}.jpg", new MockFileData(""));
// Delete all Series to reset state
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
// Add 2 series with cover images
var s = new SeriesBuilder("Test 1").Build();
s.CoverImage = $"{ImageService.GetSeriesFormat(1)}.jpg";
s.LibraryId = 1;
Context.Series.Add(s);
context.Series.Add(s);
s = new SeriesBuilder("Test 2").Build();
s.CoverImage = $"{ImageService.GetSeriesFormat(3)}.jpg";
s.LibraryId = 1;
Context.Series.Add(s);
context.Series.Add(s);
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
var cleanupService = new CleanupService(logger, unitOfWork, messageHub,
ds);
await cleanupService.DeleteSeriesCoverImages();
@@ -130,10 +125,11 @@ public class CleanupServiceTests : AbstractDbTest
filesystem.AddFile($"{CoverImageDirectory}v01_c1000.jpg", new MockFileData(""));
// Delete all Series to reset state
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
// Add 2 series with cover images
Context.Series.Add(new SeriesBuilder("Test 1")
context.Series.Add(new SeriesBuilder("Test 1")
.WithVolume(new VolumeBuilder("1")
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithCoverImage("v01_c01.jpg").Build())
.WithCoverImage("v01_c01.jpg")
@@ -142,7 +138,7 @@ public class CleanupServiceTests : AbstractDbTest
.WithLibraryId(1)
.Build());
Context.Series.Add(new SeriesBuilder("Test 2")
context.Series.Add(new SeriesBuilder("Test 2")
.WithVolume(new VolumeBuilder("1")
.WithChapter(new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter).WithCoverImage("v01_c03.jpg").Build())
.WithCoverImage("v01_c03.jpg")
@@ -152,9 +148,9 @@ public class CleanupServiceTests : AbstractDbTest
.Build());
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
var cleanupService = new CleanupService(logger, unitOfWork, messageHub,
ds);
await cleanupService.DeleteChapterCoverImages();
@@ -174,7 +170,8 @@ public class CleanupServiceTests : AbstractDbTest
// filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetCollectionTagFormat(1000)}.jpg", new MockFileData(""));
//
// // Delete all Series to reset state
// await ResetDb();
// var (unitOfWork, context, _) = await CreateDatabase();
// var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
//
// // Add 2 series with cover images
//
@@ -201,7 +198,7 @@ public class CleanupServiceTests : AbstractDbTest
//
// await _context.SaveChangesAsync();
// var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
// var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
// var cleanupService = new CleanupService(logger, _unitOfWork, messageHub,
// ds);
//
// await cleanupService.DeleteTagCoverImages();
@@ -221,9 +218,10 @@ public class CleanupServiceTests : AbstractDbTest
filesystem.AddFile($"{CoverImageDirectory}{ImageService.GetReadingListFormat(3)}.jpg", new MockFileData(""));
// Delete all Series to reset state
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
Context.Users.Add(new AppUser()
context.Users.Add(new AppUser()
{
UserName = "Joe",
ReadingLists = new List<ReadingList>()
@@ -239,10 +237,9 @@ public class CleanupServiceTests : AbstractDbTest
}
});
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
var cleanupService = new CleanupService(logger, unitOfWork, messageHub, ds);
await cleanupService.DeleteReadingListCoverImages();
@@ -253,29 +250,33 @@ public class CleanupServiceTests : AbstractDbTest
#region CleanupCacheDirectory
[Fact]
public void CleanupCacheDirectory_ClearAllFiles()
public async Task CleanupCacheDirectory_ClearAllFiles()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{CacheDirectory}01.jpg", new MockFileData(""));
filesystem.AddFile($"{CacheDirectory}02.jpg", new MockFileData(""));
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
var cleanupService = new CleanupService(logger, unitOfWork, messageHub, ds);
cleanupService.CleanupCacheAndTempDirectories();
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories));
}
[Fact]
public void CleanupCacheDirectory_ClearAllFilesInSubDirectory()
public async Task CleanupCacheDirectory_ClearAllFilesInSubDirectory()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{CacheDirectory}01.jpg", new MockFileData(""));
filesystem.AddFile($"{CacheDirectory}subdir/02.jpg", new MockFileData(""));
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
var cleanupService = new CleanupService(logger, unitOfWork, messageHub, ds);
cleanupService.CleanupCacheAndTempDirectories();
Assert.Empty(ds.GetFiles(CacheDirectory, searchOption: SearchOption.AllDirectories));
}
@@ -296,9 +297,11 @@ public class CleanupServiceTests : AbstractDbTest
filesystem.AddFile($"{BackupDirectory}kavita_backup_12_3_2021_9_27_58 AM.zip", filesystemFile);
filesystem.AddFile($"{BackupDirectory}randomfile.zip", filesystemFile);
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
var cleanupService = new CleanupService(logger, unitOfWork, messageHub, ds);
await cleanupService.CleanupBackups();
Assert.Single(ds.GetFiles(BackupDirectory, searchOption: SearchOption.AllDirectories));
}
@@ -318,9 +321,11 @@ public class CleanupServiceTests : AbstractDbTest
CreationTime = DateTimeOffset.Now.Subtract(TimeSpan.FromDays(14))
});
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
var cleanupService = new CleanupService(logger, unitOfWork, messageHub, ds);
await cleanupService.CleanupBackups();
Assert.True(filesystem.File.Exists($"{BackupDirectory}randomfile.zip"));
}
@@ -342,9 +347,11 @@ public class CleanupServiceTests : AbstractDbTest
});
}
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
var cleanupService = new CleanupService(logger, unitOfWork, messageHub, ds);
await cleanupService.CleanupLogs();
Assert.Single(ds.GetFiles(LogDirectory, searchOption: SearchOption.AllDirectories));
}
@@ -370,10 +377,12 @@ public class CleanupServiceTests : AbstractDbTest
CreationTime = DateTimeOffset.Now.Subtract(TimeSpan.FromDays(31 - 11))
});
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var cleanupService = new CleanupService(_logger, UnitOfWork, _messageHub,
ds);
var cleanupService = new CleanupService(logger, unitOfWork, messageHub, ds);
await cleanupService.CleanupLogs();
Assert.True(filesystem.File.Exists($"{LogDirectory}kavita20200911.log"));
}
@ -385,6 +394,9 @@ public class CleanupServiceTests : AbstractDbTest
[Fact]
public async Task CleanupDbEntries_CleanupAbandonedChapters()
{
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
var c = new ChapterBuilder(API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)
.WithPages(1)
.Build();
@ -396,47 +408,50 @@ public class CleanupServiceTests : AbstractDbTest
.Build();
series.Library = new LibraryBuilder("Test LIb").Build();
Context.Series.Add(series);
context.Series.Add(series);
Context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "majora2007"
});
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await _readerService.MarkChaptersUntilAsRead(user, 1, 5);
await Context.SaveChangesAsync();
var user = await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await readerService.MarkChaptersUntilAsRead(user, 1, 5);
await context.SaveChangesAsync();
// Validate correct chapters have read status
Assert.Equal(1, (await UnitOfWork.AppUserProgressRepository.GetUserProgressAsync(1, 1)).PagesRead);
Assert.Equal(1, (await unitOfWork.AppUserProgressRepository.GetUserProgressAsync(1, 1)).PagesRead);
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), unitOfWork,
Substitute.For<IEventHub>(),
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
// Delete the Chapter
Context.Chapter.Remove(c);
await UnitOfWork.CommitAsync();
Assert.Empty(await UnitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
context.Chapter.Remove(c);
await unitOfWork.CommitAsync();
Assert.Empty(await unitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
// NOTE: This may not be needed; the underlying DB structure seems fixed as of v0.7
await cleanupService.CleanupDbEntries();
Assert.Empty(await UnitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
Assert.Empty(await unitOfWork.AppUserProgressRepository.GetUserProgressForSeriesAsync(1, 1));
}
[Fact]
public async Task CleanupDbEntries_RemoveTagsWithoutSeries()
{
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
var s = new SeriesBuilder("Test")
.WithFormat(MangaFormat.Epub)
.WithMetadata(new SeriesMetadataBuilder().Build())
.Build();
s.Library = new LibraryBuilder("Test LIb").Build();
Context.Series.Add(s);
context.Series.Add(s);
var c = new AppUserCollection()
{
@ -446,24 +461,24 @@ public class CleanupServiceTests : AbstractDbTest
Items = new List<Series>() {s}
};
Context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "majora2007",
Collections = new List<AppUserCollection>() {c}
});
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), unitOfWork,
Substitute.For<IEventHub>(),
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
// Delete the Chapter
Context.Series.Remove(s);
await UnitOfWork.CommitAsync();
context.Series.Remove(s);
await unitOfWork.CommitAsync();
await cleanupService.CleanupDbEntries();
Assert.Empty(await UnitOfWork.CollectionTagRepository.GetAllCollectionsAsync());
Assert.Empty(await unitOfWork.CollectionTagRepository.GetAllCollectionsAsync());
}
#endregion
@ -473,22 +488,23 @@ public class CleanupServiceTests : AbstractDbTest
[Fact]
public async Task CleanupWantToRead_ShouldRemoveFullyReadSeries()
{
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
var s = new SeriesBuilder("Test CleanupWantToRead_ShouldRemoveFullyReadSeries")
.WithMetadata(new SeriesMetadataBuilder().WithPublicationStatus(PublicationStatus.Completed).Build())
.Build();
s.Library = new LibraryBuilder("Test LIb").Build();
Context.Series.Add(s);
context.Series.Add(s);
var user = new AppUser()
{
UserName = "CleanupWantToRead_ShouldRemoveFullyReadSeries",
};
Context.AppUser.Add(user);
context.AppUser.Add(user);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
// Add want to read
user.WantToRead = new List<AppUserWantToRead>()
@ -498,12 +514,12 @@ public class CleanupServiceTests : AbstractDbTest
SeriesId = s.Id
}
};
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
await _readerService.MarkSeriesAsRead(user, s.Id);
await UnitOfWork.CommitAsync();
await readerService.MarkSeriesAsRead(user, s.Id);
await unitOfWork.CommitAsync();
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), unitOfWork,
Substitute.For<IEventHub>(),
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
@ -511,7 +527,7 @@ public class CleanupServiceTests : AbstractDbTest
await cleanupService.CleanupWantToRead();
var wantToRead =
await UnitOfWork.SeriesRepository.GetWantToReadForUserAsync(user.Id, new UserParams(), new FilterDto());
await unitOfWork.SeriesRepository.GetWantToReadForUserAsync(user.Id, new UserParams(), new FilterDto());
Assert.Equal(0, wantToRead.TotalCount);
}
@ -522,7 +538,8 @@ public class CleanupServiceTests : AbstractDbTest
[Fact]
public async Task ConsolidateProgress_ShouldRemoveDuplicates()
{
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
var s = new SeriesBuilder("Test ConsolidateProgress_ShouldRemoveDuplicates")
.WithVolume(new VolumeBuilder("1")
@ -533,15 +550,15 @@ public class CleanupServiceTests : AbstractDbTest
.Build();
s.Library = new LibraryBuilder("Test Lib").Build();
Context.Series.Add(s);
context.Series.Add(s);
var user = new AppUser()
{
UserName = "ConsolidateProgress_ShouldRemoveDuplicates",
};
Context.AppUser.Add(user);
context.AppUser.Add(user);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
// Add 2 progress events
user.Progresses ??= [];
@ -553,7 +570,7 @@ public class CleanupServiceTests : AbstractDbTest
LibraryId = s.LibraryId,
PagesRead = 1,
});
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
// Add a duplicate with higher page number
user.Progresses.Add(new AppUserProgress()
@ -564,18 +581,18 @@ public class CleanupServiceTests : AbstractDbTest
LibraryId = s.LibraryId,
PagesRead = 3,
});
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
Assert.Equal(2, (await UnitOfWork.AppUserProgressRepository.GetAllProgress()).Count());
Assert.Equal(2, (await unitOfWork.AppUserProgressRepository.GetAllProgress()).Count());
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), unitOfWork,
Substitute.For<IEventHub>(),
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
await cleanupService.ConsolidateProgress();
var progress = await UnitOfWork.AppUserProgressRepository.GetAllProgress();
var progress = await unitOfWork.AppUserProgressRepository.GetAllProgress();
Assert.Single(progress);
Assert.True(progress.First().PagesRead == 3);
@ -588,7 +605,8 @@ public class CleanupServiceTests : AbstractDbTest
[Fact]
public async Task EnsureChapterProgressIsCapped_ShouldNormalizeProgress()
{
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
var s = new SeriesBuilder("Test CleanupWantToRead_ShouldRemoveFullyReadSeries")
.WithMetadata(new SeriesMetadataBuilder().WithPublicationStatus(PublicationStatus.Completed).Build())
@ -601,50 +619,50 @@ public class CleanupServiceTests : AbstractDbTest
{
new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume).WithChapter(c).Build()
};
Context.Series.Add(s);
context.Series.Add(s);
var user = new AppUser()
{
UserName = "EnsureChapterProgressIsCapped",
Progresses = new List<AppUserProgress>()
};
Context.AppUser.Add(user);
context.AppUser.Add(user);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
await _readerService.MarkChaptersAsRead(user, s.Id, new List<Chapter>() {c});
await UnitOfWork.CommitAsync();
await readerService.MarkChaptersAsRead(user, s.Id, new List<Chapter>() {c});
await unitOfWork.CommitAsync();
var chapter = await UnitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
await UnitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
var chapter = await unitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
await unitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
Assert.NotNull(chapter);
Assert.Equal(2, chapter.PagesRead);
// Update chapter to have 1 page
c.Pages = 1;
UnitOfWork.ChapterRepository.Update(c);
await UnitOfWork.CommitAsync();
unitOfWork.ChapterRepository.Update(c);
await unitOfWork.CommitAsync();
chapter = await UnitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
await UnitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
chapter = await unitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
await unitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
Assert.NotNull(chapter);
Assert.Equal(2, chapter.PagesRead);
Assert.Equal(1, chapter.Pages);
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), UnitOfWork,
var cleanupService = new CleanupService(Substitute.For<ILogger<CleanupService>>(), unitOfWork,
Substitute.For<IEventHub>(),
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()));
await cleanupService.EnsureChapterProgressIsCapped();
chapter = await UnitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
await UnitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
chapter = await unitOfWork.ChapterRepository.GetChapterDtoAsync(c.Id);
await unitOfWork.ChapterRepository.AddChapterModifiers(user.Id, chapter);
Assert.NotNull(chapter);
Assert.Equal(1, chapter.PagesRead);
Context.AppUser.Remove(user);
await UnitOfWork.CommitAsync();
context.AppUser.Remove(user);
await unitOfWork.CommitAsync();
}
#endregion
@ -658,7 +676,8 @@ public class CleanupServiceTests : AbstractDbTest
// filesystem.AddFile($"{BookmarkDirectory}1/1/1/0002.jpg", new MockFileData(""));
//
// // Delete all Series to reset state
// await ResetDb();
// var (unitOfWork, context, _) = await CreateDatabase();
// var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
//
// _context.Series.Add(new Series()
// {
@ -713,7 +732,7 @@ public class CleanupServiceTests : AbstractDbTest
//
//
// var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
// var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
// var cleanupService = new CleanupService(logger, _unitOfWork, messageHub,
// ds);
//
// await cleanupService.CleanupBookmarks();
@ -730,7 +749,8 @@ public class CleanupServiceTests : AbstractDbTest
// filesystem.AddFile($"{BookmarkDirectory}1/1/2/0002.jpg", new MockFileData(""));
//
// // Delete all Series to reset state
// await ResetDb();
// var (unitOfWork, context, _) = await CreateDatabase();
// var (logger, messageHub, readerService) = await Setup(unitOfWork, context);
//
// _context.Series.Add(new Series()
// {
@ -776,7 +796,7 @@ public class CleanupServiceTests : AbstractDbTest
//
//
// var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
// var cleanupService = new CleanupService(_logger, _unitOfWork, _messageHub,
// var cleanupService = new CleanupService(logger, _unitOfWork, messageHub,
// ds);
//
// await cleanupService.CleanupBookmarks();

View File

@ -14,33 +14,26 @@ using API.Services.Plus;
using API.SignalR;
using Kavita.Common;
using NSubstitute;
using Polly;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
public class CollectionTagServiceTests : AbstractDbTest
public class CollectionTagServiceTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private readonly ICollectionTagService _service;
public CollectionTagServiceTests()
{
_service = new CollectionTagService(UnitOfWork, Substitute.For<IEventHub>());
}
protected override async Task ResetDb()
private static async Task<ICollectionTagService> Setup(IUnitOfWork unitOfWork, DataContext context)
{
Context.AppUserCollection.RemoveRange(Context.AppUserCollection.ToList());
Context.Library.RemoveRange(Context.Library.ToList());
await UnitOfWork.CommitAsync();
}
private async Task SeedSeries()
{
if (Context.AppUserCollection.Any()) return;
if (context.AppUserCollection.Any())
{
return new CollectionTagService(unitOfWork, Substitute.For<IEventHub>());
}
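// Seed two series (Mature and G rated) in one library, plus a user owning Tag 1 (Series 1 only) and a promoted Tag 2 (both series)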
var s1 = new SeriesBuilder("Series 1").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Mature).Build()).Build();
var s2 = new SeriesBuilder("Series 2").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.G).Build()).Build();
Context.Library.Add(new LibraryBuilder("Library 2", LibraryType.Manga)
context.Library.Add(new LibraryBuilder("Library 2", LibraryType.Manga)
.WithSeries(s1)
.WithSeries(s2)
.Build());
@ -51,9 +44,11 @@ public class CollectionTagServiceTests : AbstractDbTest
new AppUserCollectionBuilder("Tag 1").WithItems(new []{s1}).Build(),
new AppUserCollectionBuilder("Tag 2").WithItems(new []{s1, s2}).WithIsPromoted(true).Build()
};
UnitOfWork.UserRepository.Add(user);
unitOfWork.UserRepository.Add(user);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
return new CollectionTagService(unitOfWork, Substitute.For<IEventHub>());
}
#region DeleteTag
@ -61,18 +56,18 @@ public class CollectionTagServiceTests : AbstractDbTest
[Fact]
public async Task DeleteTag_ShouldDeleteTag_WhenTagExists()
{
// Arrange
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
Assert.NotNull(user);
// Act
var result = await _service.DeleteTag(1, user);
var result = await service.DeleteTag(1, user);
// Assert
Assert.True(result);
var deletedTag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
var deletedTag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.Null(deletedTag);
Assert.Single(user.Collections); // Only one collection should remain
}
@ -80,13 +75,14 @@ public class CollectionTagServiceTests : AbstractDbTest
[Fact]
public async Task DeleteTag_ShouldReturnTrue_WhenTagDoesNotExist()
{
// Arrange
await SeedSeries();
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
Assert.NotNull(user);
// Act - Try to delete a non-existent tag
var result = await _service.DeleteTag(999, user);
var result = await service.DeleteTag(999, user);
// Assert
Assert.True(result); // Should return true because the tag is already "deleted"
@ -96,17 +92,18 @@ public class CollectionTagServiceTests : AbstractDbTest
[Fact]
public async Task DeleteTag_ShouldNotAffectOtherTags()
{
// Arrange
await SeedSeries();
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
Assert.NotNull(user);
// Act
var result = await _service.DeleteTag(1, user);
var result = await service.DeleteTag(1, user);
// Assert
Assert.True(result);
var remainingTag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
var remainingTag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
Assert.NotNull(remainingTag);
Assert.Equal("Tag 2", remainingTag.Title);
Assert.True(remainingTag.Promoted);
@ -119,16 +116,17 @@ public class CollectionTagServiceTests : AbstractDbTest
[Fact]
public async Task UpdateTag_ShouldUpdateFields()
{
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
Assert.NotNull(user);
user.Collections.Add(new AppUserCollectionBuilder("UpdateTag_ShouldUpdateFields").WithIsPromoted(true).Build());
UnitOfWork.UserRepository.Update(user);
await UnitOfWork.CommitAsync();
unitOfWork.UserRepository.Update(user);
await unitOfWork.CommitAsync();
await _service.UpdateTag(new AppUserCollectionDto()
await service.UpdateTag(new AppUserCollectionDto()
{
Title = "UpdateTag_ShouldUpdateFields",
Id = 3,
@ -137,7 +135,7 @@ public class CollectionTagServiceTests : AbstractDbTest
AgeRating = AgeRating.Unknown
}, 1);
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(3);
var tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(3);
Assert.NotNull(tag);
Assert.True(tag.Promoted);
Assert.False(string.IsNullOrEmpty(tag.Summary));
@ -149,16 +147,17 @@ public class CollectionTagServiceTests : AbstractDbTest
[Fact]
public async Task UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource()
{
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
Assert.NotNull(user);
user.Collections.Add(new AppUserCollectionBuilder("UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource").WithSource(ScrobbleProvider.Mal).Build());
UnitOfWork.UserRepository.Update(user);
await UnitOfWork.CommitAsync();
unitOfWork.UserRepository.Update(user);
await unitOfWork.CommitAsync();
await _service.UpdateTag(new AppUserCollectionDto()
await service.UpdateTag(new AppUserCollectionDto()
{
Title = "New Title",
Id = 3,
@ -167,7 +166,7 @@ public class CollectionTagServiceTests : AbstractDbTest
AgeRating = AgeRating.Unknown
}, 1);
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(3);
var tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(3);
Assert.NotNull(tag);
Assert.Equal("UpdateTag_ShouldNotChangeTitle_WhenNotKavitaSource", tag.Title);
Assert.False(string.IsNullOrEmpty(tag.Summary));
@ -177,10 +176,11 @@ public class CollectionTagServiceTests : AbstractDbTest
public async Task UpdateTag_ShouldThrowException_WhenTagDoesNotExist()
{
// Arrange
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
// Act & Assert
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
var exception = await Assert.ThrowsAsync<KavitaException>(() => service.UpdateTag(new AppUserCollectionDto()
{
Title = "Non-existent Tag",
Id = 999, // Non-existent ID
@ -194,15 +194,16 @@ public class CollectionTagServiceTests : AbstractDbTest
public async Task UpdateTag_ShouldThrowException_WhenUserDoesNotOwnTag()
{
// Arrange
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
// Create a second user
var user2 = new AppUserBuilder("user2", "user2", Seed.DefaultThemes.First()).Build();
UnitOfWork.UserRepository.Add(user2);
await UnitOfWork.CommitAsync();
unitOfWork.UserRepository.Add(user2);
await unitOfWork.CommitAsync();
// Act & Assert
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
var exception = await Assert.ThrowsAsync<KavitaException>(() => service.UpdateTag(new AppUserCollectionDto()
{
Title = "Tag 1",
Id = 1, // This belongs to user1
@ -216,10 +217,11 @@ public class CollectionTagServiceTests : AbstractDbTest
public async Task UpdateTag_ShouldThrowException_WhenTitleIsEmpty()
{
// Arrange
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
// Act & Assert
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
var exception = await Assert.ThrowsAsync<KavitaException>(() => service.UpdateTag(new AppUserCollectionDto()
{
Title = " ", // Empty after trimming
Id = 1,
@ -233,10 +235,11 @@ public class CollectionTagServiceTests : AbstractDbTest
public async Task UpdateTag_ShouldThrowException_WhenTitleAlreadyExists()
{
// Arrange
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
// Act & Assert
var exception = await Assert.ThrowsAsync<KavitaException>(() => _service.UpdateTag(new AppUserCollectionDto()
var exception = await Assert.ThrowsAsync<KavitaException>(() => service.UpdateTag(new AppUserCollectionDto()
{
Title = "Tag 2", // Already exists
Id = 1, // Trying to rename Tag 1 to Tag 2
@ -250,10 +253,11 @@ public class CollectionTagServiceTests : AbstractDbTest
public async Task UpdateTag_ShouldUpdateCoverImageSettings()
{
// Arrange
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
// Act
await _service.UpdateTag(new AppUserCollectionDto()
await service.UpdateTag(new AppUserCollectionDto()
{
Title = "Tag 1",
Id = 1,
@ -261,19 +265,19 @@ public class CollectionTagServiceTests : AbstractDbTest
}, 1);
// Assert
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
var tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.NotNull(tag);
Assert.True(tag.CoverImageLocked);
// Now test unlocking the cover image
await _service.UpdateTag(new AppUserCollectionDto()
await service.UpdateTag(new AppUserCollectionDto()
{
Title = "Tag 1",
Id = 1,
CoverImageLocked = false
}, 1);
tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.NotNull(tag);
Assert.False(tag.CoverImageLocked);
Assert.Equal(string.Empty, tag.CoverImage);
@ -283,16 +287,17 @@ public class CollectionTagServiceTests : AbstractDbTest
public async Task UpdateTag_ShouldAllowPromoteForAdminRole()
{
// Arrange
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
// Setup a user with admin role
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
Assert.NotNull(user);
await AddUserWithRole(user.Id, PolicyConstants.AdminRole);
await AddUserWithRole(context, user.Id, PolicyConstants.AdminRole);
// Act - Try to promote a tag that wasn't previously promoted
await _service.UpdateTag(new AppUserCollectionDto()
await service.UpdateTag(new AppUserCollectionDto()
{
Title = "Tag 1",
Id = 1,
@ -300,7 +305,7 @@ public class CollectionTagServiceTests : AbstractDbTest
}, 1);
// Assert
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
var tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.NotNull(tag);
Assert.True(tag.Promoted);
}
@ -309,17 +314,18 @@ public class CollectionTagServiceTests : AbstractDbTest
public async Task UpdateTag_ShouldAllowPromoteForPromoteRole()
{
// Arrange
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
// Setup a user with promote role
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
Assert.NotNull(user);
// Mock to return promote role for the user
await AddUserWithRole(user.Id, PolicyConstants.PromoteRole);
await AddUserWithRole(context, user.Id, PolicyConstants.PromoteRole);
// Act - Try to promote a tag that wasn't previously promoted
await _service.UpdateTag(new AppUserCollectionDto()
await service.UpdateTag(new AppUserCollectionDto()
{
Title = "Tag 1",
Id = 1,
@ -327,7 +333,7 @@ public class CollectionTagServiceTests : AbstractDbTest
}, 1);
// Assert
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
var tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.NotNull(tag);
Assert.True(tag.Promoted);
}
@ -336,14 +342,15 @@ public class CollectionTagServiceTests : AbstractDbTest
public async Task UpdateTag_ShouldNotChangePromotion_WhenUserHasNoPermission()
{
// Arrange
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
// Setup a user with no special roles
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
Assert.NotNull(user);
// Act - Try to promote a tag without proper role
await _service.UpdateTag(new AppUserCollectionDto()
await service.UpdateTag(new AppUserCollectionDto()
{
Title = "Tag 1",
Id = 1,
@ -351,7 +358,7 @@ public class CollectionTagServiceTests : AbstractDbTest
}, 1);
// Assert
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
var tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.NotNull(tag);
Assert.False(tag.Promoted); // Should remain unpromoted
}
@ -363,17 +370,18 @@ public class CollectionTagServiceTests : AbstractDbTest
[Fact]
public async Task RemoveTagFromSeries_RemoveSeriesFromTag()
{
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
Assert.NotNull(user);
// Tag 2 has 2 series
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
var tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
Assert.NotNull(tag);
await _service.RemoveTagFromSeries(tag, new[] {1});
var userCollections = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
await service.RemoveTagFromSeries(tag, new[] {1});
var userCollections = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
Assert.Equal(2, userCollections!.Collections.Count);
Assert.Single(tag.Items);
Assert.Equal(2, tag.Items.First().Id);
@ -385,16 +393,17 @@ public class CollectionTagServiceTests : AbstractDbTest
[Fact]
public async Task RemoveTagFromSeries_RemoveSeriesFromTag_UpdatesRating()
{
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
Assert.NotNull(user);
// Tag 2 has 2 series
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
var tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
Assert.NotNull(tag);
await _service.RemoveTagFromSeries(tag, new[] {1});
await service.RemoveTagFromSeries(tag, new[] {1});
Assert.Equal(AgeRating.G, tag.AgeRating);
}
@ -405,25 +414,28 @@ public class CollectionTagServiceTests : AbstractDbTest
[Fact]
public async Task RemoveTagFromSeries_RemoveSeriesFromTag_DeleteTagWhenNoSeriesLeft()
{
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Collections);
Assert.NotNull(user);
// Tag 1 has 1 series
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
var tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.NotNull(tag);
await _service.RemoveTagFromSeries(tag, new[] {1});
var tag2 = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
await service.RemoveTagFromSeries(tag, new[] {1});
var tag2 = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.Null(tag2);
}
[Fact]
public async Task RemoveTagFromSeries_ShouldReturnFalse_WhenTagIsNull()
{
// Act
var result = await _service.RemoveTagFromSeries(null, [1]);
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
var result = await service.RemoveTagFromSeries(null, [1]);
// Assert
Assert.False(result);
@ -433,18 +445,19 @@ public class CollectionTagServiceTests : AbstractDbTest
public async Task RemoveTagFromSeries_ShouldHandleEmptySeriesIdsList()
{
// Arrange
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
var tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.NotNull(tag);
var initialItemCount = tag.Items.Count;
// Act
var result = await _service.RemoveTagFromSeries(tag, Array.Empty<int>());
var result = await service.RemoveTagFromSeries(tag, Array.Empty<int>());
// Assert
Assert.True(result);
tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.NotNull(tag);
Assert.Equal(initialItemCount, tag.Items.Count); // No items should be removed
}
@ -453,18 +466,19 @@ public class CollectionTagServiceTests : AbstractDbTest
public async Task RemoveTagFromSeries_ShouldHandleNonExistentSeriesIds()
{
// Arrange
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
var tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.NotNull(tag);
var initialItemCount = tag.Items.Count;
// Act - Try to remove a series that doesn't exist in the tag
var result = await _service.RemoveTagFromSeries(tag, [999]);
var result = await service.RemoveTagFromSeries(tag, [999]);
// Assert
Assert.True(result);
tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.NotNull(tag);
Assert.Equal(initialItemCount, tag.Items.Count); // No items should be removed
}
@ -473,23 +487,24 @@ public class CollectionTagServiceTests : AbstractDbTest
public async Task RemoveTagFromSeries_ShouldHandleNullItemsList()
{
// Arrange
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
var tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.NotNull(tag);
// Force null items list
tag.Items = null;
UnitOfWork.CollectionTagRepository.Update(tag);
await UnitOfWork.CommitAsync();
unitOfWork.CollectionTagRepository.Update(tag);
await unitOfWork.CommitAsync();
// Act
var result = await _service.RemoveTagFromSeries(tag, [1]);
var result = await service.RemoveTagFromSeries(tag, [1]);
// Assert
Assert.True(result);
// The tag should not be removed since the items list was null, not empty
var tagAfter = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(1);
var tagAfter = await unitOfWork.CollectionTagRepository.GetCollectionAsync(1);
Assert.Null(tagAfter);
}
@ -497,25 +512,26 @@ public class CollectionTagServiceTests : AbstractDbTest
public async Task RemoveTagFromSeries_ShouldUpdateAgeRating_WhenMultipleSeriesRemain()
{
// Arrange
await SeedSeries();
var (unitOfWork, context, _) = await CreateDatabase();
var service = await Setup(unitOfWork, context);
// Add a third series with a different age rating
var s3 = new SeriesBuilder("Series 3").WithMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.PG).Build()).Build();
Context.Library.First().Series.Add(s3);
await UnitOfWork.CommitAsync();
context.Library.First().Series.Add(s3);
await unitOfWork.CommitAsync();
// Add series 3 to tag 2
var tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
var tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
Assert.NotNull(tag);
tag.Items.Add(s3);
UnitOfWork.CollectionTagRepository.Update(tag);
await UnitOfWork.CommitAsync();
unitOfWork.CollectionTagRepository.Update(tag);
await unitOfWork.CommitAsync();
// Act - Remove the series with Mature rating
await _service.RemoveTagFromSeries(tag, new[] {1});
await service.RemoveTagFromSeries(tag, new[] {1});
// Assert
tag = await UnitOfWork.CollectionTagRepository.GetCollectionAsync(2);
tag = await unitOfWork.CollectionTagRepository.GetCollectionAsync(2);
Assert.NotNull(tag);
Assert.Equal(2, tag.Items.Count);

View File

@ -3,6 +3,7 @@ using System.IO.Abstractions;
using System.Reflection;
using System.Threading.Tasks;
using API.Constants;
using API.Data;
using API.Entities.Enums;
using API.Extensions;
using API.Services;
@ -14,14 +15,13 @@ using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
public class CoverDbServiceTests : AbstractDbTest
public class CoverDbServiceTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private readonly DirectoryService _directoryService;
private readonly IEasyCachingProviderFactory _cacheFactory = Substitute.For<IEasyCachingProviderFactory>();
private readonly ICoverDbService _coverDbService;
private static readonly IEasyCachingProviderFactory CacheFactory = Substitute.For<IEasyCachingProviderFactory>();
private static readonly string FaviconPath = Path.Join(Directory.GetCurrentDirectory(),
"../../../Services/Test Data/CoverDbService/Favicons");
@ -31,57 +31,59 @@ public class CoverDbServiceTests : AbstractDbTest
private static readonly string TempPath = Path.Join(Directory.GetCurrentDirectory(),
"../../../Services/Test Data/CoverDbService/Temp");
public CoverDbServiceTests()
{
_directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), CreateFileSystem());
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), _directoryService);
_coverDbService = new CoverDbService(Substitute.For<ILogger<CoverDbService>>(), _directoryService, _cacheFactory,
Substitute.For<IHostEnvironment>(), imageService, UnitOfWork, Substitute.For<IEventHub>());
}
protected override Task ResetDb()
private static async Task<(IDirectoryService, ICoverDbService)> Setup(IUnitOfWork unitOfWork)
{
throw new System.NotImplementedException();
var directoryService = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), CreateFileSystem());
var imageService = new ImageService(Substitute.For<ILogger<ImageService>>(), directoryService);
var coverDbService = new CoverDbService(Substitute.For<ILogger<CoverDbService>>(), directoryService, CacheFactory,
Substitute.For<IHostEnvironment>(), imageService, unitOfWork, Substitute.For<IEventHub>());
return (directoryService, coverDbService);
}
#region Download Favicon
/// <summary>
/// I cannot figure out how to test this code due to the reliance on the _directoryService.FaviconDirectory and not being
/// I cannot figure out how to test this code due to the reliance on the directoryService.FaviconDirectory and not being
/// able to redirect it to the real filesystem.
/// </summary>
public async Task DownloadFaviconAsync_ShouldDownloadAndMatchExpectedFavicon()
{
var (unitOfWork, context, _) = await CreateDatabase();
var (directoryService, coverDbService) = await Setup(unitOfWork);
// Arrange
var testUrl = "https://anilist.co/anime/6205/Kmpfer/";
var encodeFormat = EncodeFormat.WEBP;
var expectedFaviconPath = Path.Combine(FaviconPath, "anilist.co.webp");
// Ensure TempPath exists
_directoryService.ExistOrCreate(TempPath);
directoryService.ExistOrCreate(TempPath);
var baseUrl = "https://anilist.co";
// Ensure there is no cache result for this URL
var provider = Substitute.For<IEasyCachingProvider>();
provider.GetAsync<string>(baseUrl).Returns(new CacheValue<string>(null, false));
_cacheFactory.GetCachingProvider(EasyCacheProfiles.Favicon).Returns(provider);
CacheFactory.GetCachingProvider(EasyCacheProfiles.Favicon).Returns(provider);
// // Replace favicon directory with TempPath
// var directoryService = (DirectoryService)_directoryService;
// var directoryService = (DirectoryService)directoryService;
// directoryService.FaviconDirectory = TempPath;
// Hack: Swap FaviconDirectory with TempPath for ability to download real files
typeof(DirectoryService)
.GetField("FaviconDirectory", BindingFlags.NonPublic | BindingFlags.Instance)
?.SetValue(_directoryService, TempPath);
?.SetValue(directoryService, TempPath);
// Act
var resultFilename = await _coverDbService.DownloadFaviconAsync(testUrl, encodeFormat);
var resultFilename = await coverDbService.DownloadFaviconAsync(testUrl, encodeFormat);
var actualFaviconPath = Path.Combine(TempPath, resultFilename);
// Assert file exists
@ -96,6 +98,9 @@ public class CoverDbServiceTests : AbstractDbTest
[Fact]
public async Task DownloadFaviconAsync_ShouldThrowKavitaException_WhenPreviouslyFailedUrlExistsInCache()
{
var (unitOfWork, context, _) = await CreateDatabase();
var (directoryService, coverDbService) = await Setup(unitOfWork);
// Arrange
var testUrl = "https://example.com";
var encodeFormat = EncodeFormat.WEBP;
@ -104,11 +109,11 @@ public class CoverDbServiceTests : AbstractDbTest
provider.GetAsync<string>(Arg.Any<string>())
.Returns(new CacheValue<string>(string.Empty, true)); // Simulate previous failure
_cacheFactory.GetCachingProvider(EasyCacheProfiles.Favicon).Returns(provider);
CacheFactory.GetCachingProvider(EasyCacheProfiles.Favicon).Returns(provider);
// Act & Assert
await Assert.ThrowsAsync<KavitaException>(() =>
_coverDbService.DownloadFaviconAsync(testUrl, encodeFormat));
coverDbService.DownloadFaviconAsync(testUrl, encodeFormat));
}
#endregion

View File

@ -1,37 +1,33 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.DTOs.Device;
using API.Entities;
using API.Entities.Enums.Device;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Polly;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
public class DeviceServiceDbTests : AbstractDbTest
public class DeviceServiceDbTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private readonly ILogger<DeviceService> _logger = Substitute.For<ILogger<DeviceService>>();
private readonly IDeviceService _deviceService;
public DeviceServiceDbTests() : base()
private async Task<IDeviceService> Setup(IUnitOfWork unitOfWork)
{
_deviceService = new DeviceService(UnitOfWork, _logger, Substitute.For<IEmailService>());
return new DeviceService(unitOfWork, _logger, Substitute.For<IEmailService>());
}
protected override async Task ResetDb()
{
Context.Users.RemoveRange(Context.Users.ToList());
await UnitOfWork.CommitAsync();
}
[Fact]
public async Task CreateDevice_Succeeds()
{
var (unitOfWork, context, _) = await CreateDatabase();
var deviceService = await Setup(unitOfWork);
var user = new AppUser()
{
@ -39,10 +35,10 @@ public class DeviceServiceDbTests : AbstractDbTest
Devices = new List<Device>()
};
Context.Users.Add(user);
await UnitOfWork.CommitAsync();
context.Users.Add(user);
await unitOfWork.CommitAsync();
var device = await _deviceService.Create(new CreateDeviceDto()
var device = await deviceService.Create(new CreateDeviceDto()
{
EmailAddress = "fake@kindle.com",
Name = "Test Kindle",
@ -55,6 +51,8 @@ public class DeviceServiceDbTests : AbstractDbTest
[Fact]
public async Task CreateDevice_ThrowsErrorWhenEmailDoesntMatchRules()
{
var (unitOfWork, context, _) = await CreateDatabase();
var deviceService = await Setup(unitOfWork);
var user = new AppUser()
{
@ -62,10 +60,10 @@ public class DeviceServiceDbTests : AbstractDbTest
Devices = new List<Device>()
};
Context.Users.Add(user);
await UnitOfWork.CommitAsync();
context.Users.Add(user);
await unitOfWork.CommitAsync();
var device = await _deviceService.Create(new CreateDeviceDto()
var device = await deviceService.Create(new CreateDeviceDto()
{
EmailAddress = "fake@gmail.com",
Name = "Test Kindle",

View File

@ -187,6 +187,12 @@ public class DirectoryServiceTests: AbstractFsTest
[Fact]
public void GetFiles_All_MixedPathSeparators()
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
_testOutputHelper.WriteLine("Skipping test on non Windows platform");
return;
}
const string testDirectory = "/manga/";
var fileSystem = new MockFileSystem();
for (var i = 0; i < 10; i++)
@ -810,6 +816,12 @@ public class DirectoryServiceTests: AbstractFsTest
[InlineData(@"M:\", @"M:\Toukyou Akazukin\Vol. 01 Ch. 005.cbz", @"Toukyou Akazukin")]
public void GetFoldersTillRoot_Test(string rootPath, string fullpath, string expectedArray)
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
_testOutputHelper.WriteLine("Skipping test on non Windows platform");
return;
}
var fileSystem = new MockFileSystem();
fileSystem.AddDirectory(rootPath);
fileSystem.AddFile(fullpath, new MockFileData(""));

File diff suppressed because it is too large

View File

@ -0,0 +1,588 @@
using System.Collections.Generic;
using System.IdentityModel.Tokens.Jwt;
using System.Linq;
using System.Security.Claims;
using System.Threading.Tasks;
using API.Constants;
using API.Data;
using API.DTOs.Settings;
using API.Entities;
using API.Entities.Enums;
using API.Helpers.Builders;
using API.Services;
using AutoMapper;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Identity.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using NSubstitute;
using Polly;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
public class OidcServiceTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
[Fact]
public async Task UserSync_Username()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (oidcService, _, _, userManager) = await Setup(unitOfWork, context, mapper);
var user = new AppUserBuilder("holo", "holo@localhost")
.WithIdentityProvider(IdentityProvider.OpenIdConnect)
.Build();
var res = await userManager.CreateAsync(user);
Assert.Empty(res.Errors);
Assert.True(res.Succeeded);
var claims = new List<Claim>()
{
new (ClaimTypes.Name, "amelia"),
new (ClaimTypes.GivenName, "Lawrence"),
};
var identity = new ClaimsIdentity(claims);
var principal = new ClaimsPrincipal(identity);
var settings = new OidcConfigDto
{
SyncUserSettings = true,
};
// Name is updated because the current username is not found among the claims; amelia is skipped as it is already in use
await oidcService.SyncUserSettings(null!, settings, principal, user);
var dbUser = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(dbUser);
Assert.Equal("Lawrence", user.UserName);
claims = new List<Claim>()
{
new (ClaimTypes.Name, "amelia"),
new (ClaimTypes.GivenName, "Lawrence"),
new (ClaimTypes.Surname, "Norah"),
};
identity = new ClaimsIdentity(claims);
principal = new ClaimsPrincipal(identity);
// Ensure a name further down the list isn't picked when the current username is found
await oidcService.SyncUserSettings(null!, settings, principal, user);
dbUser = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(dbUser);
Assert.Equal("Lawrence", user.UserName);
}
[Fact]
public async Task UserSync_CustomClaim()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (oidcService, user, _, _) = await Setup(unitOfWork, context, mapper);
var mangaLib = new LibraryBuilder("Manga", LibraryType.Manga).Build();
var lightNovelsLib = new LibraryBuilder("Light Novels", LibraryType.LightNovel).Build();
unitOfWork.LibraryRepository.Add(mangaLib);
unitOfWork.LibraryRepository.Add(lightNovelsLib);
await unitOfWork.CommitAsync();
const string claim = "groups";
var claims = new List<Claim>()
{
new (claim, PolicyConstants.LoginRole),
new (claim, PolicyConstants.DownloadRole),
new (ClaimTypes.Role, PolicyConstants.PromoteRole),
new (claim, OidcService.AgeRestrictionPrefix + "M"),
new (claim, OidcService.LibraryAccessPrefix + "Manga"),
new (ClaimTypes.Role, OidcService.LibraryAccessPrefix + "Light Novels"),
};
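// Only entries under the configured "groups" claim should be honored; the ClaimTypes.Role entries are expected to be ignored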
var identity = new ClaimsIdentity(claims);
var principal = new ClaimsPrincipal(identity);
var settings = new OidcConfigDto
{
SyncUserSettings = true,
RolesClaim = claim,
};
await oidcService.SyncUserSettings(null!, settings, principal, user);
// Check correct roles assigned
var userRoles = await unitOfWork.UserRepository.GetRoles(user.Id);
Assert.Contains(PolicyConstants.LoginRole, userRoles);
Assert.Contains(PolicyConstants.DownloadRole, userRoles);
Assert.DoesNotContain(PolicyConstants.PromoteRole, userRoles);
// Check correct libraries
var libraries = (await unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id)).Select(l => l.Name).ToList();
Assert.Single(libraries);
Assert.Contains(mangaLib.Name, libraries);
Assert.DoesNotContain(lightNovelsLib.Name, libraries);
// Check correct age restrictions
var dbUser = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(dbUser);
Assert.Equal(AgeRating.Mature, dbUser.AgeRestriction);
Assert.False(dbUser.AgeRestrictionIncludeUnknowns);
}
[Fact]
public async Task UserSync_CustomPrefix()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (oidcService, user, _, _) = await Setup(unitOfWork, context, mapper);
var mangaLib = new LibraryBuilder("Manga", LibraryType.Manga).Build();
var lightNovelsLib = new LibraryBuilder("Light Novels", LibraryType.LightNovel).Build();
unitOfWork.LibraryRepository.Add(mangaLib);
unitOfWork.LibraryRepository.Add(lightNovelsLib);
await unitOfWork.CommitAsync();
const string prefix = "kavita-";
var claims = new List<Claim>()
{
new (ClaimTypes.Role, prefix + PolicyConstants.LoginRole),
new (ClaimTypes.Role, prefix + PolicyConstants.DownloadRole),
new (ClaimTypes.Role, PolicyConstants.PromoteRole),
new (ClaimTypes.Role, prefix + OidcService.AgeRestrictionPrefix + "M"),
new (ClaimTypes.Role, prefix + OidcService.LibraryAccessPrefix + "Manga"),
new (ClaimTypes.Role, OidcService.LibraryAccessPrefix + "Light Novels"),
};
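// Only roles carrying the configured prefix should be honored; the unprefixed entries are expected to be ignored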
var identity = new ClaimsIdentity(claims);
var principal = new ClaimsPrincipal(identity);
var settings = new OidcConfigDto
{
SyncUserSettings = true,
RolesPrefix = prefix,
};
await oidcService.SyncUserSettings(null!, settings, principal, user);
// Check correct roles assigned
var userRoles = await unitOfWork.UserRepository.GetRoles(user.Id);
Assert.Contains(PolicyConstants.LoginRole, userRoles);
Assert.Contains(PolicyConstants.DownloadRole, userRoles);
Assert.DoesNotContain(PolicyConstants.PromoteRole, userRoles);
// Check correct libraries
var libraries = (await unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id)).Select(l => l.Name).ToList();
Assert.Single(libraries);
Assert.Contains(mangaLib.Name, libraries);
Assert.DoesNotContain(lightNovelsLib.Name, libraries);
// Check correct age restrictions
var dbUser = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(dbUser);
Assert.Equal(AgeRating.Mature, dbUser.AgeRestriction);
Assert.False(dbUser.AgeRestrictionIncludeUnknowns);
}
[Fact]
public async Task SyncRoles()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (oidcService, user, _, _) = await Setup(unitOfWork, context, mapper);
var claims = new List<Claim>()
{
new (ClaimTypes.Role, PolicyConstants.LoginRole),
new (ClaimTypes.Role, PolicyConstants.DownloadRole),
};
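// The first sync grants both roles; the second sync below should revoke Download once it is absent from the claims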
var identity = new ClaimsIdentity(claims);
var principal = new ClaimsPrincipal(identity);
var settings = new OidcConfigDto
{
SyncUserSettings = true,
};
await oidcService.SyncUserSettings(null!, settings, principal, user);
var userRoles = await unitOfWork.UserRepository.GetRoles(user.Id);
Assert.Contains(PolicyConstants.LoginRole, userRoles);
Assert.Contains(PolicyConstants.DownloadRole, userRoles);
// Only give one role
claims = [new Claim(ClaimTypes.Role, PolicyConstants.LoginRole)];
identity = new ClaimsIdentity(claims);
principal = new ClaimsPrincipal(identity);
await oidcService.SyncUserSettings(null!, settings, principal, user);
userRoles = await unitOfWork.UserRepository.GetRoles(user.Id);
Assert.Contains(PolicyConstants.LoginRole, userRoles);
Assert.DoesNotContain(PolicyConstants.DownloadRole, userRoles);
}
[Fact]
public async Task SyncLibraries()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (oidcService, user, _, _) = await Setup(unitOfWork, context, mapper);
var mangaLib = new LibraryBuilder("Manga", LibraryType.Manga).Build();
var lightNovelsLib = new LibraryBuilder("Light Novels", LibraryType.LightNovel).Build();
unitOfWork.LibraryRepository.Add(mangaLib);
unitOfWork.LibraryRepository.Add(lightNovelsLib);
await unitOfWork.CommitAsync();
var claims = new List<Claim>()
{
new (ClaimTypes.Role, OidcService.LibraryAccessPrefix + "Manga"),
};
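// Only the Manga library access claim is present, so access to Light Novels should not be granted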
var identity = new ClaimsIdentity(claims);
var principal = new ClaimsPrincipal(identity);
var settings = new OidcConfigDto
{
SyncUserSettings = true,
};
await oidcService.SyncUserSettings(null!, settings, principal, user);
var libraries = (await unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id)).Select(l => l.Name).ToList();
Assert.Single(libraries);
Assert.Contains(mangaLib.Name, libraries);
Assert.DoesNotContain(lightNovelsLib.Name, libraries);
// Only give access to the other library
claims = [new Claim(ClaimTypes.Role, OidcService.LibraryAccessPrefix + "Light Novels")];
identity = new ClaimsIdentity(claims);
principal = new ClaimsPrincipal(identity);
await oidcService.SyncUserSettings(null!, settings, principal, user);
// Check access has switched
libraries = (await unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id)).Select(l => l.Name).ToList();
Assert.Single(libraries);
Assert.Contains(lightNovelsLib.Name, libraries);
Assert.DoesNotContain(mangaLib.Name, libraries);
}
[Fact]
public async Task SyncAgeRestrictions_NoRestrictions()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (oidcService, user, _, _) = await Setup(unitOfWork, context, mapper);
var claims = new List<Claim>()
{
new (ClaimTypes.Role, OidcService.AgeRestrictionPrefix + "Not Applicable"),
new(ClaimTypes.Role, OidcService.AgeRestrictionPrefix + OidcService.IncludeUnknowns),
};
var identity = new ClaimsIdentity(claims);
var principal = new ClaimsPrincipal(identity);
var settings = new OidcConfigDto
{
SyncUserSettings = true,
};
await oidcService.SyncUserSettings(null!, settings, principal, user);
var dbUser = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(dbUser);
Assert.Equal(AgeRating.NotApplicable, dbUser.AgeRestriction);
Assert.True(dbUser.AgeRestrictionIncludeUnknowns);
}
[Fact]
public async Task SyncAgeRestrictions_IncludeUnknowns()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (oidcService, user, _, _) = await Setup(unitOfWork, context, mapper);
var claims = new List<Claim>()
{
new (ClaimTypes.Role, OidcService.AgeRestrictionPrefix + "M"),
new(ClaimTypes.Role, OidcService.AgeRestrictionPrefix + OidcService.IncludeUnknowns),
};
var identity = new ClaimsIdentity(claims);
var principal = new ClaimsPrincipal(identity);
var settings = new OidcConfigDto
{
SyncUserSettings = true,
};
await oidcService.SyncUserSettings(null!, settings, principal, user);
var dbUser = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(dbUser);
Assert.Equal(AgeRating.Mature, dbUser.AgeRestriction);
Assert.True(dbUser.AgeRestrictionIncludeUnknowns);
}
[Fact]
public async Task SyncAgeRestriction_AdminNone()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (oidcService, user, _, _) = await Setup(unitOfWork, context, mapper);
var claims = new List<Claim>()
{
new (ClaimTypes.Role, PolicyConstants.AdminRole),
new (ClaimTypes.Role, OidcService.AgeRestrictionPrefix + "M"),
};
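// Admins are expected to keep an unrestricted (Not Applicable) age rating even when an age restriction claim is present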
var identity = new ClaimsIdentity(claims);
var principal = new ClaimsPrincipal(identity);
var settings = new OidcConfigDto
{
SyncUserSettings = true,
};
await oidcService.SyncUserSettings(null!, settings, principal, user);
var dbUser = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(dbUser);
Assert.Equal(AgeRating.NotApplicable, dbUser.AgeRestriction);
Assert.True(dbUser.AgeRestrictionIncludeUnknowns);
}
[Fact]
public async Task SyncAgeRestriction_MultipleAgeRestrictionClaims()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (oidcService, user, _, _) = await Setup(unitOfWork, context, mapper);
var claims = new List<Claim>()
{
new (ClaimTypes.Role, OidcService.AgeRestrictionPrefix + "Teen"),
new (ClaimTypes.Role, OidcService.AgeRestrictionPrefix + "M"),
};
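// With multiple age restriction claims, the higher rating (Mature over Teen) is expected to win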
var identity = new ClaimsIdentity(claims);
var principal = new ClaimsPrincipal(identity);
var settings = new OidcConfigDto
{
SyncUserSettings = true,
};
await oidcService.SyncUserSettings(null!, settings, principal, user);
var dbUser = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(dbUser);
Assert.Equal(AgeRating.Mature, dbUser.AgeRestriction);
}
[Fact]
public async Task SyncAgeRestriction_NoAgeRestrictionClaims()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (oidcService, user, _, _) = await Setup(unitOfWork, context, mapper);
var identity = new ClaimsIdentity([]);
var principal = new ClaimsPrincipal(identity);
var settings = new OidcConfigDto
{
SyncUserSettings = true,
};
await oidcService.SyncUserSettings(null!, settings, principal, user);
var dbUser = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(dbUser);
Assert.Equal(AgeRating.NotApplicable, dbUser.AgeRestriction);
Assert.True(dbUser.AgeRestrictionIncludeUnknowns);
// Also default to no restrictions when only the include-unknowns claim is present
identity = new ClaimsIdentity([new Claim(ClaimTypes.Role, OidcService.AgeRestrictionPrefix + OidcService.IncludeUnknowns)]);
principal = new ClaimsPrincipal(identity);
await oidcService.SyncUserSettings(null!, settings, principal, user);
dbUser = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(dbUser);
Assert.Equal(AgeRating.NotApplicable, dbUser.AgeRestriction);
Assert.True(dbUser.AgeRestrictionIncludeUnknowns);
}
[Fact]
public async Task SyncUserSettings_DontChangeDefaultAdmin()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (oidcService, _, _, userManager) = await Setup(unitOfWork, context, mapper);
// Use the default admin user
var user = await unitOfWork.UserRepository.GetDefaultAdminUser();
var settings = new OidcConfigDto
{
SyncUserSettings = true,
};
var claims = new List<Claim>()
{
new (ClaimTypes.Role, PolicyConstants.ChangePasswordRole),
new (ClaimTypes.Role, OidcService.AgeRestrictionPrefix + "Teen"),
};
var identity = new ClaimsIdentity(claims);
var principal = new ClaimsPrincipal(identity);
await oidcService.SyncUserSettings(null!, settings, principal, user);
var userFromDb = await unitOfWork.UserRepository.GetUserByIdAsync(user.Id);
Assert.NotNull(userFromDb);
Assert.NotEqual(AgeRating.Teen, userFromDb.AgeRestriction);
var newUser = new AppUserBuilder("NotAnAdmin", "NotAnAdmin@localhost")
.WithIdentityProvider(IdentityProvider.OpenIdConnect)
.Build();
var res = await userManager.CreateAsync(newUser);
Assert.Empty(res.Errors);
Assert.True(res.Succeeded);
await oidcService.SyncUserSettings(null!, settings, principal, newUser);
userFromDb = await unitOfWork.UserRepository.GetUserByIdAsync(newUser.Id);
Assert.NotNull(userFromDb);
Assert.True(await userManager.IsInRoleAsync(newUser, PolicyConstants.ChangePasswordRole));
Assert.Equal(AgeRating.Teen, userFromDb.AgeRestriction);
}
[Fact]
public async Task FindBestAvailableName_NoDuplicates()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var (oidcService, _, _, userManager) = await Setup(unitOfWork, context, mapper);
const string preferredName = "PreferredName";
const string name = "Name";
const string givenName = "GivenName";
const string surname = "Surname";
const string email = "Email";
var claims = new List<Claim>()
{
new(JwtRegisteredClaimNames.PreferredUsername, preferredName),
new(ClaimTypes.Name, name),
new(ClaimTypes.GivenName, givenName),
new(ClaimTypes.Surname, surname),
new(ClaimTypes.Email, email),
};
var identity = new ClaimsIdentity(claims);
var principal = new ClaimsPrincipal(identity);
var bestName = await oidcService.FindBestAvailableName(principal);
Assert.NotNull(bestName);
Assert.Equal(preferredName, bestName);
// Create a user with this name to make the method fall back to the next claim
var user = new AppUserBuilder(bestName, bestName).Build();
var res = await userManager.CreateAsync(user);
// This has the actual information as to why it would fail, so we check it to make sure that if the test fails here we know why
Assert.Empty(res.Errors);
Assert.True(res.Succeeded);
// Fallback to name
bestName = await oidcService.FindBestAvailableName(principal);
Assert.NotNull(bestName);
Assert.Equal(name, bestName);
user = new AppUserBuilder(bestName, bestName).Build();
res = await userManager.CreateAsync(user);
Assert.Empty(res.Errors);
Assert.True(res.Succeeded);
// Fallback to given name
bestName = await oidcService.FindBestAvailableName(principal);
Assert.NotNull(bestName);
Assert.Equal(givenName, bestName);
user = new AppUserBuilder(bestName, bestName).Build();
res = await userManager.CreateAsync(user);
Assert.Empty(res.Errors);
Assert.True(res.Succeeded);
// Fallback to surname
bestName = await oidcService.FindBestAvailableName(principal);
Assert.NotNull(bestName);
Assert.Equal(surname, bestName);
user = new AppUserBuilder(bestName, bestName).Build();
res = await userManager.CreateAsync(user);
Assert.Empty(res.Errors);
Assert.True(res.Succeeded);
// When none are found, returns null
bestName = await oidcService.FindBestAvailableName(principal);
Assert.Null(bestName);
}
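/// <summary>
/// Wires up an OidcService against the test context with a real UserManager and RoleManager,
/// seeding a default admin and an OIDC-managed user.
/// </summary>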
private async Task<(OidcService, AppUser, IAccountService, UserManager<AppUser>)> Setup(IUnitOfWork unitOfWork, DataContext context, IMapper mapper)
{
// Remove the default library created by the AbstractDbTest class
context.Library.RemoveRange(context.Library);
await context.SaveChangesAsync();
var defaultAdmin = new AppUserBuilder("defaultAdmin", "defaultAdmin@localhost")
.WithRole(PolicyConstants.AdminRole)
.Build();
var user = new AppUserBuilder("amelia", "amelia@localhost")
.WithIdentityProvider(IdentityProvider.OpenIdConnect)
.Build();
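// Construct Identity's RoleManager and UserManager by hand against the test DataContext
// so role seeding and user creation run through the real Identity pipeline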
var roleStore = new RoleStore<
AppRole,
DataContext,
int,
IdentityUserRole<int>,
IdentityRoleClaim<int>
>(context);
var roleManager = new RoleManager<AppRole>(
roleStore,
[new RoleValidator<AppRole>()],
new UpperInvariantLookupNormalizer(),
new IdentityErrorDescriber(),
Substitute.For<ILogger<RoleManager<AppRole>>>());
foreach (var role in PolicyConstants.ValidRoles)
{
if (!await roleManager.RoleExistsAsync(role))
{
await roleManager.CreateAsync(new AppRole
{
Name = role,
});
}
}
var userStore = new UserStore<
AppUser,
AppRole,
DataContext,
int,
IdentityUserClaim<int>,
AppUserRole,
IdentityUserLogin<int>,
IdentityUserToken<int>,
IdentityRoleClaim<int>
>(context);
var userManager = new UserManager<AppUser>(userStore,
new OptionsWrapper<IdentityOptions>(new IdentityOptions()),
new PasswordHasher<AppUser>(),
[new UserValidator<AppUser>()],
[new PasswordValidator<AppUser>()],
new UpperInvariantLookupNormalizer(),
new IdentityErrorDescriber(),
null!,
Substitute.For<ILogger<UserManager<AppUser>>>());
// Create users with the UserManager such that the SecurityStamp is set
await userManager.CreateAsync(user);
await userManager.CreateAsync(defaultAdmin);
var accountService = new AccountService(userManager, Substitute.For<ILogger<AccountService>>(), unitOfWork, mapper, Substitute.For<ILocalizationService>());
var oidcService = new OidcService(Substitute.For<ILogger<OidcService>>(), userManager, unitOfWork, accountService, Substitute.For<IEmailService>());
return (oidcService, user, accountService, userManager);
}
}

View File

@ -5,6 +5,7 @@ using System.IO.Abstractions;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Metadata;
using API.Data.Repositories;
using API.Entities.Enums;
@ -90,23 +91,21 @@ public class MockReadingItemService : IReadingItemService
}
}
public class ParseScannedFilesTests : AbstractDbTest
public class ParseScannedFilesTests: AbstractDbTest
{
private readonly ILogger<ParseScannedFiles> _logger = Substitute.For<ILogger<ParseScannedFiles>>();
private readonly ScannerHelper _scannerHelper;
private readonly ITestOutputHelper _outputHelper;
public ParseScannedFilesTests(ITestOutputHelper testOutputHelper)
public ParseScannedFilesTests(ITestOutputHelper testOutputHelper): base(testOutputHelper)
{
// Since ProcessFile relies on _readingItemService, we implement our own version so we have control over how the calls work
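// Set up Hangfire to use in-memory storage for testing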
GlobalConfiguration.Configuration.UseInMemoryStorage();
_scannerHelper = new ScannerHelper(UnitOfWork, testOutputHelper);
_outputHelper = testOutputHelper;
}
protected override async Task ResetDb()
private async Task<ScannerHelper> Setup(IUnitOfWork unitOfWork)
{
Context.Series.RemoveRange(Context.Series.ToList());
await Context.SaveChangesAsync();
return new ScannerHelper(unitOfWork, _outputHelper);
}
#region MergeName
@ -193,6 +192,9 @@ public class ParseScannedFilesTests : AbstractDbTest
[Fact]
public async Task ScanLibrariesForSeries_ShouldFindFiles()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var scannerHelper = await Setup(unitOfWork);
var fileSystem = new MockFileSystem();
fileSystem.AddDirectory(Root + "Data/");
fileSystem.AddFile(Root + "Data/Accel World v1.cbz", new MockFileData(string.Empty));
@ -206,20 +208,20 @@ public class ParseScannedFilesTests : AbstractDbTest
var library =
await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
await unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
Assert.NotNull(library);
library.Type = LibraryType.Manga;
var parsedSeries = await psf.ScanLibrariesForSeries(library, new List<string>() {Root + "Data/"}, false,
await UnitOfWork.SeriesRepository.GetFolderPathMap(1));
await unitOfWork.SeriesRepository.GetFolderPathMap(1));
// Assert.Equal(3, parsedSeries.Values.Count);
// Assert.NotEmpty(parsedSeries.Keys.Where(p => p.Format == MangaFormat.Archive && p.Name.Equals("Accel World")));
Assert.Equal(3, parsedSeries.Count);
Assert.NotEmpty(parsedSeries.Select(p => p.ParsedSeries).Where(p => p.Format == MangaFormat.Archive && p.Name.Equals("Accel World")));
Assert.Contains(parsedSeries.Select(p => p.ParsedSeries), p => p.Format == MangaFormat.Archive && p.Name.Equals("Accel World"));
}
#endregion
@ -245,15 +247,18 @@ public class ParseScannedFilesTests : AbstractDbTest
[Fact]
public async Task ProcessFiles_ForLibraryMode_OnlyCallsFolderActionForEachTopLevelFolder()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var scannerHelper = await Setup(unitOfWork);
var fileSystem = CreateTestFilesystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
var directoriesSeen = new HashSet<string>();
var library = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
var library = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
var scanResults = await psf.ScanFiles("C:/Data/", true, await UnitOfWork.SeriesRepository.GetFolderPathMap(1), library);
var scanResults = await psf.ScanFiles("C:/Data/", true, await unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
foreach (var scanResult in scanResults)
{
directoriesSeen.Add(scanResult.Folder);
@ -265,18 +270,21 @@ public class ParseScannedFilesTests : AbstractDbTest
[Fact]
public async Task ProcessFiles_ForNonLibraryMode_CallsFolderActionOnce()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var scannerHelper = await Setup(unitOfWork);
var fileSystem = CreateTestFilesystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
var library = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
var library = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
Assert.NotNull(library);
var directoriesSeen = new HashSet<string>();
var scanResults = await psf.ScanFiles("C:/Data/", false,
await UnitOfWork.SeriesRepository.GetFolderPathMap(1), library);
await unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
foreach (var scanResult in scanResults)
{
@ -291,6 +299,9 @@ public class ParseScannedFilesTests : AbstractDbTest
[Fact]
public async Task ProcessFiles_ShouldCallFolderActionTwice()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var scannerHelper = await Setup(unitOfWork);
var fileSystem = new MockFileSystem();
fileSystem.AddDirectory("C:/Data/");
fileSystem.AddDirectory("C:/Data/Accel World");
@ -305,10 +316,10 @@ public class ParseScannedFilesTests : AbstractDbTest
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
var library = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
var library = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
Assert.NotNull(library);
var scanResults = await psf.ScanFiles("C:/Data", true, await UnitOfWork.SeriesRepository.GetFolderPathMap(1), library);
var scanResults = await psf.ScanFiles("C:/Data", true, await unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
Assert.Equal(2, scanResults.Count);
}
@ -320,6 +331,9 @@ public class ParseScannedFilesTests : AbstractDbTest
[Fact]
public async Task ProcessFiles_ShouldCallFolderActionOnce()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var scannerHelper = await Setup(unitOfWork);
var fileSystem = new MockFileSystem();
fileSystem.AddDirectory("C:/Data/");
fileSystem.AddDirectory("C:/Data/Accel World");
@ -334,11 +348,11 @@ public class ParseScannedFilesTests : AbstractDbTest
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
var library = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
var library = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
Assert.NotNull(library);
var scanResults = await psf.ScanFiles("C:/Data", false,
await UnitOfWork.SeriesRepository.GetFolderPathMap(1), library);
await unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
Assert.Single(scanResults);
}
@ -352,23 +366,26 @@ public class ParseScannedFilesTests : AbstractDbTest
//[Fact]
public async Task HasSeriesFolderNotChangedSinceLastScan_AllSeriesFoldersHaveChanges()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var scannerHelper = await Setup(unitOfWork);
const string testcase = "Subfolders always scanning all series changes - Manga.json";
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = library.Folders.First().Path;
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var fs = new FileSystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
var scanner = _scannerHelper.CreateServices(ds, fs);
var scanner = scannerHelper.CreateServices(ds, fs);
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Equal(4, postLib.Series.Count);
@ -391,7 +408,7 @@ public class ParseScannedFilesTests : AbstractDbTest
Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1 Ch. 0002.cbz"));
// 4 series, of which 2 have volumes as directories
var folderMap = await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id);
var folderMap = await unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id);
Assert.Equal(6, folderMap.Count);
var res = await psf.ScanFiles(testDirectoryPath, true, folderMap, postLib);
@ -404,23 +421,26 @@ public class ParseScannedFilesTests : AbstractDbTest
[Fact]
public async Task HasSeriesFolderNotChangedSinceLastScan_PublisherLayout()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var scannerHelper = await Setup(unitOfWork);
const string testcase = "Subfolder always scanning fix publisher layout - Comic.json";
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = library.Folders.First().Path;
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var fs = new FileSystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
var scanner = _scannerHelper.CreateServices(ds, fs);
var scanner = scannerHelper.CreateServices(ds, fs);
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Equal(4, postLib.Series.Count);
@ -438,7 +458,7 @@ public class ParseScannedFilesTests : AbstractDbTest
Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 2.cbz"));
var res = await psf.ScanFiles(testDirectoryPath, true,
await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
await unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
var changes = res.Count(sc => sc.HasChanged);
Assert.Equal(1, changes);
}
@ -447,23 +467,26 @@ public class ParseScannedFilesTests : AbstractDbTest
//[Fact]
public async Task SubFoldersNoSubFolders_SkipAll()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var scannerHelper = await Setup(unitOfWork);
const string testcase = "Subfolders and files at root - Manga.json";
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = library.Folders.First().Path;
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var fs = new FileSystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
var scanner = _scannerHelper.CreateServices(ds, fs);
var scanner = scannerHelper.CreateServices(ds, fs);
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -476,30 +499,33 @@ public class ParseScannedFilesTests : AbstractDbTest
await Task.Delay(1100); // Ensure at least one second has passed since the library scan
var res = await psf.ScanFiles(testDirectoryPath, true,
await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
await unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
Assert.DoesNotContain(res, sc => sc.HasChanged);
}
[Fact]
public async Task SubFoldersNoSubFolders_ScanAllAfterAddInRoot()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var scannerHelper = await Setup(unitOfWork);
const string testcase = "Subfolders and files at root - Manga.json";
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = library.Folders.First().Path;
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var fs = new FileSystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
var scanner = _scannerHelper.CreateServices(ds, fs);
var scanner = scannerHelper.CreateServices(ds, fs);
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -508,8 +534,8 @@ public class ParseScannedFilesTests : AbstractDbTest
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
Context.Series.Update(spiceAndWolf);
await Context.SaveChangesAsync();
context.Series.Update(spiceAndWolf);
await context.SaveChangesAsync();
// Add file at series root
var spiceAndWolfDir = Path.Join(testDirectoryPath, "Spice and Wolf");
@ -517,7 +543,7 @@ public class ParseScannedFilesTests : AbstractDbTest
Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 4.cbz"));
var res = await psf.ScanFiles(testDirectoryPath, true,
await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
await unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
var changes = res.Count(sc => sc.HasChanged);
Assert.Equal(2, changes);
}
@ -525,23 +551,26 @@ public class ParseScannedFilesTests : AbstractDbTest
[Fact]
public async Task SubFoldersNoSubFolders_ScanAllAfterAddInSubFolder()
{
var (unitOfWork, context, mapper) = await CreateDatabase();
var scannerHelper = await Setup(unitOfWork);
const string testcase = "Subfolders and files at root - Manga.json";
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = library.Folders.First().Path;
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var fs = new FileSystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
var scanner = _scannerHelper.CreateServices(ds, fs);
var scanner = scannerHelper.CreateServices(ds, fs);
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -550,8 +579,8 @@ public class ParseScannedFilesTests : AbstractDbTest
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
Context.Series.Update(spiceAndWolf);
await Context.SaveChangesAsync();
context.Series.Update(spiceAndWolf);
await context.SaveChangesAsync();
// Add file in subfolder
var spiceAndWolfDir = Path.Join(Path.Join(testDirectoryPath, "Spice and Wolf"), "Spice and Wolf Vol. 3");
@ -559,7 +588,7 @@ public class ParseScannedFilesTests : AbstractDbTest
Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0013.cbz"));
var res = await psf.ScanFiles(testDirectoryPath, true,
await UnitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
await unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
var changes = res.Count(sc => sc.HasChanged);
Assert.Equal(2, changes);
}

View File

@ -1,5 +1,6 @@
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
@ -7,17 +8,21 @@ using API.Entities.Person;
using API.Extensions;
using API.Helpers.Builders;
using API.Services;
using Polly;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
public class PersonServiceTests: AbstractDbTest
public class PersonServiceTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
[Fact]
public async Task PersonMerge_KeepNonEmptyMetadata()
{
var ps = new PersonService(UnitOfWork);
var (unitOfWork, _, _) = await CreateDatabase();
var ps = new PersonService(unitOfWork);
var person1 = new Person
{
@ -36,13 +41,13 @@ public class PersonServiceTests: AbstractDbTest
AniListId = 27,
};
UnitOfWork.PersonRepository.Attach(person1);
UnitOfWork.PersonRepository.Attach(person2);
await UnitOfWork.CommitAsync();
unitOfWork.PersonRepository.Attach(person1);
unitOfWork.PersonRepository.Attach(person2);
await unitOfWork.CommitAsync();
await ps.MergePeopleAsync(person2, person1);
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
var allPeople = await unitOfWork.PersonRepository.GetAllPeople();
Assert.Single(allPeople);
var person = allPeople[0];
@ -58,7 +63,9 @@ public class PersonServiceTests: AbstractDbTest
[Fact]
public async Task PersonMerge_MergedPersonDestruction()
{
var ps = new PersonService(UnitOfWork);
var (unitOfWork, _, _) = await CreateDatabase();
var ps = new PersonService(unitOfWork);
var person1 = new Person
{
@ -72,27 +79,29 @@ public class PersonServiceTests: AbstractDbTest
NormalizedName = "Delores Casey".ToNormalized(),
};
UnitOfWork.PersonRepository.Attach(person1);
UnitOfWork.PersonRepository.Attach(person2);
await UnitOfWork.CommitAsync();
unitOfWork.PersonRepository.Attach(person1);
unitOfWork.PersonRepository.Attach(person2);
await unitOfWork.CommitAsync();
await ps.MergePeopleAsync(person2, person1);
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
var allPeople = await unitOfWork.PersonRepository.GetAllPeople();
Assert.Single(allPeople);
}
[Fact]
public async Task PersonMerge_RetentionChapters()
{
var ps = new PersonService(UnitOfWork);
var (unitOfWork, _, _) = await CreateDatabase();
var ps = new PersonService(unitOfWork);
var library = new LibraryBuilder("My Library").Build();
UnitOfWork.LibraryRepository.Add(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Add(library);
await unitOfWork.CommitAsync();
var user = new AppUserBuilder("Amelia", "amelia@localhost")
.WithLibrary(library).Build();
UnitOfWork.UserRepository.Add(user);
unitOfWork.UserRepository.Add(user);
var person = new PersonBuilder("Jillian Cowan").Build();
@ -120,26 +129,26 @@ public class PersonServiceTests: AbstractDbTest
.Build())
.Build();
UnitOfWork.SeriesRepository.Add(series);
UnitOfWork.SeriesRepository.Add(series2);
await UnitOfWork.CommitAsync();
unitOfWork.SeriesRepository.Add(series);
unitOfWork.SeriesRepository.Add(series2);
await unitOfWork.CommitAsync();
await ps.MergePeopleAsync(person2, person);
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
var allPeople = await unitOfWork.PersonRepository.GetAllPeople();
Assert.Single(allPeople);
var mergedPerson = allPeople[0];
Assert.Equal("Jillian Cowan", mergedPerson.Name);
var chapters = await UnitOfWork.PersonRepository.GetChaptersForPersonByRole(1, 1, PersonRole.Editor);
var chapters = await unitOfWork.PersonRepository.GetChaptersForPersonByRole(1, 1, PersonRole.Editor);
Assert.Equal(2, chapters.Count());
chapter = await UnitOfWork.ChapterRepository.GetChapterAsync(1, ChapterIncludes.People);
chapter = await unitOfWork.ChapterRepository.GetChapterAsync(1, ChapterIncludes.People);
Assert.NotNull(chapter);
Assert.Single(chapter.People);
chapter2 = await UnitOfWork.ChapterRepository.GetChapterAsync(2, ChapterIncludes.People);
chapter2 = await unitOfWork.ChapterRepository.GetChapterAsync(2, ChapterIncludes.People);
Assert.NotNull(chapter2);
Assert.Single(chapter2.People);
@ -149,17 +158,17 @@ public class PersonServiceTests: AbstractDbTest
[Fact]
public async Task PersonMerge_NoDuplicateChaptersOrSeries()
{
await ResetDb();
var (unitOfWork, _, _) = await CreateDatabase();
var ps = new PersonService(UnitOfWork);
var ps = new PersonService(unitOfWork);
var library = new LibraryBuilder("My Library").Build();
UnitOfWork.LibraryRepository.Add(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Add(library);
await unitOfWork.CommitAsync();
var user = new AppUserBuilder("Amelia", "amelia@localhost")
.WithLibrary(library).Build();
UnitOfWork.UserRepository.Add(user);
unitOfWork.UserRepository.Add(user);
var person = new PersonBuilder("Jillian Cowan").Build();
@ -197,39 +206,39 @@ public class PersonServiceTests: AbstractDbTest
.Build())
.Build();
UnitOfWork.SeriesRepository.Add(series);
UnitOfWork.SeriesRepository.Add(series2);
await UnitOfWork.CommitAsync();
unitOfWork.SeriesRepository.Add(series);
unitOfWork.SeriesRepository.Add(series2);
await unitOfWork.CommitAsync();
await ps.MergePeopleAsync(person2, person);
var allPeople = await UnitOfWork.PersonRepository.GetAllPeople();
var allPeople = await unitOfWork.PersonRepository.GetAllPeople();
Assert.Single(allPeople);
var mergedPerson = await UnitOfWork.PersonRepository.GetPersonById(person.Id, PersonIncludes.All);
var mergedPerson = await unitOfWork.PersonRepository.GetPersonById(person.Id, PersonIncludes.All);
Assert.NotNull(mergedPerson);
Assert.Equal(3, mergedPerson.ChapterPeople.Count);
Assert.Equal(3, mergedPerson.SeriesMetadataPeople.Count);
chapter = await UnitOfWork.ChapterRepository.GetChapterAsync(chapter.Id, ChapterIncludes.People);
chapter = await unitOfWork.ChapterRepository.GetChapterAsync(chapter.Id, ChapterIncludes.People);
Assert.NotNull(chapter);
Assert.Equal(2, chapter.People.Count);
Assert.Single(chapter.People.Select(p => p.Person.Id).Distinct());
Assert.Contains(chapter.People, p => p.Role == PersonRole.Editor);
Assert.Contains(chapter.People, p => p.Role == PersonRole.Colorist);
chapter2 = await UnitOfWork.ChapterRepository.GetChapterAsync(chapter2.Id, ChapterIncludes.People);
chapter2 = await unitOfWork.ChapterRepository.GetChapterAsync(chapter2.Id, ChapterIncludes.People);
Assert.NotNull(chapter2);
Assert.Single(chapter2.People);
Assert.Contains(chapter2.People, p => p.Role == PersonRole.Editor);
Assert.DoesNotContain(chapter2.People, p => p.Role == PersonRole.Colorist);
series = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(series.Id, SeriesIncludes.Metadata);
series = await unitOfWork.SeriesRepository.GetSeriesByIdAsync(series.Id, SeriesIncludes.Metadata);
Assert.NotNull(series);
Assert.Single(series.Metadata.People);
Assert.Contains(series.Metadata.People, p => p.Role == PersonRole.Editor);
Assert.DoesNotContain(series.Metadata.People, p => p.Role == PersonRole.Colorist);
series2 = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(series2.Id, SeriesIncludes.Metadata);
series2 = await unitOfWork.SeriesRepository.GetSeriesByIdAsync(series2.Id, SeriesIncludes.Metadata);
Assert.NotNull(series2);
Assert.Equal(2, series2.Metadata.People.Count);
Assert.Contains(series2.Metadata.People, p => p.Role == PersonRole.Editor);
@ -241,16 +250,16 @@ public class PersonServiceTests: AbstractDbTest
[Fact]
public async Task PersonAddAlias_NoOverlap()
{
await ResetDb();
var (unitOfWork, _, _) = await CreateDatabase();
UnitOfWork.PersonRepository.Attach(new PersonBuilder("Jillian Cowan").Build());
UnitOfWork.PersonRepository.Attach(new PersonBuilder("Jilly Cowan").WithAlias("Jolly Cowan").Build());
await UnitOfWork.CommitAsync();
unitOfWork.PersonRepository.Attach(new PersonBuilder("Jillian Cowan").Build());
unitOfWork.PersonRepository.Attach(new PersonBuilder("Jilly Cowan").WithAlias("Jolly Cowan").Build());
await unitOfWork.CommitAsync();
var ps = new PersonService(UnitOfWork);
var ps = new PersonService(unitOfWork);
var person1 = await UnitOfWork.PersonRepository.GetPersonByNameOrAliasAsync("Jillian Cowan");
var person2 = await UnitOfWork.PersonRepository.GetPersonByNameOrAliasAsync("Jilly Cowan");
var person1 = await unitOfWork.PersonRepository.GetPersonByNameOrAliasAsync("Jillian Cowan");
var person2 = await unitOfWork.PersonRepository.GetPersonByNameOrAliasAsync("Jilly Cowan");
Assert.NotNull(person1);
Assert.NotNull(person2);
@ -276,11 +285,4 @@ public class PersonServiceTests: AbstractDbTest
Assert.Single(person2.Aliases);
}
protected override async Task ResetDb()
{
Context.Person.RemoveRange(Context.Person.ToList());
await Context.SaveChangesAsync();
}
}

View File

@ -1,5 +1,6 @@
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.DTOs;
using API.Entities.Enums;
@ -10,25 +11,23 @@ using Hangfire;
using Hangfire.InMemory;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Polly;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
public class RatingServiceTests: AbstractDbTest
public class RatingServiceTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private readonly RatingService _ratingService;
public RatingServiceTests()
{
_ratingService = new RatingService(UnitOfWork, Substitute.For<IScrobblingService>(), Substitute.For<ILogger<RatingService>>());
}
[Fact]
public async Task UpdateRating_ShouldSetRating()
{
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var ratingService = new RatingService(unitOfWork, Substitute.For<IScrobblingService>(), Substitute.For<ILogger<RatingService>>());
Context.Library.Add(new LibraryBuilder("Test LIb")
context.Library.Add(new LibraryBuilder("Test LIb")
.WithAppUser(new AppUserBuilder("majora2007", string.Empty).Build())
.WithSeries(new SeriesBuilder("Test")
@ -39,13 +38,13 @@ public class RatingServiceTests: AbstractDbTest
.Build());
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
var user = await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
JobStorage.Current = new InMemoryStorage();
var result = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
var result = await ratingService.UpdateSeriesRating(user, new UpdateRatingDto
{
SeriesId = 1,
UserRating = 3,
@ -53,7 +52,7 @@ public class RatingServiceTests: AbstractDbTest
Assert.True(result);
var ratings = (await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings))!
var ratings = (await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings))!
.Ratings;
Assert.NotEmpty(ratings);
Assert.Equal(3, ratings.First().Rating);
@ -62,9 +61,10 @@ public class RatingServiceTests: AbstractDbTest
[Fact]
public async Task UpdateRating_ShouldUpdateExistingRating()
{
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var ratingService = new RatingService(unitOfWork, Substitute.For<IScrobblingService>(), Substitute.For<ILogger<RatingService>>());
Context.Library.Add(new LibraryBuilder("Test LIb")
context.Library.Add(new LibraryBuilder("Test LIb")
.WithAppUser(new AppUserBuilder("majora2007", string.Empty).Build())
.WithSeries(new SeriesBuilder("Test")
@ -75,11 +75,11 @@ public class RatingServiceTests: AbstractDbTest
.Build());
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
var user = await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
var result = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
var result = await ratingService.UpdateSeriesRating(user, new UpdateRatingDto
{
SeriesId = 1,
UserRating = 3,
@ -88,14 +88,14 @@ public class RatingServiceTests: AbstractDbTest
Assert.True(result);
JobStorage.Current = new InMemoryStorage();
var ratings = (await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings))
var ratings = (await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings))
.Ratings;
Assert.NotEmpty(ratings);
Assert.Equal(3, ratings.First().Rating);
// Update the DB again
var result2 = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
var result2 = await ratingService.UpdateSeriesRating(user, new UpdateRatingDto
{
SeriesId = 1,
UserRating = 5,
@ -103,7 +103,7 @@ public class RatingServiceTests: AbstractDbTest
Assert.True(result2);
var ratings2 = (await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings))
var ratings2 = (await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings))
.Ratings;
Assert.NotEmpty(ratings2);
Assert.True(ratings2.Count == 1);
@ -113,9 +113,10 @@ public class RatingServiceTests: AbstractDbTest
[Fact]
public async Task UpdateRating_ShouldClampRatingAt5()
{
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var ratingService = new RatingService(unitOfWork, Substitute.For<IScrobblingService>(), Substitute.For<ILogger<RatingService>>());
Context.Library.Add(new LibraryBuilder("Test LIb")
context.Library.Add(new LibraryBuilder("Test LIb")
.WithAppUser(new AppUserBuilder("majora2007", string.Empty).Build())
.WithSeries(new SeriesBuilder("Test")
@ -125,11 +126,11 @@ public class RatingServiceTests: AbstractDbTest
.Build())
.Build());
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
var user = await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
var result = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
var result = await ratingService.UpdateSeriesRating(user, new UpdateRatingDto
{
SeriesId = 1,
UserRating = 10,
@ -138,7 +139,7 @@ public class RatingServiceTests: AbstractDbTest
Assert.True(result);
JobStorage.Current = new InMemoryStorage();
var ratings = (await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007",
var ratings = (await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007",
AppUserIncludes.Ratings)!)
.Ratings;
Assert.NotEmpty(ratings);
@ -148,9 +149,10 @@ public class RatingServiceTests: AbstractDbTest
[Fact]
public async Task UpdateRating_ShouldReturnFalseWhenSeriesDoesntExist()
{
await ResetDb();
var (unitOfWork, context, _) = await CreateDatabase();
var ratingService = new RatingService(unitOfWork, Substitute.For<IScrobblingService>(), Substitute.For<ILogger<RatingService>>());
Context.Library.Add(new LibraryBuilder("Test LIb", LibraryType.Book)
context.Library.Add(new LibraryBuilder("Test LIb", LibraryType.Book)
.WithAppUser(new AppUserBuilder("majora2007", string.Empty).Build())
.WithSeries(new SeriesBuilder("Test")
@ -160,11 +162,11 @@ public class RatingServiceTests: AbstractDbTest
.Build())
.Build());
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await UnitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
var user = await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Ratings);
var result = await _ratingService.UpdateSeriesRating(user, new UpdateRatingDto
var result = await ratingService.UpdateSeriesRating(user, new UpdateRatingDto
{
SeriesId = 2,
UserRating = 5,
@ -175,15 +177,4 @@ public class RatingServiceTests: AbstractDbTest
var ratings = user.Ratings;
Assert.Empty(ratings);
}
protected override async Task ResetDb()
{
Context.Series.RemoveRange(Context.Series.ToList());
Context.AppUserRating.RemoveRange(Context.AppUserRating.ToList());
Context.Genre.RemoveRange(Context.Genre.ToList());
Context.CollectionTag.RemoveRange(Context.CollectionTag.ToList());
Context.Person.RemoveRange(Context.Person.ToList());
Context.Library.RemoveRange(Context.Library.ToList());
await Context.SaveChangesAsync();
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,5 +1,6 @@
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.DTOs;
using API.Entities;
@ -7,25 +8,28 @@ using API.Entities.Enums;
using API.Helpers.Builders;
using API.Services;
using API.Tests.Helpers;
using AutoMapper;
using Kavita.Common;
using Microsoft.EntityFrameworkCore;
using NSubstitute;
using Polly;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
public class ReadingProfileServiceTest: AbstractDbTest
public class ReadingProfileServiceTest(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
/// <summary>
/// Does not add a default reading profile
/// </summary>
/// <returns></returns>
public async Task<(ReadingProfileService, AppUser, Library, Series)> Setup()
public async Task<(ReadingProfileService, AppUser, Library, Series)> Setup(IUnitOfWork unitOfWork, DataContext context, IMapper mapper)
{
var user = new AppUserBuilder("amelia", "amelia@localhost").Build();
Context.AppUser.Add(user);
await UnitOfWork.CommitAsync();
context.AppUser.Add(user);
await unitOfWork.CommitAsync();
var series = new SeriesBuilder("Spice and Wolf").Build();
@ -34,10 +38,10 @@ public class ReadingProfileServiceTest: AbstractDbTest
.Build();
user.Libraries.Add(library);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
var rps = new ReadingProfileService(UnitOfWork, Substitute.For<ILocalizationService>(), Mapper);
user = await UnitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.UserPreferences);
var rps = new ReadingProfileService(unitOfWork, Substitute.For<ILocalizationService>(), mapper);
user = await unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.UserPreferences);
return (rps, user, library, series);
}
@ -45,8 +49,8 @@ public class ReadingProfileServiceTest: AbstractDbTest
[Fact]
public async Task ImplicitProfileFirst()
{
await ResetDb();
var (rps, user, library, series) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, library, series) = await Setup(unitOfWork, context, mapper);
var profile = new AppUserReadingProfileBuilder(user.Id)
.WithKind(ReadingProfileKind.Implicit)
@ -61,7 +65,7 @@ public class ReadingProfileServiceTest: AbstractDbTest
user.ReadingProfiles.Add(profile);
user.ReadingProfiles.Add(profile2);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
var seriesProfile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
Assert.NotNull(seriesProfile);
@ -76,14 +80,14 @@ public class ReadingProfileServiceTest: AbstractDbTest
[Fact]
public async Task CantDeleteDefaultReadingProfile()
{
await ResetDb();
var (rps, user, _, _) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, _, _) = await Setup(unitOfWork, context, mapper);
var profile = new AppUserReadingProfileBuilder(user.Id)
.WithKind(ReadingProfileKind.Default)
.Build();
Context.AppUserReadingProfiles.Add(profile);
await UnitOfWork.CommitAsync();
context.AppUserReadingProfiles.Add(profile);
await unitOfWork.CommitAsync();
await Assert.ThrowsAsync<KavitaException>(async () =>
{
@ -91,21 +95,21 @@ public class ReadingProfileServiceTest: AbstractDbTest
});
var profile2 = new AppUserReadingProfileBuilder(user.Id).Build();
Context.AppUserReadingProfiles.Add(profile2);
await UnitOfWork.CommitAsync();
context.AppUserReadingProfiles.Add(profile2);
await unitOfWork.CommitAsync();
await rps.DeleteReadingProfile(user.Id, profile2.Id);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
var allProfiles = await Context.AppUserReadingProfiles.ToListAsync();
var allProfiles = await context.AppUserReadingProfiles.ToListAsync();
Assert.Single(allProfiles);
}
[Fact]
public async Task CreateImplicitSeriesReadingProfile()
{
await ResetDb();
var (rps, user, _, series) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, _, series) = await Setup(unitOfWork, context, mapper);
var dto = new UserReadingProfileDto
{
@ -125,8 +129,8 @@ public class ReadingProfileServiceTest: AbstractDbTest
[Fact]
public async Task UpdateImplicitReadingProfile_DoesNotCreateNew()
{
await ResetDb();
var (rps, user, _, series) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, _, series) = await Setup(unitOfWork, context, mapper);
var dto = new UserReadingProfileDto
{
@ -154,7 +158,7 @@ public class ReadingProfileServiceTest: AbstractDbTest
Assert.Equal(ReadingProfileKind.Implicit, profile.Kind);
Assert.Equal(ReaderMode.LeftRight, profile.ReaderMode);
var implicitCount = await Context.AppUserReadingProfiles
var implicitCount = await context.AppUserReadingProfiles
.Where(p => p.Kind == ReadingProfileKind.Implicit)
.CountAsync();
Assert.Equal(1, implicitCount);
@ -163,8 +167,8 @@ public class ReadingProfileServiceTest: AbstractDbTest
[Fact]
public async Task GetCorrectProfile()
{
await ResetDb();
var (rps, user, lib, series) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, lib, series) = await Setup(unitOfWork, context, mapper);
var profile = new AppUserReadingProfileBuilder(user.Id)
.WithSeries(series)
@ -178,9 +182,9 @@ public class ReadingProfileServiceTest: AbstractDbTest
.WithKind(ReadingProfileKind.Default)
.WithName("Global")
.Build();
Context.AppUserReadingProfiles.Add(profile);
Context.AppUserReadingProfiles.Add(profile2);
Context.AppUserReadingProfiles.Add(profile3);
context.AppUserReadingProfiles.Add(profile);
context.AppUserReadingProfiles.Add(profile2);
context.AppUserReadingProfiles.Add(profile3);
var series2 = new SeriesBuilder("Rainbows After Storms").Build();
lib.Series.Add(series2);
@ -190,7 +194,7 @@ public class ReadingProfileServiceTest: AbstractDbTest
lib2.Series.Add(series3);
user.Libraries.Add(lib2);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
var p = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
Assert.NotNull(p);
@ -208,8 +212,8 @@ public class ReadingProfileServiceTest: AbstractDbTest
[Fact]
public async Task ReplaceReadingProfile()
{
await ResetDb();
var (rps, user, lib, series) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, lib, series) = await Setup(unitOfWork, context, mapper);
var profile1 = new AppUserReadingProfileBuilder(user.Id)
.WithSeries(series)
@ -220,9 +224,9 @@ public class ReadingProfileServiceTest: AbstractDbTest
.WithName("Profile 2")
.Build();
Context.AppUserReadingProfiles.Add(profile1);
Context.AppUserReadingProfiles.Add(profile2);
await UnitOfWork.CommitAsync();
context.AppUserReadingProfiles.Add(profile1);
context.AppUserReadingProfiles.Add(profile2);
await unitOfWork.CommitAsync();
var profile = await rps.GetReadingProfileDtoForSeries(user.Id, series.Id);
Assert.NotNull(profile);
@ -237,19 +241,19 @@ public class ReadingProfileServiceTest: AbstractDbTest
[Fact]
public async Task DeleteReadingProfile()
{
await ResetDb();
var (rps, user, lib, series) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, lib, series) = await Setup(unitOfWork, context, mapper);
var profile1 = new AppUserReadingProfileBuilder(user.Id)
.WithSeries(series)
.WithName("Profile 1")
.Build();
Context.AppUserReadingProfiles.Add(profile1);
await UnitOfWork.CommitAsync();
context.AppUserReadingProfiles.Add(profile1);
await unitOfWork.CommitAsync();
await rps.ClearSeriesProfile(user.Id, series.Id);
var profiles = await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id);
var profiles = await unitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id);
Assert.DoesNotContain(profiles, rp => rp.SeriesIds.Contains(series.Id));
}
@ -257,8 +261,8 @@ public class ReadingProfileServiceTest: AbstractDbTest
[Fact]
public async Task BulkAddReadingProfiles()
{
await ResetDb();
var (rps, user, lib, series) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, lib, series) = await Setup(unitOfWork, context, mapper);
for (var i = 0; i < 10; i++)
{
@ -270,15 +274,15 @@ public class ReadingProfileServiceTest: AbstractDbTest
.WithSeries(series)
.WithName("Profile")
.Build();
Context.AppUserReadingProfiles.Add(profile);
context.AppUserReadingProfiles.Add(profile);
var profile2 = new AppUserReadingProfileBuilder(user.Id)
.WithSeries(series)
.WithName("Profile2")
.Build();
Context.AppUserReadingProfiles.Add(profile2);
context.AppUserReadingProfiles.Add(profile2);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
var someSeriesIds = lib.Series.Take(lib.Series.Count / 2).Select(s => s.Id).ToList();
await rps.BulkAddProfileToSeries(user.Id, profile.Id, someSeriesIds);
@ -306,23 +310,23 @@ public class ReadingProfileServiceTest: AbstractDbTest
[Fact]
public async Task BulkAssignDeletesImplicit()
{
await ResetDb();
var (rps, user, lib, series) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, lib, series) = await Setup(unitOfWork, context, mapper);
var implicitProfile = Mapper.Map<UserReadingProfileDto>(new AppUserReadingProfileBuilder(user.Id)
var implicitProfile = mapper.Map<UserReadingProfileDto>(new AppUserReadingProfileBuilder(user.Id)
.Build());
var profile = new AppUserReadingProfileBuilder(user.Id)
.WithName("Profile 1")
.Build();
Context.AppUserReadingProfiles.Add(profile);
context.AppUserReadingProfiles.Add(profile);
for (var i = 0; i < 10; i++)
{
var generatedSeries = new SeriesBuilder($"Generated Series #{i}").Build();
lib.Series.Add(generatedSeries);
}
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
var ids = lib.Series.Select(s => s.Id).ToList();
@ -343,7 +347,7 @@ public class ReadingProfileServiceTest: AbstractDbTest
Assert.Equal(ReadingProfileKind.User, seriesProfile.Kind);
}
var implicitCount = await Context.AppUserReadingProfiles
var implicitCount = await context.AppUserReadingProfiles
.Where(p => p.Kind == ReadingProfileKind.Implicit)
.CountAsync();
Assert.Equal(0, implicitCount);
@ -352,18 +356,18 @@ public class ReadingProfileServiceTest: AbstractDbTest
[Fact]
public async Task AddDeletesImplicit()
{
await ResetDb();
var (rps, user, lib, series) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, lib, series) = await Setup(unitOfWork, context, mapper);
var implicitProfile = Mapper.Map<UserReadingProfileDto>(new AppUserReadingProfileBuilder(user.Id)
var implicitProfile = mapper.Map<UserReadingProfileDto>(new AppUserReadingProfileBuilder(user.Id)
.WithKind(ReadingProfileKind.Implicit)
.Build());
var profile = new AppUserReadingProfileBuilder(user.Id)
.WithName("Profile 1")
.Build();
Context.AppUserReadingProfiles.Add(profile);
await UnitOfWork.CommitAsync();
context.AppUserReadingProfiles.Add(profile);
await unitOfWork.CommitAsync();
await rps.UpdateImplicitReadingProfile(user.Id, series.Id, implicitProfile);
@ -377,7 +381,7 @@ public class ReadingProfileServiceTest: AbstractDbTest
Assert.NotNull(seriesProfile);
Assert.Equal(ReadingProfileKind.User, seriesProfile.Kind);
var implicitCount = await Context.AppUserReadingProfiles
var implicitCount = await context.AppUserReadingProfiles
.Where(p => p.Kind == ReadingProfileKind.Implicit)
.CountAsync();
Assert.Equal(0, implicitCount);
@ -386,8 +390,8 @@ public class ReadingProfileServiceTest: AbstractDbTest
[Fact]
public async Task CreateReadingProfile()
{
await ResetDb();
var (rps, user, lib, series) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, lib, series) = await Setup(unitOfWork, context, mapper);
var dto = new UserReadingProfileDto
{
@ -419,15 +423,15 @@ public class ReadingProfileServiceTest: AbstractDbTest
await rps.CreateReadingProfile(user.Id, dto3);
});
var allProfiles = Context.AppUserReadingProfiles.ToList();
var allProfiles = context.AppUserReadingProfiles.ToList();
Assert.Equal(2, allProfiles.Count);
}
[Fact]
public async Task ClearSeriesProfile_RemovesImplicitAndUnlinksExplicit()
{
await ResetDb();
var (rps, user, _, series) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, _, series) = await Setup(unitOfWork, context, mapper);
var implicitProfile = new AppUserReadingProfileBuilder(user.Id)
.WithSeries(series)
@ -440,40 +444,40 @@ public class ReadingProfileServiceTest: AbstractDbTest
.WithName("Explicit Profile")
.Build();
Context.AppUserReadingProfiles.Add(implicitProfile);
Context.AppUserReadingProfiles.Add(explicitProfile);
await UnitOfWork.CommitAsync();
context.AppUserReadingProfiles.Add(implicitProfile);
context.AppUserReadingProfiles.Add(explicitProfile);
await unitOfWork.CommitAsync();
var allBefore = await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id);
var allBefore = await unitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id);
Assert.Equal(2, allBefore.Count(rp => rp.SeriesIds.Contains(series.Id)));
await rps.ClearSeriesProfile(user.Id, series.Id);
var remainingProfiles = await Context.AppUserReadingProfiles.ToListAsync();
var remainingProfiles = await context.AppUserReadingProfiles.ToListAsync();
Assert.Single(remainingProfiles);
Assert.Equal("Explicit Profile", remainingProfiles[0].Name);
Assert.Empty(remainingProfiles[0].SeriesIds);
var profilesForSeries = await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id);
var profilesForSeries = await unitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id);
Assert.DoesNotContain(profilesForSeries, rp => rp.SeriesIds.Contains(series.Id));
}
[Fact]
public async Task AddProfileToLibrary_AddsAndOverridesExisting()
{
await ResetDb();
var (rps, user, lib, _) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, lib, _) = await Setup(unitOfWork, context, mapper);
var profile = new AppUserReadingProfileBuilder(user.Id)
.WithName("Library Profile")
.Build();
Context.AppUserReadingProfiles.Add(profile);
await UnitOfWork.CommitAsync();
context.AppUserReadingProfiles.Add(profile);
await unitOfWork.CommitAsync();
await rps.AddProfileToLibrary(user.Id, profile.Id, lib.Id);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
var linkedProfile = (await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
var linkedProfile = (await unitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
.FirstOrDefault(rp => rp.LibraryIds.Contains(lib.Id));
Assert.NotNull(linkedProfile);
Assert.Equal(profile.Id, linkedProfile.Id);
@ -481,13 +485,13 @@ public class ReadingProfileServiceTest: AbstractDbTest
var newProfile = new AppUserReadingProfileBuilder(user.Id)
.WithName("New Profile")
.Build();
Context.AppUserReadingProfiles.Add(newProfile);
await UnitOfWork.CommitAsync();
context.AppUserReadingProfiles.Add(newProfile);
await unitOfWork.CommitAsync();
await rps.AddProfileToLibrary(user.Id, newProfile.Id, lib.Id);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
linkedProfile = (await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
linkedProfile = (await unitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
.FirstOrDefault(rp => rp.LibraryIds.Contains(lib.Id));
Assert.NotNull(linkedProfile);
Assert.Equal(newProfile.Id, linkedProfile.Id);
@ -496,33 +500,33 @@ public class ReadingProfileServiceTest: AbstractDbTest
[Fact]
public async Task ClearLibraryProfile_RemovesImplicitOrUnlinksExplicit()
{
await ResetDb();
var (rps, user, lib, _) = await Setup();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (rps, user, lib, _) = await Setup(unitOfWork, context, mapper);
var implicitProfile = new AppUserReadingProfileBuilder(user.Id)
.WithKind(ReadingProfileKind.Implicit)
.WithLibrary(lib)
.Build();
Context.AppUserReadingProfiles.Add(implicitProfile);
await UnitOfWork.CommitAsync();
context.AppUserReadingProfiles.Add(implicitProfile);
await unitOfWork.CommitAsync();
await rps.ClearLibraryProfile(user.Id, lib.Id);
var profile = (await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
var profile = (await unitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
.FirstOrDefault(rp => rp.LibraryIds.Contains(lib.Id));
Assert.Null(profile);
var explicitProfile = new AppUserReadingProfileBuilder(user.Id)
.WithLibrary(lib)
.Build();
Context.AppUserReadingProfiles.Add(explicitProfile);
await UnitOfWork.CommitAsync();
context.AppUserReadingProfiles.Add(explicitProfile);
await unitOfWork.CommitAsync();
await rps.ClearLibraryProfile(user.Id, lib.Id);
profile = (await UnitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
profile = (await unitOfWork.AppUserReadingProfileRepository.GetProfilesForUser(user.Id))
.FirstOrDefault(rp => rp.LibraryIds.Contains(lib.Id));
Assert.Null(profile);
var stillExists = await Context.AppUserReadingProfiles.FindAsync(explicitProfile.Id);
var stillExists = await context.AppUserReadingProfiles.FindAsync(explicitProfile.Id);
Assert.NotNull(stillExists);
}
@ -531,8 +535,10 @@ public class ReadingProfileServiceTest: AbstractDbTest
/// is worth having.
/// </summary>
[Fact]
public void UpdateFields_UpdatesAll()
public async Task UpdateFields_UpdatesAll()
{
var (_, _, mapper) = await CreateDatabase();
// Repeat to ensure booleans are flipped and actually tested
for (int i = 0; i < 10; i++)
{
@ -544,18 +550,11 @@ public class ReadingProfileServiceTest: AbstractDbTest
ReadingProfileService.UpdateReaderProfileFields(profile, dto);
var newDto = Mapper.Map<UserReadingProfileDto>(profile);
var newDto = mapper.Map<UserReadingProfileDto>(profile);
Assert.True(RandfHelper.AreSimpleFieldsEqual(dto, newDto,
["<Id>k__BackingField", "<UserId>k__BackingField"]));
}
}
protected override async Task ResetDb()
{
Context.AppUserReadingProfiles.RemoveRange(Context.AppUserReadingProfiles);
await UnitOfWork.CommitAsync();
}
}

View File

@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Metadata;
using API.Data.Repositories;
using API.Entities;
@ -16,57 +17,52 @@ using Xunit.Abstractions;
namespace API.Tests.Services;
public class ScannerServiceTests : AbstractDbTest
public class ScannerServiceTests: AbstractDbTest
{
private readonly ITestOutputHelper _testOutputHelper;
private readonly ScannerHelper _scannerHelper;
private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/ScanTests");
public ScannerServiceTests(ITestOutputHelper testOutputHelper)
public ScannerServiceTests(ITestOutputHelper testOutputHelper): base(testOutputHelper)
{
_testOutputHelper = testOutputHelper;
// Set up Hangfire to use in-memory storage for testing
GlobalConfiguration.Configuration.UseInMemoryStorage();
_scannerHelper = new ScannerHelper(UnitOfWork, testOutputHelper);
}
protected override async Task ResetDb()
{
Context.Library.RemoveRange(Context.Library);
await Context.SaveChangesAsync();
}
protected async Task SetAllSeriesLastScannedInThePast(Library library, TimeSpan? duration = null)
protected async Task SetAllSeriesLastScannedInThePast(DataContext context, Library library, TimeSpan? duration = null)
{
foreach (var series in library.Series)
{
await SetLastScannedInThePast(series, duration, false);
await SetLastScannedInThePast(context, series, duration, false);
}
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
}
protected async Task SetLastScannedInThePast(Series series, TimeSpan? duration = null, bool save = true)
protected async Task SetLastScannedInThePast(DataContext context, Series series, TimeSpan? duration = null, bool save = true)
{
duration ??= TimeSpan.FromMinutes(2);
series.LastFolderScanned = DateTime.Now.Subtract(duration.Value);
Context.Series.Update(series);
context.Series.Update(series);
if (save)
{
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
}
}
[Fact]
public async Task ScanLibrary_ComicVine_PublisherFolder()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
var testcase = "Publisher - ComicVine.json";
var library = await _scannerHelper.GenerateScannerData(testcase);
var scanner = _scannerHelper.CreateServices();
var library = await scannerHelper.GenerateScannerData(testcase);
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Equal(4, postLib.Series.Count);
@ -75,11 +71,14 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_ShouldCombineNestedFolder()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
var testcase = "Series and Series-Series Combined - Manga.json";
var library = await _scannerHelper.GenerateScannerData(testcase);
var scanner = _scannerHelper.CreateServices();
var library = await scannerHelper.GenerateScannerData(testcase);
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -90,11 +89,14 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_FlatSeries()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Flat Series - Manga.json";
var library = await _scannerHelper.GenerateScannerData(testcase);
var scanner = _scannerHelper.CreateServices();
var library = await scannerHelper.GenerateScannerData(testcase);
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -106,11 +108,14 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_FlatSeriesWithSpecialFolder()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Flat Series with Specials Folder Alt Naming - Manga.json";
var library = await _scannerHelper.GenerateScannerData(testcase);
var scanner = _scannerHelper.CreateServices();
var library = await scannerHelper.GenerateScannerData(testcase);
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -121,11 +126,14 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_FlatSeriesWithSpecialFolder_AlternativeNaming()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Flat Series with Specials Folder Alt Naming - Manga.json";
var library = await _scannerHelper.GenerateScannerData(testcase);
var scanner = _scannerHelper.CreateServices();
var library = await scannerHelper.GenerateScannerData(testcase);
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -136,12 +144,15 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_FlatSeriesWithSpecial()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Flat Special - Manga.json";
var library = await _scannerHelper.GenerateScannerData(testcase);
var scanner = _scannerHelper.CreateServices();
var library = await scannerHelper.GenerateScannerData(testcase);
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -152,12 +163,15 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_SeriesWithUnbalancedParenthesis()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Scan Library Parses as ( - Manga.json";
var library = await _scannerHelper.GenerateScannerData(testcase);
var scanner = _scannerHelper.CreateServices();
var library = await scannerHelper.GenerateScannerData(testcase);
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -173,6 +187,9 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_LocalizedSeries()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Series with Localized - Manga.json";
// Get the first file and generate a ComicInfo
@ -183,12 +200,12 @@ public class ScannerServiceTests : AbstractDbTest
LocalizedSeries = "Sono Bisque Doll wa Koi wo Suru"
});
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -198,6 +215,9 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_LocalizedSeries2()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Series with Localized 2 - Manga.json";
// Get the first file and generate a ComicInfo
@ -208,12 +228,12 @@ public class ScannerServiceTests : AbstractDbTest
LocalizedSeries = "Futoku no Guild" // Filename has a capital N and localizedSeries has lowercase
});
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -230,6 +250,9 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_ExtraShouldNotAffect()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Series with Extra - Manga.json";
// Get the first file and generate a ComicInfo
@ -239,12 +262,12 @@ public class ScannerServiceTests : AbstractDbTest
Series = "The Novel's Extra",
});
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -261,14 +284,17 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_ImageSeries_SpecialGrouping()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Image Series with SP Folder - Manga.json";
var library = await _scannerHelper.GenerateScannerData(testcase);
var library = await scannerHelper.GenerateScannerData(testcase);
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -281,14 +307,17 @@ public class ScannerServiceTests : AbstractDbTest
/// </summary>
public async Task ScanLibrary_ImageSeries_SpecialGrouping_NonEnglish()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Image Series with SP Folder (Non English) - Image.json";
var library = await _scannerHelper.GenerateScannerData(testcase);
var library = await scannerHelper.GenerateScannerData(testcase);
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -305,6 +334,9 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_PublishersInheritFromChapters()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Flat Special - Manga.json";
var infos = new Dictionary<string, ComicInfo>();
@ -321,12 +353,12 @@ public class ScannerServiceTests : AbstractDbTest
Publisher = "Chapter Publisher"
});
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -343,14 +375,17 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_LooseChapters_Pdf()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "PDF Comic Chapters - Comic.json";
var library = await _scannerHelper.GenerateScannerData(testcase);
var library = await scannerHelper.GenerateScannerData(testcase);
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -362,14 +397,17 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_LooseChapters_Pdf_LN()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "PDF Comic Chapters - LightNovel.json";
var library = await _scannerHelper.GenerateScannerData(testcase);
var library = await scannerHelper.GenerateScannerData(testcase);
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -384,6 +422,9 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanSeries_NewChapterInNestedFolder()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Series with Localized - Manga.json";
// Get the first file and generate a ComicInfo
@ -394,12 +435,12 @@ public class ScannerServiceTests : AbstractDbTest
LocalizedSeries = "Sono Bisque Doll wa Koi wo Suru"
});
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -409,7 +450,7 @@ public class ScannerServiceTests : AbstractDbTest
// Bootstrap a new file in the nested "Sono Bisque Doll wa Koi wo Suru" directory and perform a series scan
var testDirectory = Path.Combine(_testDirectory, Path.GetFileNameWithoutExtension(testcase));
await _scannerHelper.Scaffold(testDirectory, ["My Dress-Up Darling/Sono Bisque Doll wa Koi wo Suru ch 11.cbz"]);
await scannerHelper.Scaffold(testDirectory, ["My Dress-Up Darling/Sono Bisque Doll wa Koi wo Suru ch 11.cbz"]);
// Now that a new file exists in the subdirectory, scan again
await scanner.ScanSeries(series.Id);
@ -421,6 +462,9 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_LocalizedSeries_MatchesFilename()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Localized Name matches Filename - Manga.json";
// Get the first file and generate a ComicInfo
@ -431,12 +475,12 @@ public class ScannerServiceTests : AbstractDbTest
LocalizedSeries = "Futoku no Guild"
});
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -449,6 +493,9 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_LocalizedSeries_MatchesFilename_SameNames()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Localized Name matches Filename - Manga.json";
// Get the first file and generate a ComicInfo
@ -459,12 +506,12 @@ public class ScannerServiceTests : AbstractDbTest
LocalizedSeries = "Futoku no Guild"
});
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -477,20 +524,23 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_ExcludePattern_Works()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Exclude Pattern 1 - Manga.json";
// Get the first file and generate a ComicInfo
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
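// Exclude anything under an "Extra" folder via a glob pattern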
library.LibraryExcludePatterns = [new LibraryExcludePattern() { Pattern = "**/Extra/*" }];
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -501,20 +551,23 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_ExcludePattern_FlippedSlashes_Works()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Exclude Pattern 1 - Manga.json";
// Get the first file and generate a ComicInfo
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
library.LibraryExcludePatterns = [new LibraryExcludePattern() { Pattern = "**\\Extra\\*" }];
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -525,11 +578,14 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_MultipleRoots_MultipleScans_DataPersists_Forced()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Multiple Roots - Manga.json";
// Get the first file and generate a ComicInfo
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath =
Path.Join(
@ -541,13 +597,13 @@ public class ScannerServiceTests : AbstractDbTest
new FolderPath() {Path = Path.Join(testDirectoryPath, "Root 2")}
];
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Equal(2, postLib.Series.Count);
@ -563,7 +619,7 @@ public class ScannerServiceTests : AbstractDbTest
// Rescan to ensure nothing changes yet again
await scanner.ScanLibrary(library.Id, true);
postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.Equal(2, postLib.Series.Count);
s = postLib.Series.First(s => s.Name == "Plush");
Assert.Equal(3, s.Volumes.Count);
@ -578,11 +634,14 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_MultipleRoots_MultipleScans_DataPersists_NonForced()
{
var (unitOfWork, context, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Multiple Roots - Manga.json";
// Get the first file and generate a ComicInfo
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath =
Path.Join(
@ -594,13 +653,13 @@ public class ScannerServiceTests : AbstractDbTest
new FolderPath() {Path = Path.Join(testDirectoryPath, "Root 2")}
];
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Equal(2, postLib.Series.Count);
@ -614,16 +673,17 @@ public class ScannerServiceTests : AbstractDbTest
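// Add a new Plush volume on disk so the non-forced rescan picks it up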
File.Copy(Path.Join(root1PlushFolder, "Plush v02.cbz"), Path.Join(root1PlushFolder, "Plush v03.cbz"));
// Emulate time passage by updating lastFolderScan to be a min in the past
await SetLastScannedInThePast(s);
await SetLastScannedInThePast(context, s);
// Rescan to ensure nothing changes yet again
await scanner.ScanLibrary(library.Id, false);
postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Equal(2, postLib.Series.Count);
s = postLib.Series.First(s => s.Name == "Plush");
s = postLib.Series.First(series => series.Name == "Plush");
Assert.Equal(3, s.Volumes.Count);
s2 = postLib.Series.First(s => s.Name == "Accel");
s2 = postLib.Series.First(series => series.Name == "Accel");
Assert.Single(s2.Volumes);
}
@ -631,11 +691,14 @@ public class ScannerServiceTests : AbstractDbTest
public async Task ScanLibrary_AlternatingRemoval_IssueReplication()
{
// https://github.com/Kareadita/Kavita/issues/3476#issuecomment-2661635558
var (unitOfWork, context, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Alternating Removal - Manga.json";
// Setup: Generate test library
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = Path.Combine(Directory.GetCurrentDirectory(),
"../../../Services/Test Data/ScannerService/ScanTests",
@ -647,14 +710,14 @@ public class ScannerServiceTests : AbstractDbTest
new FolderPath() { Path = Path.Combine(testDirectoryPath, "Root 2") }
];
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
// First Scan: Everything should be added
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Contains(postLib.Series, s => s.Name == "Accel");
@ -662,19 +725,19 @@ public class ScannerServiceTests : AbstractDbTest
// Second Scan: Remove Root 2, expect Accel to be removed
library.Folders = [new FolderPath() { Path = Path.Combine(testDirectoryPath, "Root 1") }];
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
// Emulate time passage by updating lastFolderScan to be a min in the past
foreach (var s in postLib.Series)
{
s.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(1));
Context.Series.Update(s);
context.Series.Update(s);
}
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
await scanner.ScanLibrary(library.Id);
postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.DoesNotContain(postLib.Series, s => s.Name == "Accel"); // Ensure Accel is gone
Assert.Contains(postLib.Series, s => s.Name == "Plush");
@ -685,30 +748,32 @@ public class ScannerServiceTests : AbstractDbTest
new FolderPath() { Path = Path.Combine(testDirectoryPath, "Root 1") },
new FolderPath() { Path = Path.Combine(testDirectoryPath, "Root 2") }
];
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
// Emulate time passage by updating lastFolderScan to be a min in the past
foreach (var s in postLib.Series)
{
s.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(1));
Context.Series.Update(s);
context.Series.Update(s);
}
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
await scanner.ScanLibrary(library.Id);
postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Contains(postLib.Series, s => s.Name == "Accel"); // Accel should be back
Assert.Contains(postLib.Series, s => s.Name == "Plush");
// Emulate time passage by updating lastFolderScan to be a min in the past
await SetAllSeriesLastScannedInThePast(postLib);
await SetAllSeriesLastScannedInThePast(context, postLib);
// Fourth Scan: Run again to check stability (should not remove Accel)
await scanner.ScanLibrary(library.Id);
postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Contains(postLib.Series, s => s.Name == "Accel");
Assert.Contains(postLib.Series, s => s.Name == "Plush");
}
@ -716,11 +781,14 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_DeleteSeriesInUI_ComeBack()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Delete Series In UI - Manga.json";
// Setup: Generate test library
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = Path.Combine(Directory.GetCurrentDirectory(),
"../../../Services/Test Data/ScannerService/ScanTests",
@ -732,14 +800,14 @@ public class ScannerServiceTests : AbstractDbTest
new FolderPath() { Path = Path.Combine(testDirectoryPath, "Root 2") }
];
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
// First Scan: Everything should be added
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Contains(postLib.Series, s => s.Name == "Accel");
@ -747,15 +815,16 @@ public class ScannerServiceTests : AbstractDbTest
// Second Scan: Delete the Series
library.Series = [];
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Empty(postLib.Series);
await scanner.ScanLibrary(library.Id);
postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Contains(postLib.Series, s => s.Name == "Accel"); // Ensure Accel comes back after the scan
Assert.Contains(postLib.Series, s => s.Name == "Plush");
}
@ -763,18 +832,21 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task SubFolders_NoRemovals_ChangesFound()
{
var (unitOfWork, context, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Subfolders always scanning all series changes - Manga.json";
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = library.Folders.First().Path;
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Equal(4, postLib.Series.Count);
@ -790,7 +862,7 @@ public class ScannerServiceTests : AbstractDbTest
Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Count);
Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Sum(v => v.Chapters.Count));
await SetAllSeriesLastScannedInThePast(postLib);
await SetAllSeriesLastScannedInThePast(context, postLib);
// Add a new chapter to a volume of the series, and scan. Validate that no chapters were lost, and the new
// chapter was added
@ -800,9 +872,9 @@ public class ScannerServiceTests : AbstractDbTest
Path.Join(executionerCopyDir, "The Executioner and Her Way of Life Vol. 1 Ch. 0002.cbz"));
await scanner.ScanLibrary(library.Id);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Equal(4, postLib.Series.Count);
@ -822,18 +894,21 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task RemovalPickedUp_NoOtherChanges()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Series removed when no other changes are made - Manga.json";
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = library.Folders.First().Path;
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Equal(2, postLib.Series.Count);
@ -841,9 +916,9 @@ public class ScannerServiceTests : AbstractDbTest
Directory.Delete(executionerCopyDir, true);
await scanner.ScanLibrary(library.Id);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
Assert.Single(postLib.Series, s => s.Name == "Spice and Wolf");
@ -854,18 +929,21 @@ public class ScannerServiceTests : AbstractDbTest
public async Task SubFoldersNoSubFolders_CorrectPickupAfterAdd()
{
// This test case is used in multiple tests and can result in conflict if not separated
var (unitOfWork, context, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Subfolders and files at root (2) - Manga.json";
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = library.Folders.First().Path;
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -873,7 +951,7 @@ public class ScannerServiceTests : AbstractDbTest
Assert.Equal(3, spiceAndWolf.Volumes.Count);
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
await SetLastScannedInThePast(spiceAndWolf);
await SetLastScannedInThePast(context, spiceAndWolf);
// Add volume to Spice and Wolf series directory
var spiceAndWolfDir = Path.Join(testDirectoryPath, "Spice and Wolf");
@ -882,7 +960,7 @@ public class ScannerServiceTests : AbstractDbTest
await scanner.ScanLibrary(library.Id);
postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -890,7 +968,7 @@ public class ScannerServiceTests : AbstractDbTest
Assert.Equal(4, spiceAndWolf.Volumes.Count);
Assert.Equal(5, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
await SetLastScannedInThePast(spiceAndWolf);
await SetLastScannedInThePast(context, spiceAndWolf);
// Add file in subfolder
spiceAndWolfDir = Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3");
@ -899,7 +977,7 @@ public class ScannerServiceTests : AbstractDbTest
await scanner.ScanLibrary(library.Id);
postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
@ -916,14 +994,17 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_SortOrderWorks()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Sort Order - Manga.json";
var library = await _scannerHelper.GenerateScannerData(testcase);
var library = await scannerHelper.GenerateScannerData(testcase);
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
// Get the loose leaf volume and confirm each chapter aligns with expectation of Sort Order
@ -943,6 +1024,9 @@ public class ScannerServiceTests : AbstractDbTest
[Fact]
public async Task ScanLibrary_MetadataDisabled_NoOverrides()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Series with Localized No Metadata - Manga.json";
// Get the first file and generate a ComicInfo
@ -953,17 +1037,17 @@ public class ScannerServiceTests : AbstractDbTest
LocalizedSeries = "Futoku no Guild" // Filename has a capital N and localizedSeries has lowercase
});
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var library = await scannerHelper.GenerateScannerData(testcase, infos);
// Disable metadata
library.EnableMetadata = false;
UnitOfWork.LibraryRepository.Update(library);
await UnitOfWork.CommitAsync();
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var scanner = _scannerHelper.CreateServices();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await UnitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
// Validate that there are 2 series
Assert.NotNull(postLib);
@ -972,4 +1056,30 @@ public class ScannerServiceTests : AbstractDbTest
Assert.Contains(postLib.Series, x => x.Name == "Immoral Guild");
Assert.Contains(postLib.Series, x => x.Name == "Futoku No Guild");
}
[Fact]
public async Task ScanLibrary_SortName_NoPrefix()
{
var (unitOfWork, _, _) = await CreateDatabase();
var scannerHelper = new ScannerHelper(unitOfWork, _testOutputHelper);
const string testcase = "Series with Prefix - Book.json";
var library = await scannerHelper.GenerateScannerData(testcase);
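// Strip leading articles (e.g. "The") from the sort name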
library.RemovePrefixForSortName = true;
unitOfWork.LibraryRepository.Update(library);
await unitOfWork.CommitAsync();
var scanner = scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
Assert.Equal("The Avengers", postLib.Series.First().Name);
Assert.Equal("Avengers", postLib.Series.First().SortName);
}
}

View File

@ -2,6 +2,7 @@
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.DTOs.Scrobbling;
using API.Entities;
@ -11,15 +12,18 @@ using API.Helpers.Builders;
using API.Services;
using API.Services.Plus;
using API.SignalR;
using Hangfire.Storage.SQLite.Entities;
using Kavita.Common;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Polly;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
#nullable enable
public class ScrobblingServiceTests : AbstractDbTest
public class ScrobblingServiceTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private const int ChapterPages = 100;
@ -34,58 +38,43 @@ public class ScrobblingServiceTests : AbstractDbTest
private const string ValidJwtToken =
"eyJhbGciOiJIUzI1NiJ9.eyJJc3N1ZXIiOiJJc3N1ZXIiLCJleHAiOjcyNzI0NTAxMTcsImlhdCI6MTc1MDAyMTMxN30.zADmcGq_BfxbcV8vy4xw5Cbzn4COkmVINxgqpuL17Ng";
private readonly ScrobblingService _service;
private readonly ILicenseService _licenseService;
private readonly ILocalizationService _localizationService;
private readonly ILogger<ScrobblingService> _logger;
private readonly IEmailService _emailService;
private readonly IKavitaPlusApiService _kavitaPlusApiService;
/// <summary>
/// IReaderService, without the ScrobblingService injected
///
/// </summary>
private readonly IReaderService _readerService;
/// <summary>
/// IReaderService, with the _service injected
/// </summary>
private readonly IReaderService _hookedUpReaderService;
public ScrobblingServiceTests()
/// <param name="unitOfWork"></param>
/// <param name="context"></param>
/// <returns>The first IReaderService is not hooked up to the ScrobblingService; the second one is</returns>
public async Task<(ScrobblingService, ILicenseService, IKavitaPlusApiService, IReaderService, IReaderService)> Setup(IUnitOfWork unitOfWork, DataContext context)
{
_licenseService = Substitute.For<ILicenseService>();
_localizationService = Substitute.For<ILocalizationService>();
_logger = Substitute.For<ILogger<ScrobblingService>>();
_emailService = Substitute.For<IEmailService>();
_kavitaPlusApiService = Substitute.For<IKavitaPlusApiService>();
var licenseService = Substitute.For<ILicenseService>();
var localizationService = Substitute.For<ILocalizationService>();
var logger = Substitute.For<ILogger<ScrobblingService>>();
var emailService = Substitute.For<IEmailService>();
var kavitaPlusApiService = Substitute.For<IKavitaPlusApiService>();
_service = new ScrobblingService(UnitOfWork, Substitute.For<IEventHub>(), _logger, _licenseService,
_localizationService, _emailService, _kavitaPlusApiService);
var service = new ScrobblingService(unitOfWork, Substitute.For<IEventHub>(), logger, licenseService,
localizationService, emailService, kavitaPlusApiService);
_readerService = new ReaderService(UnitOfWork,
var readerService = new ReaderService(unitOfWork,
Substitute.For<ILogger<ReaderService>>(),
Substitute.For<IEventHub>(),
Substitute.For<IImageService>(),
Substitute.For<IDirectoryService>(),
Substitute.For<IScrobblingService>()); // Do not use the actual one
_hookedUpReaderService = new ReaderService(UnitOfWork,
var hookedUpReaderService = new ReaderService(unitOfWork,
Substitute.For<ILogger<ReaderService>>(),
Substitute.For<IEventHub>(),
Substitute.For<IImageService>(),
Substitute.For<IDirectoryService>(),
_service);
service);
await SeedData(unitOfWork, context);
return (service, licenseService, kavitaPlusApiService, readerService, hookedUpReaderService);
}
protected override async Task ResetDb()
{
Context.ScrobbleEvent.RemoveRange(Context.ScrobbleEvent.ToList());
Context.Series.RemoveRange(Context.Series.ToList());
Context.Library.RemoveRange(Context.Library.ToList());
Context.AppUser.RemoveRange(Context.AppUser.ToList());
await UnitOfWork.CommitAsync();
}
private async Task SeedData()
private async Task SeedData(IUnitOfWork unitOfWork, DataContext context)
{
var series = new SeriesBuilder("Test Series")
.WithFormat(MangaFormat.Archive)
@ -122,7 +111,7 @@ public class ScrobblingServiceTests : AbstractDbTest
.Build();
Context.Library.Add(library);
context.Library.Add(library);
var user = new AppUserBuilder("testuser", "testuser")
//.WithPreferences(new UserPreferencesBuilder().WithAniListScrobblingEnabled(true).Build())
@ -130,13 +119,17 @@ public class ScrobblingServiceTests : AbstractDbTest
user.UserPreferences.AniListScrobblingEnabled = true;
UnitOfWork.UserRepository.Add(user);
unitOfWork.UserRepository.Add(user);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
}
private async Task<ScrobbleEvent> CreateScrobbleEvent(int? seriesId = null)
{
var (unitOfWork, context, _) = await CreateDatabase();
await Setup(unitOfWork, context);
var evt = new ScrobbleEvent
{
ScrobbleEventType = ScrobbleEventType.ChapterRead,
@ -148,7 +141,7 @@ public class ScrobblingServiceTests : AbstractDbTest
if (seriesId != null)
{
var series = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId.Value);
var series = await unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId.Value);
if (series != null) evt.Series = series;
}
@ -161,7 +154,10 @@ public class ScrobblingServiceTests : AbstractDbTest
[Fact]
public async Task PostScrobbleUpdate_AuthErrors()
{
_kavitaPlusApiService.PostScrobbleUpdate(null!, "")
var (unitOfWork, context, _) = await CreateDatabase();
var (service, _, kavitaPlusApiService, _, _) = await Setup(unitOfWork, context);
kavitaPlusApiService.PostScrobbleUpdate(null!, "")
.ReturnsForAnyArgs(new ScrobbleResponseDto()
{
ErrorMessage = "Unauthorized"
@ -170,7 +166,7 @@ public class ScrobblingServiceTests : AbstractDbTest
var evt = await CreateScrobbleEvent();
await Assert.ThrowsAsync<KavitaException>(async () =>
{
await _service.PostScrobbleUpdate(new ScrobbleDto(), "", evt);
await service.PostScrobbleUpdate(new ScrobbleDto(), "", evt);
});
Assert.True(evt.IsErrored);
Assert.Equal("Kavita+ subscription no longer active", evt.ErrorDetails);
@ -179,24 +175,26 @@ public class ScrobblingServiceTests : AbstractDbTest
[Fact]
public async Task PostScrobbleUpdate_UnknownSeriesLoggedAsError()
{
_kavitaPlusApiService.PostScrobbleUpdate(null!, "")
var (unitOfWork, context, _) = await CreateDatabase();
var (service, _, kavitaPlusApiService, _, _) = await Setup(unitOfWork, context);
kavitaPlusApiService.PostScrobbleUpdate(null!, "")
.ReturnsForAnyArgs(new ScrobbleResponseDto()
{
ErrorMessage = "Unknown Series"
});
await SeedData();
var evt = await CreateScrobbleEvent(1);
await _service.PostScrobbleUpdate(new ScrobbleDto(), "", evt);
await UnitOfWork.CommitAsync();
await service.PostScrobbleUpdate(new ScrobbleDto(), "", evt);
await unitOfWork.CommitAsync();
Assert.True(evt.IsErrored);
var series = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
var series = await unitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
Assert.NotNull(series);
Assert.True(series.IsBlacklisted);
var errors = await UnitOfWork.ScrobbleRepository.GetAllScrobbleErrorsForSeries(1);
var errors = await unitOfWork.ScrobbleRepository.GetAllScrobbleErrorsForSeries(1);
Assert.Single(errors);
Assert.Equal("Series cannot be matched for Scrobbling", errors.First().Comment);
Assert.Equal(series.Id, errors.First().SeriesId);
@ -205,7 +203,10 @@ public class ScrobblingServiceTests : AbstractDbTest
[Fact]
public async Task PostScrobbleUpdate_InvalidAccessToken()
{
_kavitaPlusApiService.PostScrobbleUpdate(null!, "")
var (unitOfWork, context, _) = await CreateDatabase();
var (service, _, kavitaPlusApiService, _, _) = await Setup(unitOfWork, context);
kavitaPlusApiService.PostScrobbleUpdate(null!, "")
.ReturnsForAnyArgs(new ScrobbleResponseDto()
{
ErrorMessage = "Access token is invalid"
@ -215,7 +216,7 @@ public class ScrobblingServiceTests : AbstractDbTest
await Assert.ThrowsAsync<KavitaException>(async () =>
{
await _service.PostScrobbleUpdate(new ScrobbleDto(), "", evt);
await service.PostScrobbleUpdate(new ScrobbleDto(), "", evt);
});
Assert.True(evt.IsErrored);
@ -229,76 +230,76 @@ public class ScrobblingServiceTests : AbstractDbTest
[Fact]
public async Task ProcessReadEvents_CreatesNoEventsWhenNoProgress()
{
await ResetDb();
await SeedData();
var (unitOfWork, context, _) = await CreateDatabase();
var (service, licenseService, kavitaPlusApiService, _, _) = await Setup(unitOfWork, context);
// Set Returns
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
_kavitaPlusApiService.GetRateLimit(Arg.Any<string>(), Arg.Any<string>())
licenseService.HasActiveLicense().Returns(Task.FromResult(true));
kavitaPlusApiService.GetRateLimit(Arg.Any<string>(), Arg.Any<string>())
.Returns(100);
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1);
Assert.NotNull(user);
// Ensure CanProcessScrobbleEvent returns true
user.AniListAccessToken = ValidJwtToken;
UnitOfWork.UserRepository.Update(user);
await UnitOfWork.CommitAsync();
unitOfWork.UserRepository.Update(user);
await unitOfWork.CommitAsync();
var chapter = await UnitOfWork.ChapterRepository.GetChapterAsync(4);
var chapter = await unitOfWork.ChapterRepository.GetChapterAsync(4);
Assert.NotNull(chapter);
var volume = await UnitOfWork.VolumeRepository.GetVolumeAsync(1, VolumeIncludes.Chapters);
var volume = await unitOfWork.VolumeRepository.GetVolumeAsync(1, VolumeIncludes.Chapters);
Assert.NotNull(volume);
// Call Scrobble without having any progress
await _service.ScrobbleReadingUpdate(1, 1);
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
await service.ScrobbleReadingUpdate(1, 1);
var events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Empty(events);
}
[Fact]
public async Task ProcessReadEvents_UpdateVolumeAndChapterData()
{
await ResetDb();
await SeedData();
var (unitOfWork, context, _) = await CreateDatabase();
var (service, licenseService, kavitaPlusApiService, readerService, _) = await Setup(unitOfWork, context);
// Set Returns
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
_kavitaPlusApiService.GetRateLimit(Arg.Any<string>(), Arg.Any<string>())
licenseService.HasActiveLicense().Returns(Task.FromResult(true));
kavitaPlusApiService.GetRateLimit(Arg.Any<string>(), Arg.Any<string>())
.Returns(100);
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1);
Assert.NotNull(user);
// Ensure CanProcessScrobbleEvent returns true
user.AniListAccessToken = ValidJwtToken;
UnitOfWork.UserRepository.Update(user);
await UnitOfWork.CommitAsync();
unitOfWork.UserRepository.Update(user);
await unitOfWork.CommitAsync();
var chapter = await UnitOfWork.ChapterRepository.GetChapterAsync(4);
var chapter = await unitOfWork.ChapterRepository.GetChapterAsync(4);
Assert.NotNull(chapter);
var volume = await UnitOfWork.VolumeRepository.GetVolumeAsync(1, VolumeIncludes.Chapters);
var volume = await unitOfWork.VolumeRepository.GetVolumeAsync(1, VolumeIncludes.Chapters);
Assert.NotNull(volume);
// Mark something as read to trigger event creation
await _readerService.MarkChaptersAsRead(user, 1, new List<Chapter>() {volume.Chapters[0]});
await UnitOfWork.CommitAsync();
await readerService.MarkChaptersAsRead(user, 1, new List<Chapter>() {volume.Chapters[0]});
await unitOfWork.CommitAsync();
// Call Scrobble while having some progress
await _service.ScrobbleReadingUpdate(user.Id, 1);
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
await service.ScrobbleReadingUpdate(user.Id, 1);
var events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Single(events);
// Give it some (more) read progress
await _readerService.MarkChaptersAsRead(user, 1, volume.Chapters);
await _readerService.MarkChaptersAsRead(user, 1, [chapter]);
await UnitOfWork.CommitAsync();
await readerService.MarkChaptersAsRead(user, 1, volume.Chapters);
await readerService.MarkChaptersAsRead(user, 1, [chapter]);
await unitOfWork.CommitAsync();
await _service.ProcessUpdatesSinceLastSync();
await service.ProcessUpdatesSinceLastSync();
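// The scrobble payload should reflect the chapter and volume marked as read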
await _kavitaPlusApiService.Received(1).PostScrobbleUpdate(
await kavitaPlusApiService.Received(1).PostScrobbleUpdate(
Arg.Is<ScrobbleDto>(data =>
data.ChapterNumber == (int)chapter.MaxNumber &&
data.VolumeNumber == (int)volume.MaxNumber
@ -313,84 +314,84 @@ public class ScrobblingServiceTests : AbstractDbTest
[Fact]
public async Task ScrobbleReadingUpdate_IgnoreNoLicense()
{
await ResetDb();
await SeedData();
var (unitOfWork, context, _) = await CreateDatabase();
var (service, licenseService, _, _, _) = await Setup(unitOfWork, context);
_licenseService.HasActiveLicense().Returns(false);
licenseService.HasActiveLicense().Returns(false);
await _service.ScrobbleReadingUpdate(1, 1);
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
await service.ScrobbleReadingUpdate(1, 1);
var events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Empty(events);
}
[Fact]
public async Task ScrobbleReadingUpdate_RemoveWhenNoProgress()
{
await ResetDb();
await SeedData();
var (unitOfWork, context, _) = await CreateDatabase();
var (service, licenseService, _, readerService, hookedUpReaderService) = await Setup(unitOfWork, context);
_licenseService.HasActiveLicense().Returns(true);
licenseService.HasActiveLicense().Returns(true);
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1);
Assert.NotNull(user);
var volume = await UnitOfWork.VolumeRepository.GetVolumeAsync(1, VolumeIncludes.Chapters);
var volume = await unitOfWork.VolumeRepository.GetVolumeAsync(1, VolumeIncludes.Chapters);
Assert.NotNull(volume);
await _readerService.MarkChaptersAsRead(user, 1, new List<Chapter>() {volume.Chapters[0]});
await UnitOfWork.CommitAsync();
await readerService.MarkChaptersAsRead(user, 1, new List<Chapter>() {volume.Chapters[0]});
await unitOfWork.CommitAsync();
await _service.ScrobbleReadingUpdate(1, 1);
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
await service.ScrobbleReadingUpdate(1, 1);
var events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Single(events);
var readEvent = events.First();
Assert.False(readEvent.IsProcessed);
await _hookedUpReaderService.MarkSeriesAsUnread(user, 1);
await UnitOfWork.CommitAsync();
await hookedUpReaderService.MarkSeriesAsUnread(user, 1);
await unitOfWork.CommitAsync();
// Existing event is deleted
await _service.ScrobbleReadingUpdate(1, 1);
events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
await service.ScrobbleReadingUpdate(1, 1);
events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Empty(events);
await _hookedUpReaderService.MarkSeriesAsUnread(user, 1);
await UnitOfWork.CommitAsync();
await hookedUpReaderService.MarkSeriesAsUnread(user, 1);
await unitOfWork.CommitAsync();
// No new events are added
events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Empty(events);
}
[Fact]
public async Task ScrobbleReadingUpdate_UpdateExistingNotIsProcessed()
{
await ResetDb();
await SeedData();
var (unitOfWork, context, _) = await CreateDatabase();
var (service, licenseService, _, readerService, _) = await Setup(unitOfWork, context);
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1);
Assert.NotNull(user);
var chapter1 = await UnitOfWork.ChapterRepository.GetChapterAsync(1);
var chapter2 = await UnitOfWork.ChapterRepository.GetChapterAsync(2);
var chapter3 = await UnitOfWork.ChapterRepository.GetChapterAsync(3);
var chapter1 = await unitOfWork.ChapterRepository.GetChapterAsync(1);
var chapter2 = await unitOfWork.ChapterRepository.GetChapterAsync(2);
var chapter3 = await unitOfWork.ChapterRepository.GetChapterAsync(3);
Assert.NotNull(chapter1);
Assert.NotNull(chapter2);
Assert.NotNull(chapter3);
_licenseService.HasActiveLicense().Returns(true);
licenseService.HasActiveLicense().Returns(true);
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
var events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Empty(events);
await _readerService.MarkChaptersAsRead(user, 1, [chapter1]);
await UnitOfWork.CommitAsync();
await readerService.MarkChaptersAsRead(user, 1, [chapter1]);
await unitOfWork.CommitAsync();
// Scrobble update
await _service.ScrobbleReadingUpdate(1, 1);
events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
await service.ScrobbleReadingUpdate(1, 1);
events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Single(events);
var readEvent = events[0];
@ -399,25 +400,25 @@ public class ScrobblingServiceTests : AbstractDbTest
// Mark as processed
readEvent.IsProcessed = true;
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
await _readerService.MarkChaptersAsRead(user, 1, [chapter2]);
await UnitOfWork.CommitAsync();
await readerService.MarkChaptersAsRead(user, 1, [chapter2]);
await unitOfWork.CommitAsync();
// Scrobble update
await _service.ScrobbleReadingUpdate(1, 1);
events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
await service.ScrobbleReadingUpdate(1, 1);
events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Equal(2, events.Count);
Assert.Single(events.Where(e => e.IsProcessed).ToList());
Assert.Single(events.Where(e => !e.IsProcessed).ToList());
// Should update the existing non processed event
await _readerService.MarkChaptersAsRead(user, 1, [chapter3]);
await UnitOfWork.CommitAsync();
await readerService.MarkChaptersAsRead(user, 1, [chapter3]);
await unitOfWork.CommitAsync();
// Scrobble update
await _service.ScrobbleReadingUpdate(1, 1);
events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
await service.ScrobbleReadingUpdate(1, 1);
events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Equal(2, events.Count);
Assert.Single(events.Where(e => e.IsProcessed).ToList());
Assert.Single(events.Where(e => !e.IsProcessed).ToList());
@ -430,18 +431,19 @@ public class ScrobblingServiceTests : AbstractDbTest
[Fact]
public async Task ScrobbleWantToReadUpdate_NoExistingEvents_WantToRead_ShouldCreateNewEvent()
{
// Arrange
await SeedData();
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
var (unitOfWork, context, _) = await CreateDatabase();
var (service, licenseService, _, _, _) = await Setup(unitOfWork, context);
licenseService.HasActiveLicense().Returns(Task.FromResult(true));
const int userId = 1;
const int seriesId = 1;
// Act
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
await service.ScrobbleWantToReadUpdate(userId, seriesId, true);
// Assert
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
var events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
Assert.Single(events);
Assert.Equal(ScrobbleEventType.AddWantToRead, events[0].ScrobbleEventType);
Assert.Equal(userId, events[0].AppUserId);
@ -450,18 +452,19 @@ public class ScrobblingServiceTests : AbstractDbTest
[Fact]
public async Task ScrobbleWantToReadUpdate_NoExistingEvents_RemoveWantToRead_ShouldCreateNewEvent()
{
// Arrange
await SeedData();
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
var (unitOfWork, context, _) = await CreateDatabase();
var (service, licenseService, _, _, _) = await Setup(unitOfWork, context);
licenseService.HasActiveLicense().Returns(Task.FromResult(true));
const int userId = 1;
const int seriesId = 1;
// Act
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
await service.ScrobbleWantToReadUpdate(userId, seriesId, false);
// Assert
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
var events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
Assert.Single(events);
Assert.Equal(ScrobbleEventType.RemoveWantToRead, events[0].ScrobbleEventType);
Assert.Equal(userId, events[0].AppUserId);
@ -470,21 +473,22 @@ public class ScrobblingServiceTests : AbstractDbTest
[Fact]
public async Task ScrobbleWantToReadUpdate_ExistingWantToReadEvent_WantToRead_ShouldNotCreateNewEvent()
{
// Arrange
await SeedData();
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
var (unitOfWork, context, _) = await CreateDatabase();
var (service, licenseService, _, _, _) = await Setup(unitOfWork, context);
licenseService.HasActiveLicense().Returns(Task.FromResult(true));
const int userId = 1;
const int seriesId = 1;
// First, let's create an event through the service
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
await service.ScrobbleWantToReadUpdate(userId, seriesId, true);
// Act - Try to create the same event again
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
await service.ScrobbleWantToReadUpdate(userId, seriesId, true);
// Assert
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
var events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
Assert.Single(events);
Assert.All(events, e => Assert.Equal(ScrobbleEventType.AddWantToRead, e.ScrobbleEventType));
@ -493,21 +497,22 @@ public class ScrobblingServiceTests : AbstractDbTest
[Fact]
public async Task ScrobbleWantToReadUpdate_ExistingWantToReadEvent_RemoveWantToRead_ShouldAddRemoveEvent()
{
// Arrange
await SeedData();
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
var (unitOfWork, context, _) = await CreateDatabase();
var (service, licenseService, _, _, _) = await Setup(unitOfWork, context);
licenseService.HasActiveLicense().Returns(Task.FromResult(true));
const int userId = 1;
const int seriesId = 1;
// First, let's create a want-to-read event through the service
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
await service.ScrobbleWantToReadUpdate(userId, seriesId, true);
// Act - Now remove from want-to-read
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
await service.ScrobbleWantToReadUpdate(userId, seriesId, false);
// Assert
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
var events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
Assert.Single(events);
Assert.Contains(events, e => e.ScrobbleEventType == ScrobbleEventType.RemoveWantToRead);
@ -516,21 +521,22 @@ public class ScrobblingServiceTests : AbstractDbTest
[Fact]
public async Task ScrobbleWantToReadUpdate_ExistingRemoveWantToReadEvent_RemoveWantToRead_ShouldNotCreateNewEvent()
{
// Arrange
await SeedData();
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
var (unitOfWork, context, _) = await CreateDatabase();
var (service, licenseService, _, _, _) = await Setup(unitOfWork, context);
licenseService.HasActiveLicense().Returns(Task.FromResult(true));
const int userId = 1;
const int seriesId = 1;
// First, let's create a remove-from-want-to-read event through the service
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
await service.ScrobbleWantToReadUpdate(userId, seriesId, false);
// Act - Try to create the same event again
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
await service.ScrobbleWantToReadUpdate(userId, seriesId, false);
// Assert
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
var events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
Assert.Single(events);
Assert.All(events, e => Assert.Equal(ScrobbleEventType.RemoveWantToRead, e.ScrobbleEventType));
@ -539,21 +545,22 @@ public class ScrobblingServiceTests : AbstractDbTest
[Fact]
public async Task ScrobbleWantToReadUpdate_ExistingRemoveWantToReadEvent_WantToRead_ShouldAddWantToReadEvent()
{
// Arrange
await SeedData();
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
var (unitOfWork, context, _) = await CreateDatabase();
var (service, licenseService, _, _, _) = await Setup(unitOfWork, context);
licenseService.HasActiveLicense().Returns(Task.FromResult(true));
const int userId = 1;
const int seriesId = 1;
// First, let's create a remove-from-want-to-read event through the service
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
await service.ScrobbleWantToReadUpdate(userId, seriesId, false);
// Act - Now add to want-to-read
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
await service.ScrobbleWantToReadUpdate(userId, seriesId, true);
// Assert
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
var events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
Assert.Single(events);
Assert.Contains(events, e => e.ScrobbleEventType == ScrobbleEventType.AddWantToRead);
@ -566,47 +573,47 @@ public class ScrobblingServiceTests : AbstractDbTest
[Fact]
public async Task ScrobbleRatingUpdate_IgnoreNoLicense()
{
await ResetDb();
await SeedData();
var (unitOfWork, context, _) = await CreateDatabase();
var (service, licenseService, _, _, _) = await Setup(unitOfWork, context);
_licenseService.HasActiveLicense().Returns(false);
licenseService.HasActiveLicense().Returns(false);
await _service.ScrobbleRatingUpdate(1, 1, 1);
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
await service.ScrobbleRatingUpdate(1, 1, 1);
var events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Empty(events);
}
[Fact]
public async Task ScrobbleRatingUpdate_UpdateExistingNotIsProcessed()
{
await ResetDb();
await SeedData();
var (unitOfWork, context, _) = await CreateDatabase();
var (service, licenseService, _, _, _) = await Setup(unitOfWork, context);
_licenseService.HasActiveLicense().Returns(true);
licenseService.HasActiveLicense().Returns(true);
var user = await UnitOfWork.UserRepository.GetUserByIdAsync(1);
var user = await unitOfWork.UserRepository.GetUserByIdAsync(1);
Assert.NotNull(user);
var series = await UnitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
var series = await unitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
Assert.NotNull(series);
await _service.ScrobbleRatingUpdate(user.Id, series.Id, 1);
var events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
await service.ScrobbleRatingUpdate(user.Id, series.Id, 1);
var events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Single(events);
Assert.Equal(1, events.First().Rating);
// Mark as processed
events.First().IsProcessed = true;
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
await _service.ScrobbleRatingUpdate(user.Id, series.Id, 5);
events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
await service.ScrobbleRatingUpdate(user.Id, series.Id, 5);
events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Equal(2, events.Count);
Assert.Single(events, evt => evt.IsProcessed);
Assert.Single(events, evt => !evt.IsProcessed);
await _service.ScrobbleRatingUpdate(user.Id, series.Id, 5);
events = await UnitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
await service.ScrobbleRatingUpdate(user.Id, series.Id, 5);
events = await unitOfWork.ScrobbleRepository.GetAllEventsForSeries(1);
Assert.Single(events, evt => !evt.IsProcessed);
Assert.Equal(5, events.First(evt => !evt.IsProcessed).Rating);

File diff suppressed because it is too large

View File

@ -1,8 +1,10 @@
using System.Collections.Generic;
using System.IO.Abstractions;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.DTOs;
using API.DTOs.KavitaPlus.Metadata;
using API.Entities;
using API.Entities.Enums;
@ -20,6 +22,11 @@ public class SettingsServiceTests
private readonly ISettingsService _settingsService;
private readonly IUnitOfWork _mockUnitOfWork;
private const string DefaultAgeKey = "default_age";
private const string DefaultFieldSource = "default_source";
private readonly static AgeRating DefaultAgeRating = AgeRating.Everyone;
private readonly static MetadataFieldType DefaultSourceField = MetadataFieldType.Genre;
public SettingsServiceTests()
{
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
@ -27,9 +34,195 @@ public class SettingsServiceTests
_mockUnitOfWork = Substitute.For<IUnitOfWork>();
_settingsService = new SettingsService(_mockUnitOfWork, ds,
Substitute.For<ILibraryWatcher>(), Substitute.For<ITaskScheduler>(),
Substitute.For<ILogger<SettingsService>>());
Substitute.For<ILogger<SettingsService>>(), Substitute.For<IOidcService>());
}
#region ImportMetadataSettings
[Fact]
public async Task ImportFieldMappings_ReplaceMode()
{
var existingSettings = CreateDefaultMetadataSettingsDto();
var newSettings = new MetadataSettingsDto
{
Whitelist = ["new_whitelist_item"],
Blacklist = ["new_blacklist_item"],
AgeRatingMappings = new Dictionary<string, AgeRating> { ["new_age"] = AgeRating.R18Plus },
FieldMappings =
[
new MetadataFieldMappingDto { Id = 10, SourceValue = "new_source", SourceType = MetadataFieldType.Genre, DestinationValue = "new_dest", DestinationType = MetadataFieldType.Tag }
],
};
var importSettings = new ImportSettingsDto
{
ImportMode = ImportMode.Replace,
Whitelist = true,
Blacklist = true,
AgeRatings = true,
FieldMappings = true,
Resolution = ConflictResolution.Manual,
AgeRatingConflictResolutions = [],
};
var settingsRepo = Substitute.For<ISettingsRepository>();
settingsRepo.GetMetadataSettingDto().Returns(existingSettings);
_mockUnitOfWork.SettingsRepository.Returns(settingsRepo);
var result = await _settingsService.ImportFieldMappings(newSettings, importSettings);
Assert.True(result.Success);
Assert.Empty(result.AgeRatingConflicts);
Assert.Equal(existingSettings.Whitelist, newSettings.Whitelist);
Assert.Equal(existingSettings.Blacklist, newSettings.Blacklist);
Assert.Equal(existingSettings.AgeRatingMappings, newSettings.AgeRatingMappings);
Assert.Equal(existingSettings.FieldMappings, newSettings.FieldMappings);
}
[Fact]
public async Task ImportFieldMappings_MergeMode_WithNoConflicts()
{
var existingSettingsDto = CreateDefaultMetadataSettingsDto();
var existingSettings = CreateDefaultMetadataSettings();
var newSettings = new MetadataSettingsDto
{
Whitelist = ["new_whitelist_item"],
Blacklist = ["new_blacklist_item"],
AgeRatingMappings = new Dictionary<string, AgeRating> { ["new_age"] = AgeRating.R18Plus },
FieldMappings =
[
new MetadataFieldMappingDto { Id = 10, SourceValue = "new_source", SourceType = MetadataFieldType.Genre, DestinationValue = "new_dest", DestinationType = MetadataFieldType.Tag },
],
};
var importSettings = new ImportSettingsDto
{
ImportMode = ImportMode.Merge,
Whitelist = true,
Blacklist = true,
AgeRatings = true,
FieldMappings = true,
Resolution = ConflictResolution.Manual,
AgeRatingConflictResolutions = [],
};
var settingsRepo = Substitute.For<ISettingsRepository>();
settingsRepo.GetMetadataSettingDto().Returns(existingSettingsDto);
settingsRepo.GetMetadataSettings().Returns(existingSettings);
_mockUnitOfWork.SettingsRepository.Returns(settingsRepo);
var result = await _settingsService.ImportFieldMappings(newSettings, importSettings);
Assert.True(result.Success);
Assert.Empty(result.AgeRatingConflicts);
Assert.Contains("default_white", existingSettingsDto.Whitelist);
Assert.Contains("new_whitelist_item", existingSettingsDto.Whitelist);
Assert.Contains("default_black", existingSettingsDto.Blacklist);
Assert.Contains("new_blacklist_item", existingSettingsDto.Blacklist);
Assert.Equal(2, existingSettingsDto.AgeRatingMappings.Count);
Assert.Equal(2, existingSettingsDto.FieldMappings.Count);
}
[Fact]
public async Task ImportFieldMappings_MergeMode_UseConfiguredOverrides()
{
var existingSettingsDto = CreateDefaultMetadataSettingsDto();
var existingSettings = CreateDefaultMetadataSettings();
var newSettings = new MetadataSettingsDto
{
Whitelist = [],
Blacklist = [],
AgeRatingMappings = new Dictionary<string, AgeRating> { [DefaultAgeKey] = AgeRating.R18Plus },
FieldMappings =
[
new MetadataFieldMappingDto
{
Id = 20,
SourceValue = DefaultFieldSource,
SourceType = DefaultSourceField,
DestinationValue = "different_dest",
DestinationType = MetadataFieldType.Genre,
}
],
};
var importSettings = new ImportSettingsDto
{
ImportMode = ImportMode.Merge,
Whitelist = false,
Blacklist = false,
AgeRatings = true,
FieldMappings = true,
Resolution = ConflictResolution.Manual,
AgeRatingConflictResolutions = new Dictionary<string, ConflictResolution> { [DefaultAgeKey] = ConflictResolution.Replace },
};
var settingsRepo = Substitute.For<ISettingsRepository>();
settingsRepo.GetMetadataSettingDto().Returns(existingSettingsDto);
settingsRepo.GetMetadataSettings().Returns(existingSettings);
_mockUnitOfWork.SettingsRepository.Returns(settingsRepo);
var result = await _settingsService.ImportFieldMappings(newSettings, importSettings);
Assert.True(result.Success);
Assert.Empty(result.AgeRatingConflicts);
Assert.Equal(AgeRating.R18Plus, existingSettingsDto.AgeRatingMappings[DefaultAgeKey]);
}
[Fact]
public async Task ImportFieldMappings_MergeMode_SkipIdenticalMappings()
{
var existingSettingsDto = CreateDefaultMetadataSettingsDto();
var existingSettings = CreateDefaultMetadataSettings();
var newSettings = new MetadataSettingsDto
{
Whitelist = [],
Blacklist = [],
AgeRatingMappings = new Dictionary<string, AgeRating> { ["existing_age"] = AgeRating.Mature }, // Same value
FieldMappings =
[
new MetadataFieldMappingDto
{
Id = 20,
SourceValue = "existing_source",
SourceType = MetadataFieldType.Genre,
DestinationValue = "existing_dest", // Same destination
DestinationType = MetadataFieldType.Tag // Same destination type
}
],
};
var importSettings = new ImportSettingsDto
{
ImportMode = ImportMode.Merge,
Whitelist = false,
Blacklist = false,
AgeRatings = true,
FieldMappings = true,
Resolution = ConflictResolution.Manual,
AgeRatingConflictResolutions = [],
};
var settingsRepo = Substitute.For<ISettingsRepository>();
settingsRepo.GetMetadataSettingDto().Returns(existingSettingsDto);
settingsRepo.GetMetadataSettings().Returns(existingSettings);
_mockUnitOfWork.SettingsRepository.Returns(settingsRepo);
var result = await _settingsService.ImportFieldMappings(newSettings, importSettings);
Assert.True(result.Success);
Assert.Empty(result.AgeRatingConflicts);
}
#endregion
#region UpdateMetadataSettings
[Fact]
@ -289,4 +482,46 @@ public class SettingsServiceTests
}
#endregion
private MetadataSettingsDto CreateDefaultMetadataSettingsDto()
{
return new MetadataSettingsDto
{
Whitelist = ["default_white"],
Blacklist = ["default_black"],
AgeRatingMappings = new Dictionary<string, AgeRating> { ["default_age"] = AgeRating.Everyone },
FieldMappings =
[
new MetadataFieldMappingDto
{
Id = 1,
SourceValue = "default_source",
SourceType = MetadataFieldType.Genre,
DestinationValue = "default_dest",
DestinationType = MetadataFieldType.Tag
},
],
};
}
private MetadataSettings CreateDefaultMetadataSettings()
{
return new MetadataSettings
{
Whitelist = ["default_white"],
Blacklist = ["default_black"],
AgeRatingMappings = new Dictionary<string, AgeRating> { [DefaultAgeKey] = DefaultAgeRating },
FieldMappings =
[
new MetadataFieldMapping
{
Id = 1,
SourceValue = DefaultFieldSource,
SourceType = DefaultSourceField,
DestinationValue = "default_dest",
DestinationType = MetadataFieldType.Tag
},
],
};
}
}

View File

@ -18,37 +18,25 @@ using Xunit.Abstractions;
namespace API.Tests.Services;
public abstract class SiteThemeServiceTest : AbstractDbTest
public class SiteThemeServiceTest(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private readonly ITestOutputHelper _testOutputHelper;
private readonly ITestOutputHelper _testOutputHelper = outputHelper;
private readonly IEventHub _messageHub = Substitute.For<IEventHub>();
protected SiteThemeServiceTest(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
}
protected override async Task ResetDb()
{
Context.SiteTheme.RemoveRange(Context.SiteTheme);
await Context.SaveChangesAsync();
// Recreate defaults
await Seed.SeedThemes(Context);
}
[Fact]
public async Task UpdateDefault_ShouldThrowOnInvalidId()
{
await ResetDb();
_testOutputHelper.WriteLine($"[UpdateDefault_ShouldThrowOnInvalidId] All Themes: {(await UnitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
var (unitOfWork, context, mapper) = await CreateDatabase();
await Seed.SeedThemes(context);
_testOutputHelper.WriteLine($"[UpdateDefault_ShouldThrowOnInvalidId] All Themes: {(await unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
var filesystem = CreateFileSystem();
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var siteThemeService = new ThemeService(ds, UnitOfWork, _messageHub, Substitute.For<IFileService>(),
var siteThemeService = new ThemeService(ds, unitOfWork, _messageHub, Substitute.For<IFileService>(),
Substitute.For<ILogger<ThemeService>>(), Substitute.For<IMemoryCache>());
Context.SiteTheme.Add(new SiteTheme()
context.SiteTheme.Add(new SiteTheme()
{
Name = "Custom",
NormalizedName = "Custom".ToNormalized(),
@ -56,10 +44,10 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
FileName = "custom.css",
IsDefault = false
});
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var ex = await Assert.ThrowsAsync<KavitaException>(() => siteThemeService.UpdateDefault(10));
Assert.Equal("Theme file missing or invalid", ex.Message);
Assert.Equal("theme-doesnt-exist", ex.Message);
}
@ -67,15 +55,17 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
[Fact]
public async Task GetContent_ShouldReturnContent()
{
await ResetDb();
_testOutputHelper.WriteLine($"[GetContent_ShouldReturnContent] All Themes: {(await UnitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
var (unitOfWork, context, mapper) = await CreateDatabase();
await Seed.SeedThemes(context);
_testOutputHelper.WriteLine($"[GetContent_ShouldReturnContent] All Themes: {(await unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
var filesystem = CreateFileSystem();
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var siteThemeService = new ThemeService(ds, UnitOfWork, _messageHub, Substitute.For<IFileService>(),
var siteThemeService = new ThemeService(ds, unitOfWork, _messageHub, Substitute.For<IFileService>(),
Substitute.For<ILogger<ThemeService>>(), Substitute.For<IMemoryCache>());
Context.SiteTheme.Add(new SiteTheme()
context.SiteTheme.Add(new SiteTheme()
{
Name = "Custom",
NormalizedName = "Custom".ToNormalized(),
@ -83,9 +73,9 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
FileName = "custom.css",
IsDefault = false
});
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var content = await siteThemeService.GetContent((await UnitOfWork.SiteThemeRepository.GetThemeDtoByName("Custom")).Id);
var content = await siteThemeService.GetContent((await unitOfWork.SiteThemeRepository.GetThemeDtoByName("Custom")).Id);
Assert.NotNull(content);
Assert.NotEmpty(content);
Assert.Equal("123", content);
@ -94,15 +84,17 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
[Fact]
public async Task UpdateDefault_ShouldHaveOneDefault()
{
await ResetDb();
_testOutputHelper.WriteLine($"[UpdateDefault_ShouldHaveOneDefault] All Themes: {(await UnitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
var (unitOfWork, context, mapper) = await CreateDatabase();
await Seed.SeedThemes(context);
_testOutputHelper.WriteLine($"[UpdateDefault_ShouldHaveOneDefault] All Themes: {(await unitOfWork.SiteThemeRepository.GetThemes()).Count(t => t.IsDefault)}");
var filesystem = CreateFileSystem();
filesystem.AddFile($"{SiteThemeDirectory}custom.css", new MockFileData("123"));
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var siteThemeService = new ThemeService(ds, UnitOfWork, _messageHub, Substitute.For<IFileService>(),
var siteThemeService = new ThemeService(ds, unitOfWork, _messageHub, Substitute.For<IFileService>(),
Substitute.For<ILogger<ThemeService>>(), Substitute.For<IMemoryCache>());
Context.SiteTheme.Add(new SiteTheme()
context.SiteTheme.Add(new SiteTheme()
{
Name = "Custom",
NormalizedName = "Custom".ToNormalized(),
@ -110,16 +102,16 @@ public abstract class SiteThemeServiceTest : AbstractDbTest
FileName = "custom.css",
IsDefault = false
});
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var customTheme = (await UnitOfWork.SiteThemeRepository.GetThemeDtoByName("Custom"));
var customTheme = (await unitOfWork.SiteThemeRepository.GetThemeDtoByName("Custom"));
Assert.NotNull(customTheme);
await siteThemeService.UpdateDefault(customTheme.Id);
Assert.Equal(customTheme.Id, (await UnitOfWork.SiteThemeRepository.GetDefaultTheme()).Id);
Assert.Equal(customTheme.Id, (await unitOfWork.SiteThemeRepository.GetDefaultTheme()).Id);
}
}

View File

@ -1,5 +1,6 @@
using API.Helpers.Builders;
using API.Services.Plus;
using Xunit.Abstractions;
namespace API.Tests.Services;
using System.Collections.Generic;
@ -21,105 +22,29 @@ using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
public class TachiyomiServiceTests
public class TachiyomiServiceTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private readonly IUnitOfWork _unitOfWork;
private readonly IMapper _mapper;
private readonly DataContext _context;
private readonly ReaderService _readerService;
private readonly TachiyomiService _tachiyomiService;
private const string CacheDirectory = "C:/kavita/config/cache/";
private const string CoverImageDirectory = "C:/kavita/config/covers/";
private const string BackupDirectory = "C:/kavita/config/backups/";
private const string DataDirectory = "C:/data/";
public TachiyomiServiceTests()
public (IReaderService, ITachiyomiService) Setup(IUnitOfWork unitOfWork, IMapper mapper)
{
var contextOptions = new DbContextOptionsBuilder().UseSqlite(CreateInMemoryDatabase()).Options;
_context = new DataContext(contextOptions);
Task.Run(SeedDb).GetAwaiter().GetResult();
var config = new MapperConfiguration(cfg => cfg.AddProfile<AutoMapperProfiles>());
_mapper = config.CreateMapper();
_unitOfWork = new UnitOfWork(_context, _mapper, null);
_readerService = new ReaderService(_unitOfWork, Substitute.For<ILogger<ReaderService>>(),
var readerService = new ReaderService(unitOfWork, Substitute.For<ILogger<ReaderService>>(),
Substitute.For<IEventHub>(), Substitute.For<IImageService>(),
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()),
Substitute.For<IScrobblingService>());
_tachiyomiService = new TachiyomiService(_unitOfWork, _mapper, Substitute.For<ILogger<TachiyomiService>>(), _readerService);
var tachiyomiService = new TachiyomiService(unitOfWork, mapper, Substitute.For<ILogger<TachiyomiService>>(), readerService);
return (readerService, tachiyomiService);
}
#region Setup
private static DbConnection CreateInMemoryDatabase()
{
var connection = new SqliteConnection("Filename=:memory:");
connection.Open();
return connection;
}
private async Task<bool> SeedDb()
{
await _context.Database.MigrateAsync();
var filesystem = CreateFileSystem();
await Seed.SeedSettings(_context,
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem));
var setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.CacheDirectory).SingleAsync();
setting.Value = CacheDirectory;
setting = await _context.ServerSetting.Where(s => s.Key == ServerSettingKey.BackupDirectory).SingleAsync();
setting.Value = BackupDirectory;
_context.ServerSetting.Update(setting);
_context.Library.Add(
new LibraryBuilder("Manga")
.WithFolderPath(new FolderPathBuilder("C:/data/").Build())
.Build()
);
return await _context.SaveChangesAsync() > 0;
}
private async Task ResetDb()
{
_context.Series.RemoveRange(_context.Series.ToList());
await _context.SaveChangesAsync();
}
private static MockFileSystem CreateFileSystem()
{
var fileSystem = new MockFileSystem();
fileSystem.Directory.SetCurrentDirectory("C:/kavita/");
fileSystem.AddDirectory("C:/kavita/config/");
fileSystem.AddDirectory(CacheDirectory);
fileSystem.AddDirectory(CoverImageDirectory);
fileSystem.AddDirectory(BackupDirectory);
fileSystem.AddDirectory(DataDirectory);
return fileSystem;
}
#endregion
#region GetLatestChapter
[Fact]
public async Task GetLatestChapter_ShouldReturnChapter_NoProgress()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (readerService, tachiyomiService) = Setup(unitOfWork, mapper);
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
@ -145,7 +70,7 @@ public class TachiyomiServiceTests
.Build();
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "majora2007",
Libraries = new List<Library>()
@ -154,9 +79,9 @@ public class TachiyomiServiceTests
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var latestChapter = await _tachiyomiService.GetLatestChapter(1, 1);
var latestChapter = await tachiyomiService.GetLatestChapter(1, 1);
Assert.Null(latestChapter);
}
@ -164,7 +89,8 @@ public class TachiyomiServiceTests
[Fact]
public async Task GetLatestChapter_ShouldReturnMaxChapter_CompletelyRead()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (readerService, tachiyomiService) = Setup(unitOfWork, mapper);
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
@ -189,7 +115,7 @@ public class TachiyomiServiceTests
.WithSeries(series)
.Build();
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "majora2007",
Libraries = new List<Library>()
@ -198,16 +124,16 @@ public class TachiyomiServiceTests
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await _readerService.MarkSeriesAsRead(user,1);
var user = await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await readerService.MarkSeriesAsRead(user,1);
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var latestChapter = await _tachiyomiService.GetLatestChapter(1, 1);
var latestChapter = await tachiyomiService.GetLatestChapter(1, 1);
Assert.Equal("96", latestChapter.Number);
}
@ -215,7 +141,8 @@ public class TachiyomiServiceTests
[Fact]
public async Task GetLatestChapter_ShouldReturnHighestChapter_Progress()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (readerService, tachiyomiService) = Setup(unitOfWork, mapper);
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
@ -240,7 +167,7 @@ public class TachiyomiServiceTests
.WithSeries(series)
.Build();
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "majora2007",
Libraries = new List<Library>()
@ -249,16 +176,16 @@ public class TachiyomiServiceTests
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await _tachiyomiService.MarkChaptersUntilAsRead(user,1,21);
var user = await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await tachiyomiService.MarkChaptersUntilAsRead(user,1,21);
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var latestChapter = await _tachiyomiService.GetLatestChapter(1, 1);
var latestChapter = await tachiyomiService.GetLatestChapter(1, 1);
Assert.Equal("21", latestChapter.Number);
}
@ -266,7 +193,8 @@ public class TachiyomiServiceTests
[Fact]
public async Task GetLatestChapter_ShouldReturnEncodedVolume_Progress()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (readerService, tachiyomiService) = Setup(unitOfWork, mapper);
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
@ -291,7 +219,7 @@ public class TachiyomiServiceTests
.WithSeries(series)
.Build();
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "majora2007",
Libraries = new List<Library>()
@ -300,24 +228,25 @@ public class TachiyomiServiceTests
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
var user = await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await _tachiyomiService.MarkChaptersUntilAsRead(user,1,1/10_000F);
await tachiyomiService.MarkChaptersUntilAsRead(user,1,1/10_000F);
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var latestChapter = await _tachiyomiService.GetLatestChapter(1, 1);
var latestChapter = await tachiyomiService.GetLatestChapter(1, 1);
Assert.Equal("0.0001", latestChapter.Number);
}
[Fact]
public async Task GetLatestChapter_ShouldReturnEncodedVolume_Progress2()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (readerService, tachiyomiService) = Setup(unitOfWork, mapper);
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder("1")
@ -339,7 +268,7 @@ public class TachiyomiServiceTests
.WithSeries(series)
.Build();
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "majora2007",
Libraries = new List<Library>()
@ -348,17 +277,17 @@ public class TachiyomiServiceTests
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
var user = await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await _readerService.MarkSeriesAsRead(user, 1);
await readerService.MarkSeriesAsRead(user, 1);
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var latestChapter = await _tachiyomiService.GetLatestChapter(1, 1);
var latestChapter = await tachiyomiService.GetLatestChapter(1, 1);
Assert.Equal("0.0003", latestChapter.Number);
}
@ -366,7 +295,8 @@ public class TachiyomiServiceTests
[Fact]
public async Task GetLatestChapter_ShouldReturnEncodedYearlyVolume_Progress()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (readerService, tachiyomiService) = Setup(unitOfWork, mapper);
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
@ -389,7 +319,7 @@ public class TachiyomiServiceTests
.WithSeries(series)
.Build();
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "majora2007",
Libraries = new List<Library>()
@ -398,16 +328,16 @@ public class TachiyomiServiceTests
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
var user = await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await _tachiyomiService.MarkChaptersUntilAsRead(user,1,2002/10_000F);
await tachiyomiService.MarkChaptersUntilAsRead(user,1,2002/10_000F);
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var latestChapter = await _tachiyomiService.GetLatestChapter(1, 1);
var latestChapter = await tachiyomiService.GetLatestChapter(1, 1);
Assert.Equal("0.2002", latestChapter.Number);
}
@ -419,7 +349,8 @@ public class TachiyomiServiceTests
[Fact]
public async Task MarkChaptersUntilAsRead_ShouldReturnChapter_NoProgress()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (readerService, tachiyomiService) = Setup(unitOfWork, mapper);
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
@ -444,7 +375,7 @@ public class TachiyomiServiceTests
.WithSeries(series)
.Build();
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "majora2007",
Libraries = new List<Library>()
@ -453,16 +384,17 @@ public class TachiyomiServiceTests
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var latestChapter = await _tachiyomiService.GetLatestChapter(1, 1);
var latestChapter = await tachiyomiService.GetLatestChapter(1, 1);
Assert.Null(latestChapter);
}
[Fact]
public async Task MarkChaptersUntilAsRead_ShouldReturnMaxChapter_CompletelyRead()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (readerService, tachiyomiService) = Setup(unitOfWork, mapper);
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
@ -487,7 +419,7 @@ public class TachiyomiServiceTests
.WithSeries(series)
.Build();
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "majora2007",
Libraries = new List<Library>()
@ -496,15 +428,15 @@ public class TachiyomiServiceTests
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await _readerService.MarkSeriesAsRead(user,1);
var user = await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await readerService.MarkSeriesAsRead(user,1);
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var latestChapter = await _tachiyomiService.GetLatestChapter(1, 1);
var latestChapter = await tachiyomiService.GetLatestChapter(1, 1);
Assert.Equal("96", latestChapter.Number);
}
@ -512,7 +444,8 @@ public class TachiyomiServiceTests
[Fact]
public async Task MarkChaptersUntilAsRead_ShouldReturnHighestChapter_Progress()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (readerService, tachiyomiService) = Setup(unitOfWork, mapper);
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
@ -537,7 +470,7 @@ public class TachiyomiServiceTests
.WithSeries(series)
.Build();
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "majora2007",
Libraries = new List<Library>()
@ -546,22 +479,23 @@ public class TachiyomiServiceTests
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await _tachiyomiService.MarkChaptersUntilAsRead(user,1,21);
var user = await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await tachiyomiService.MarkChaptersUntilAsRead(user,1,21);
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var latestChapter = await _tachiyomiService.GetLatestChapter(1, 1);
var latestChapter = await tachiyomiService.GetLatestChapter(1, 1);
Assert.Equal("21", latestChapter.Number);
}
[Fact]
public async Task MarkChaptersUntilAsRead_ShouldReturnEncodedVolume_Progress()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var (readerService, tachiyomiService) = Setup(unitOfWork, mapper);
var series = new SeriesBuilder("Test")
.WithVolume(new VolumeBuilder(API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)
.WithChapter(new ChapterBuilder("95").WithPages(1).Build())
@ -585,7 +519,7 @@ public class TachiyomiServiceTests
.WithSeries(series)
.Build();
_context.AppUser.Add(new AppUser()
context.AppUser.Add(new AppUser()
{
UserName = "majora2007",
Libraries = new List<Library>()
@ -594,16 +528,16 @@ public class TachiyomiServiceTests
}
});
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
var user = await unitOfWork.UserRepository.GetUserByUsernameAsync("majora2007", AppUserIncludes.Progress);
await _tachiyomiService.MarkChaptersUntilAsRead(user,1,1/10_000F);
await tachiyomiService.MarkChaptersUntilAsRead(user,1,1/10_000F);
await _context.SaveChangesAsync();
await context.SaveChangesAsync();
var latestChapter = await _tachiyomiService.GetLatestChapter(1, 1);
var latestChapter = await tachiyomiService.GetLatestChapter(1, 1);
Assert.Equal("0.0001", latestChapter.Number);
}

View File

@ -0,0 +1,3 @@
[
"The Avengers/The Avengers vol 1.pdf"
]

View File

@ -3,6 +3,7 @@ using System.IO;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
@ -14,38 +15,34 @@ using API.Services.Tasks.Metadata;
using API.SignalR;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Polly;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services;
public class WordCountAnalysisTests : AbstractDbTest
public class WordCountAnalysisTests(ITestOutputHelper outputHelper): AbstractDbTest(outputHelper)
{
private readonly IReaderService _readerService;
private readonly string _testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/BookService");
private const long WordCount = 33608; // 37417 if splitting on space, 33608 if just character count
private const long MinHoursToRead = 1;
private const float AvgHoursToRead = 1.66954792f;
private const long MaxHoursToRead = 3;
public WordCountAnalysisTests()
private IReaderService Setup(IUnitOfWork unitOfWork)
{
_readerService = new ReaderService(UnitOfWork, Substitute.For<ILogger<ReaderService>>(),
return new ReaderService(unitOfWork, Substitute.For<ILogger<ReaderService>>(),
Substitute.For<IEventHub>(), Substitute.For<IImageService>(),
new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new MockFileSystem()),
Substitute.For<IScrobblingService>());
}
protected override async Task ResetDb()
{
Context.Series.RemoveRange(Context.Series.ToList());
await Context.SaveChangesAsync();
}
[Fact]
public async Task ReadingTimeShouldBeNonZero()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var readerService = Setup(unitOfWork);
var series = new SeriesBuilder("Test Series")
.WithFormat(MangaFormat.Epub)
.Build();
@ -57,7 +54,7 @@ public class WordCountAnalysisTests : AbstractDbTest
MangaFormat.Epub).Build())
.Build();
Context.Library.Add(new LibraryBuilder("Test LIb", LibraryType.Book)
context.Library.Add(new LibraryBuilder("Test LIb", LibraryType.Book)
.WithSeries(series)
.Build());
@ -68,12 +65,12 @@ public class WordCountAnalysisTests : AbstractDbTest
.Build(),
};
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var cacheService = new CacheHelper(new FileService());
var service = new WordCountAnalyzerService(Substitute.For<ILogger<WordCountAnalyzerService>>(), UnitOfWork,
Substitute.For<IEventHub>(), cacheService, _readerService, Substitute.For<IMediaErrorService>());
var service = new WordCountAnalyzerService(Substitute.For<ILogger<WordCountAnalyzerService>>(), unitOfWork,
Substitute.For<IEventHub>(), cacheService, readerService, Substitute.For<IMediaErrorService>());
await service.ScanSeries(1, 1);
@ -101,7 +98,9 @@ public class WordCountAnalysisTests : AbstractDbTest
[Fact]
public async Task ReadingTimeShouldIncreaseWhenNewBookAdded()
{
await ResetDb();
var (unitOfWork, context, mapper) = await CreateDatabase();
var readerService = Setup(unitOfWork);
var chapter = new ChapterBuilder("")
.WithFile(new MangaFileBuilder(
Path.Join(_testDirectory,
@ -115,17 +114,17 @@ public class WordCountAnalysisTests : AbstractDbTest
.Build())
.Build();
Context.Library.Add(new LibraryBuilder("Test", LibraryType.Book)
context.Library.Add(new LibraryBuilder("Test", LibraryType.Book)
.WithSeries(series)
.Build());
await Context.SaveChangesAsync();
await context.SaveChangesAsync();
var cacheService = new CacheHelper(new FileService());
var service = new WordCountAnalyzerService(Substitute.For<ILogger<WordCountAnalyzerService>>(), UnitOfWork,
Substitute.For<IEventHub>(), cacheService, _readerService, Substitute.For<IMediaErrorService>());
var service = new WordCountAnalyzerService(Substitute.For<ILogger<WordCountAnalyzerService>>(), unitOfWork,
Substitute.For<IEventHub>(), cacheService, readerService, Substitute.For<IMediaErrorService>());
await service.ScanSeries(1, 1);
var chapter2 = new ChapterBuilder("2")
@ -141,7 +140,7 @@ public class WordCountAnalysisTests : AbstractDbTest
.Build());
series.Volumes[0].Chapters.Add(chapter2);
await UnitOfWork.CommitAsync();
await unitOfWork.CommitAsync();
await service.ScanSeries(1, 1);

View File

@ -51,8 +51,8 @@
<ItemGroup>
<PackageReference Include="CsvHelper" Version="33.1.0" />
<PackageReference Include="MailKit" Version="4.12.1" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.6">
<PackageReference Include="MailKit" Version="4.13.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.7">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
@ -66,20 +66,21 @@
<PackageReference Include="Hangfire.InMemory" Version="1.0.0" />
<PackageReference Include="Hangfire.MaximumConcurrentExecutions" Version="1.1.0" />
<PackageReference Include="Hangfire.Storage.SQLite" Version="0.4.2" />
<PackageReference Include="HtmlAgilityPack" Version="1.12.1" />
<PackageReference Include="HtmlAgilityPack" Version="1.12.2" />
<PackageReference Include="MarkdownDeep.NET.Core" Version="1.5.0.4" />
<PackageReference Include="Hangfire.AspNetCore" Version="1.8.20" />
<PackageReference Include="Microsoft.AspNetCore.SignalR" Version="1.2.0" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="9.0.6" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="9.0.6" />
<PackageReference Include="Microsoft.AspNetCore.Identity.EntityFrameworkCore" Version="9.0.6" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="9.0.6" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="9.0.7" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="9.0.7" />
<PackageReference Include="Microsoft.AspNetCore.Identity.EntityFrameworkCore" Version="9.0.7" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="9.0.7" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="9.0.7" />
<PackageReference Include="Microsoft.IO.RecyclableMemoryStream" Version="3.0.1" />
<PackageReference Include="MimeTypeMapOfficial" Version="1.0.17" />
<PackageReference Include="Nager.ArticleNumber" Version="1.0.7" />
<PackageReference Include="NetVips" Version="3.1.0" />
<PackageReference Include="NetVips.Native" Version="8.17.0.1" />
<PackageReference Include="NetVips.Native" Version="8.17.1" />
<PackageReference Include="Polly" Version="8.6.2" />
<PackageReference Include="Serilog" Version="4.3.0" />
<PackageReference Include="Serilog.AspNetCore" Version="9.0.0" />
<PackageReference Include="Serilog.Enrichers.Thread" Version="4.0.0" />
@ -91,15 +92,15 @@
<PackageReference Include="Serilog.Sinks.SignalR.Core" Version="0.1.2" />
<PackageReference Include="SharpCompress" Version="0.40.0" />
<PackageReference Include="SixLabors.ImageSharp" Version="3.1.10" />
<PackageReference Include="SonarAnalyzer.CSharp" Version="10.11.0.117924">
<PackageReference Include="SonarAnalyzer.CSharp" Version="10.15.0.120848">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Swashbuckle.AspNetCore" Version="9.0.1" />
<PackageReference Include="Swashbuckle.AspNetCore.Filters" Version="8.0.3" />
<PackageReference Include="System.Drawing.Common" Version="9.0.6" />
<PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="8.12.0" />
<PackageReference Include="System.IO.Abstractions" Version="22.0.14" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="9.0.3" />
<PackageReference Include="Swashbuckle.AspNetCore.Filters" Version="9.0.0" />
<PackageReference Include="System.Drawing.Common" Version="9.0.7" />
<PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="8.13.0" />
<PackageReference Include="System.IO.Abstractions" Version="22.0.15" />
<PackageReference Include="VersOne.Epub" Version="3.3.4" />
<PackageReference Include="YamlDotNet" Version="16.3.0" />
</ItemGroup>

View File

@ -10,6 +10,7 @@ using API.Data.Repositories;
using API.DTOs;
using API.DTOs.Account;
using API.DTOs.Email;
using API.DTOs.Settings;
using API.Entities;
using API.Entities.Enums;
using API.Errors;
@ -52,6 +53,7 @@ public class AccountController : BaseApiController
private readonly IEmailService _emailService;
private readonly IEventHub _eventHub;
private readonly ILocalizationService _localizationService;
private readonly IOidcService _oidcService;
/// <inheritdoc />
public AccountController(UserManager<AppUser> userManager,
@ -60,7 +62,8 @@ public class AccountController : BaseApiController
ILogger<AccountController> logger,
IMapper mapper, IAccountService accountService,
IEmailService emailService, IEventHub eventHub,
ILocalizationService localizationService)
ILocalizationService localizationService,
IOidcService oidcService)
{
_userManager = userManager;
_signInManager = signInManager;
@ -72,6 +75,50 @@ public class AccountController : BaseApiController
_emailService = emailService;
_eventHub = eventHub;
_localizationService = localizationService;
_oidcService = oidcService;
}
/// <summary>
/// Returns true if OIDC authentication cookies are present
/// </summary>
/// <remarks>Makes no guarantee about their validity</remarks>
/// <returns></returns>
[AllowAnonymous]
[HttpGet("oidc-authenticated")]
public ActionResult<bool> OidcAuthenticated()
{
return HttpContext.Request.Cookies.ContainsKey(OidcService.CookieName);
}
/// <summary>
/// Returns the current user, as it would be returned from login
/// </summary>
/// <returns></returns>
/// <exception cref="UnauthorizedAccessException"></exception>
/// <remarks>Does not return tokens for the user</remarks>
/// <remarks>Updates the last active date for the user</remarks>
[HttpGet]
public async Task<ActionResult<UserDto>> GetCurrentUserAsync()
{
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(User.GetUserId(), AppUserIncludes.UserPreferences | AppUserIncludes.SideNavStreams);
if (user == null) throw new UnauthorizedAccessException();
var roles = await _userManager.GetRolesAsync(user);
if (!roles.Contains(PolicyConstants.LoginRole) && !roles.Contains(PolicyConstants.AdminRole)) return Unauthorized(await _localizationService.Translate(user.Id, "disabled-account"));
try
{
user.UpdateLastActive();
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update last active for {UserName}", user.UserName);
}
_unitOfWork.UserRepository.Update(user);
await _unitOfWork.CommitAsync();
return Ok(await ConstructUserDto(user, roles, false));
}
/// <summary>
@ -151,10 +198,10 @@ public class AccountController : BaseApiController
if (!result.Succeeded) return BadRequest(result.Errors);
// Assign default streams
AddDefaultStreamsToUser(user);
_accountService.AddDefaultStreamsToUser(user);
// Assign default reading profile
await AddDefaultReadingProfileToUser(user);
await _accountService.AddDefaultReadingProfileToUser(user);
var token = await _userManager.GenerateEmailConfirmationTokenAsync(user);
if (string.IsNullOrEmpty(token)) return BadRequest(await _localizationService.Get("en", "confirm-token-gen"));
@ -224,6 +271,11 @@ public class AccountController : BaseApiController
var roles = await _userManager.GetRolesAsync(user);
if (!roles.Contains(PolicyConstants.LoginRole)) return Unauthorized(await _localizationService.Translate(user.Id, "disabled-account"));
var oidcConfig = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).OidcConfig;
// Setting only takes effect if OIDC is functional, and if we're not logging in via ApiKey
var disablePasswordAuthentication = oidcConfig is {Enabled: true, DisablePasswordAuthentication: true} && string.IsNullOrEmpty(loginDto.ApiKey);
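// Admins are exempt from this check, so password login keeps working for admin accounts even when OIDC-only login is enforced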
if (disablePasswordAuthentication && !roles.Contains(PolicyConstants.AdminRole)) return Unauthorized(await _localizationService.Translate(user.Id, "password-authentication-disabled"));
if (string.IsNullOrEmpty(loginDto.ApiKey))
{
var result = await _signInManager
@ -249,7 +301,14 @@ public class AccountController : BaseApiController
}
// Update LastActive on account
user.UpdateLastActive();
try
{
user.UpdateLastActive();
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update last active for {UserName}", user.UserName);
}
// NOTE: This can likely be removed
user.UserPreferences ??= new AppUserPreferences
@ -262,18 +321,28 @@ public class AccountController : BaseApiController
_logger.LogInformation("{UserName} logged in at {Time}", user.UserName, user.LastActive);
return Ok(await ConstructUserDto(user, roles));
}
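// ConstructUserDto only issues Token/RefreshToken when includeTokens is true;
// OIDC cookie sessions skip them (see GetCurrentUserAsync above and refresh-account below).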
private async Task<UserDto> ConstructUserDto(AppUser user, IList<string> roles, bool includeTokens = true)
{
var dto = _mapper.Map<UserDto>(user);
dto.Token = await _tokenService.CreateToken(user);
dto.RefreshToken = await _tokenService.CreateRefreshToken(user);
dto.KavitaVersion = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallVersion))
.Value;
if (includeTokens)
{
dto.Token = await _tokenService.CreateToken(user);
dto.RefreshToken = await _tokenService.CreateRefreshToken(user);
}
dto.Roles = roles;
dto.KavitaVersion = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallVersion)).Value;
var pref = await _unitOfWork.UserRepository.GetPreferencesAsync(user.UserName!);
if (pref == null) return Ok(dto);
if (pref == null) return dto;
pref.Theme ??= await _unitOfWork.SiteThemeRepository.GetDefaultTheme();
dto.Preferences = _mapper.Map<UserPreferencesDto>(pref);
return Ok(dto);
return dto;
}
/// <summary>
@ -286,13 +355,9 @@ public class AccountController : BaseApiController
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(User.GetUserId(), AppUserIncludes.UserPreferences);
if (user == null) return Unauthorized();
var dto = _mapper.Map<UserDto>(user);
dto.Token = await _tokenService.CreateToken(user);
dto.RefreshToken = await _tokenService.CreateRefreshToken(user);
dto.KavitaVersion = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallVersion))
.Value;
dto.Preferences = _mapper.Map<UserPreferencesDto>(user.UserPreferences);
return Ok(dto);
var roles = await _userManager.GetRolesAsync(user);
return Ok(await ConstructUserDto(user, roles, !HttpContext.Request.Cookies.ContainsKey(OidcService.CookieName)));
}
/// <summary>
@ -505,6 +570,7 @@ public class AccountController : BaseApiController
/// </summary>
/// <param name="dto"></param>
/// <returns></returns>
/// <remarks>Users whose <see cref="AppUser.IdentityProvider"/> is not <see cref="IdentityProvider.Kavita"/> cannot be edited if <see cref="OidcConfigDto.SyncUserSettings"/> is true</remarks>
[Authorize(Policy = "RequireAdminRole")]
[HttpPost("update")]
public async Task<ActionResult> UpdateAccount(UpdateUserDto dto)
@ -517,6 +583,16 @@ public class AccountController : BaseApiController
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(dto.UserId, AppUserIncludes.SideNavStreams);
if (user == null) return BadRequest(await _localizationService.Translate(User.GetUserId(), "no-user"));
try
{
if (await _accountService.ChangeIdentityProvider(User.GetUserId(), user, dto.IdentityProvider)) return Ok();
}
catch (KavitaException exception)
{
return BadRequest(exception.Message);
}
// Check if username is changing
if (!user.UserName!.Equals(dto.Username))
{
@ -670,10 +746,10 @@ public class AccountController : BaseApiController
if (!result.Succeeded) return BadRequest(result.Errors);
// Assign default streams
AddDefaultStreamsToUser(user);
_accountService.AddDefaultStreamsToUser(user);
// Assign default reading profile
await AddDefaultReadingProfileToUser(user);
await _accountService.AddDefaultReadingProfileToUser(user);
// Assign Roles
var roles = dto.Roles;
@ -772,29 +848,6 @@ public class AccountController : BaseApiController
return BadRequest(await _localizationService.Translate(User.GetUserId(), "generic-invite-user"));
}
private void AddDefaultStreamsToUser(AppUser user)
{
foreach (var newStream in Seed.DefaultStreams.Select(stream => _mapper.Map<AppUserDashboardStream, AppUserDashboardStream>(stream)))
{
user.DashboardStreams.Add(newStream);
}
foreach (var stream in Seed.DefaultSideNavStreams.Select(stream => _mapper.Map<AppUserSideNavStream, AppUserSideNavStream>(stream)))
{
user.SideNavStreams.Add(stream);
}
}
private async Task AddDefaultReadingProfileToUser(AppUser user)
{
var profile = new AppUserReadingProfileBuilder(user.Id)
.WithName("Default Profile")
.WithKind(ReadingProfileKind.Default)
.Build();
_unitOfWork.AppUserReadingProfileRepository.Add(profile);
await _unitOfWork.CommitAsync();
}
/// <summary>
/// Last step in the authentication flow; confirms the token sent to the user's email
/// </summary>

View File

@ -16,6 +16,7 @@ using API.Extensions;
using API.Helpers.Builders;
using API.Services;
using API.Services.Tasks.Scanner;
using API.Services.Tasks.Scanner.Parser;
using API.SignalR;
using AutoMapper;
using EasyCaching.Core;
@ -83,6 +84,7 @@ public class LibraryController : BaseApiController
.WithManageReadingLists(dto.ManageReadingLists)
.WithAllowScrobbling(dto.AllowScrobbling)
.WithAllowMetadataMatching(dto.AllowMetadataMatching)
.WithEnableMetadata(dto.EnableMetadata)
.Build();
library.LibraryFileTypes = dto.FileGroupTypes
@ -173,6 +175,26 @@ public class LibraryController : BaseApiController
return Ok(_directoryService.ListDirectory(path));
}
/// <summary>
/// For each root, checks whether any supported files exist directly at the root, to warn the user during library creation about an invalid setup
/// </summary>
/// <returns></returns>
[Authorize(Policy = "RequireAdminRole")]
[HttpPost("has-files-at-root")]
public ActionResult<IDictionary<string, bool>> AnyFilesAtRoot(CheckForFilesInFolderRootsDto dto)
{
var results = new Dictionary<string, bool>();
foreach (var root in dto.Roots)
{
results.TryAdd(root,
_directoryService
.GetFilesWithCertainExtensions(root, Parser.SupportedExtensions, SearchOption.TopDirectoryOnly)
.Any());
}
return Ok(results);
}
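
A hypothetical request for the endpoint above; the folder paths are made up and the route assumes the usual api/[controller] convention of BaseApiController.

```csharp
// Illustrative only: DTO from this commit, paths invented
var dto = new CheckForFilesInFolderRootsDto { Roots = new List<string> { "/manga", "/books" } };
// POST /api/Library/has-files-at-root with this body yields a map such as
// { "/manga": true, "/books": false } where true means supported files sit directly at that root.
```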
/// <summary>
/// Return a specific library
/// </summary>
@ -624,6 +646,8 @@ public class LibraryController : BaseApiController
library.AllowScrobbling = dto.AllowScrobbling;
library.AllowMetadataMatching = dto.AllowMetadataMatching;
library.EnableMetadata = dto.EnableMetadata;
library.RemovePrefixForSortName = dto.RemovePrefixForSortName;
library.LibraryFileTypes = dto.FileGroupTypes
.Select(t => new LibraryFileTypeGroup() {FileTypeGroup = t, LibraryId = library.Id})
.Distinct()

View File

@ -0,0 +1,37 @@
using API.Extensions;
using API.Services;
using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Authentication.Cookies;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
namespace API.Controllers;
[Route("[controller]")]
public class OidcController: ControllerBase
{
[AllowAnonymous]
[HttpGet("login")]
public IActionResult Login(string returnUrl = "/")
{
var properties = new AuthenticationProperties { RedirectUri = returnUrl };
return Challenge(properties, IdentityServiceExtensions.OpenIdConnect);
}
[HttpGet("logout")]
public IActionResult Logout()
{
if (!Request.Cookies.ContainsKey(OidcService.CookieName))
{
return Redirect("/");
}
return SignOut(
new AuthenticationProperties { RedirectUri = "/login" },
CookieAuthenticationDefaults.AuthenticationScheme,
IdentityServiceExtensions.OpenIdConnect);
}
}

View File

@ -148,6 +148,18 @@ public class PersonController : BaseApiController
return Ok(_mapper.Map<PersonDto>(person));
}
/// <summary>
/// Validates whether the ASIN is valid (checked as a 10- or 13-digit article number)
/// </summary>
/// <param name="asin"></param>
/// <returns></returns>
[HttpGet("valid-asin")]
public ActionResult<bool> ValidateAsin(string asin)
{
return Ok(!string.IsNullOrEmpty(asin) &&
(ArticleNumberHelper.IsValidIsbn10(asin) || ArticleNumberHelper.IsValidIsbn13(asin)));
}
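
Illustrative inputs for the endpoint above, assuming ArticleNumberHelper applies the standard ISBN-10 (mod 11) and ISBN-13 (mod 10) checksum rules.

```csharp
// Sketch: the sample article numbers are well-known valid ISBNs
ArticleNumberHelper.IsValidIsbn10("0306406152");    // true  -> /valid-asin returns true
ArticleNumberHelper.IsValidIsbn13("9780306406157"); // true  -> /valid-asin returns true
ArticleNumberHelper.IsValidIsbn10("B00EXAMPLE");    // false -> /valid-asin returns false
```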
/// <summary>
/// Attempts to download the cover from CoversDB (Note: Not yet released in Kavita)
/// </summary>

View File

@ -4,6 +4,7 @@ using System.Linq;
using System.Net;
using System.Threading.Tasks;
using API.Data;
using API.DTOs;
using API.DTOs.Email;
using API.DTOs.KavitaPlus.Metadata;
using API.DTOs.Settings;
@ -253,4 +254,55 @@ public class SettingsController : BaseApiController
return BadRequest(ex.Message);
}
}
/// <summary>
/// Import field mappings
/// </summary>
/// <returns></returns>
[Authorize(Policy = "RequireAdminRole")]
[HttpPost("import-field-mappings")]
public async Task<ActionResult<FieldMappingsImportResultDto>> ImportFieldMappings([FromBody] ImportFieldMappingsDto dto)
{
try
{
return Ok(await _settingsService.ImportFieldMappings(dto.Data, dto.Settings));
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an issue importing field mappings");
return BadRequest(ex.Message);
}
}
/// <summary>
/// Retrieve the publicly required OIDC configuration
/// </summary>
/// <returns></returns>
[AllowAnonymous]
[HttpGet("oidc")]
public async Task<ActionResult<OidcPublicConfigDto>> GetOidcConfig()
{
var settings = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).OidcConfig;
var publicConfig = _mapper.Map<OidcPublicConfigDto>(settings);
publicConfig.Enabled = !string.IsNullOrEmpty(settings.Authority) &&
!string.IsNullOrEmpty(settings.ClientId) &&
!string.IsNullOrEmpty(settings.Secret);
return Ok(publicConfig);
}
/// <summary>
/// Validate if the given authority is reachable from the server
/// </summary>
/// <param name="authority"></param>
/// <returns></returns>
[Authorize("RequireAdminRole")]
[HttpPost("is-valid-authority")]
public async Task<ActionResult<bool>> IsValidAuthority([FromBody] AuthorityValidationDto authority)
{
return Ok(await _settingsService.IsValidAuthority(authority.Authority));
}
}

View File

@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using API.Entities.Enums;
namespace API.DTOs.Account;
#nullable enable
@ -25,4 +26,5 @@ public sealed record UpdateUserDto
public AgeRestrictionDto AgeRestriction { get; init; } = default!;
/// <inheritdoc cref="API.Entities.AppUser.Email"/>
public string? Email { get; set; } = default!;
public IdentityProvider IdentityProvider { get; init; } = IdentityProvider.Kavita;
}

View File

@ -0,0 +1,8 @@
using System.Collections.Generic;
namespace API.DTOs;
public sealed record CheckForFilesInFolderRootsDto
{
public ICollection<string> Roots { get; init; }
}

View File

@ -0,0 +1,86 @@
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using API.DTOs.KavitaPlus.Metadata;
namespace API.DTOs;
/// <summary>
/// How Kavita should import the new settings
/// </summary>
public enum ImportMode
{
[Description("Replace")]
Replace = 0,
[Description("Merge")]
Merge = 1,
}
/// <summary>
/// How Kavita should resolve conflicts
/// </summary>
public enum ConflictResolution
{
/// <summary>
/// Require the user to override the default
/// </summary>
[Description("Manual")]
Manual = 0,
/// <summary>
/// Keep current value
/// </summary>
[Description("Keep")]
Keep = 1,
/// <summary>
/// Replace with imported value
/// </summary>
[Description("Replace")]
Replace = 2,
}
public sealed record ImportSettingsDto
{
/// <summary>
/// How Kavita should import the new settings
/// </summary>
public ImportMode ImportMode { get; init; }
/// <summary>
/// Default conflict resolution, override with <see cref="AgeRatingConflictResolutions"/> and <see cref="FieldMappingsConflictResolutions"/>
/// </summary>
public ConflictResolution Resolution { get; init; }
/// <summary>
/// Import <see cref="MetadataSettingsDto.Whitelist"/>
/// </summary>
public bool Whitelist { get; init; }
/// <summary>
/// Import <see cref="MetadataSettingsDto.Blacklist"/>
/// </summary>
public bool Blacklist { get; init; }
/// <summary>
/// Import <see cref="MetadataSettingsDto.AgeRatingMappings"/>
/// </summary>
public bool AgeRatings { get; init; }
/// <summary>
/// Import <see cref="MetadataSettingsDto.FieldMappings"/>
/// </summary>
public bool FieldMappings { get; init; }
/// <summary>
/// Override the <see cref="Resolution"/> for specific age ratings
/// </summary>
/// <remarks>Key is the tag</remarks>
public Dictionary<string, ConflictResolution> AgeRatingConflictResolutions { get; init; }
}
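
For orientation, a sketch of an import request built from the types above; the tag name in the per-rating override is made up.

```csharp
// Illustrative construction: merge the imported settings, keep current values on conflict,
// but replace the mapping for one specific tag
var importSettings = new ImportSettingsDto
{
    ImportMode = ImportMode.Merge,
    Resolution = ConflictResolution.Keep,
    AgeRatings = true,
    FieldMappings = true,
    AgeRatingConflictResolutions = new Dictionary<string, ConflictResolution>
    {
        ["Ecchi"] = ConflictResolution.Replace
    }
};
```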
public sealed record FieldMappingsImportResultDto
{
public bool Success { get; init; }
/// <summary>
/// Only present if <see cref="Success"/> is true
/// </summary>
public MetadataSettingsDto ResultingMetadataSettings { get; init; }
/// <summary>
/// Keys of the conflicting age ratings mappings
/// </summary>
public List<string> AgeRatingConflicts { get; init; }
}

View File

@ -1,4 +1,5 @@
using System.Collections.Generic;
using API.DTOs.Settings;
using API.Entities;
using API.Entities.Enums;
using API.Entities.MetadataMatching;
@ -7,13 +8,18 @@ using NotImplementedException = System.NotImplementedException;
namespace API.DTOs.KavitaPlus.Metadata;
public sealed record MetadataSettingsDto
public sealed record MetadataSettingsDto: FieldMappingsDto
{
/// <summary>
/// If writing any sort of metadata from upstream (AniList, Hardcover) source is allowed
/// </summary>
public bool Enabled { get; set; }
/// <summary>
/// Enable processing of metadata outside K+, e.g. from disk and via the API
/// </summary>
public bool EnableExtendedMetadataProcessing { get; set; }
/// <summary>
/// Allow the Summary to be written
/// </summary>
@ -75,28 +81,11 @@ public sealed record MetadataSettingsDto
/// </summary>
public bool FirstLastPeopleNaming { get; set; }
/// <summary>
/// Any Genres or Tags that, if present, will trigger an Age Rating Override. The highest rating will be prioritized for matching.
/// </summary>
public Dictionary<string, AgeRating> AgeRatingMappings { get; set; }
/// <summary>
/// A list of rules that allow mapping a genre/tag to another genre/tag
/// </summary>
public List<MetadataFieldMappingDto> FieldMappings { get; set; }
/// <summary>
/// A list of overrides that will enable writing to locked fields
/// </summary>
public List<MetadataSettingField> Overrides { get; set; }
/// <summary>
/// Do not allow any Genre/Tag in this list to be written to Kavita
/// </summary>
public List<string> Blacklist { get; set; }
/// <summary>
/// Only allow these Tags to be written to Kavita
/// </summary>
public List<string> Whitelist { get; set; }
/// <summary>
/// Which Roles to allow metadata downloading for
/// </summary>
@ -123,3 +112,30 @@ public sealed record MetadataSettingsDto
return PersonRoles.Contains(character);
}
}
/// <summary>
/// Decoupled from <see cref="MetadataSettingsDto"/> to allow reuse without requiring the full metadata settings in
/// <see cref="ImportFieldMappingsDto"/>
/// </summary>
public record FieldMappingsDto
{
/// <summary>
/// Do not allow any Genre/Tag in this list to be written to Kavita
/// </summary>
public List<string> Blacklist { get; set; }
/// <summary>
/// Only allow these Tags to be written to Kavita
/// </summary>
public List<string> Whitelist { get; set; }
/// <summary>
/// Any Genres or Tags that, if present, will trigger an Age Rating Override. The highest rating will be prioritized for matching.
/// </summary>
public Dictionary<string, AgeRating> AgeRatingMappings { get; set; }
/// <summary>
/// A list of rules that allow mapping a genre/tag to another genre/tag
/// </summary>
public List<MetadataFieldMappingDto> FieldMappings { get; set; }
}

View File

@ -70,4 +70,8 @@ public sealed record LibraryDto
/// Allow Kavita to read metadata (ComicInfo.xml, Epub, PDF)
/// </summary>
public bool EnableMetadata { get; set; } = true;
/// <summary>
/// Should Kavita remove sort articles (e.g. "The") when generating the sort name
/// </summary>
public bool RemovePrefixForSortName { get; set; } = false;
}

View File

@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using API.DTOs.Account;
using API.Entities.Enums;
namespace API.DTOs;
#nullable enable
@ -24,4 +25,5 @@ public sealed record MemberDto
public DateTime LastActiveUtc { get; init; }
public IEnumerable<LibraryDto>? Libraries { get; init; }
public IEnumerable<string>? Roles { get; init; }
public IdentityProvider IdentityProvider { get; init; }
}

View File

@ -0,0 +1,3 @@
namespace API.DTOs.Settings;
public sealed record AuthorityValidationDto(string Authority);

View File

@ -0,0 +1,15 @@
using API.DTOs.KavitaPlus.Metadata;
namespace API.DTOs.Settings;
public sealed record ImportFieldMappingsDto
{
/// <summary>
/// Import settings
/// </summary>
public ImportSettingsDto Settings { get; init; }
/// <summary>
/// Data to import
/// </summary>
public FieldMappingsDto Data { get; init; }
}

View File

@ -0,0 +1,68 @@
#nullable enable
using System.Collections.Generic;
using System.Security.Claims;
using API.Entities.Enums;
namespace API.DTOs.Settings;
/// <summary>
/// All configuration regarding OIDC
/// </summary>
/// <remarks>This class is saved as a JsonObject in the DB, assign default values to prevent unexpected NPE</remarks>
public sealed record OidcConfigDto: OidcPublicConfigDto
{
/// <summary>
/// Optional OpenID Connect Authority URL. Not managed in DB. Managed in appsettings.json and synced to DB.
/// </summary>
public string Authority { get; set; } = string.Empty;
/// <summary>
/// Optional OpenID Connect ClientId, defaults to kavita. Not managed in DB. Managed in appsettings.json and synced to DB.
/// </summary>
public string ClientId { get; set; } = string.Empty;
/// <summary>
/// Optional OpenID Connect Secret. Not managed in DB. Managed in appsettings.json and synced to DB.
/// </summary>
public string Secret { get; set; } = string.Empty;
/// <summary>
/// If true, auto creates a new account when someone logs in via OpenID Connect
/// </summary>
public bool ProvisionAccounts { get; set; } = false;
/// <summary>
/// Require emails to be verified by the OpenID Connect provider when creating accounts on login
/// </summary>
public bool RequireVerifiedEmail { get; set; } = true;
/// <summary>
/// Overwrite Kavita roles, libraries and age rating with OpenID Connect-provided roles on login.
/// </summary>
public bool SyncUserSettings { get; set; } = false;
/// <summary>
/// A prefix that all roles Kavita checks for during sync must have
/// </summary>
public string RolesPrefix { get; set; } = string.Empty;
/// <summary>
/// The JWT claim roles are mapped under, defaults to <see cref="ClaimTypes.Role"/>
/// </summary>
public string RolesClaim { get; set; } = ClaimTypes.Role;
/// <summary>
/// Custom scopes Kavita should request from your OIDC provider
/// </summary>
/// <remarks>Advanced setting</remarks>
public List<string> CustomScopes { get; set; } = [];
// Default values used when SyncUserSettings is false
#region Default user settings
public List<string> DefaultRoles { get; set; } = [];
public List<int> DefaultLibraries { get; set; } = [];
public AgeRating DefaultAgeRestriction { get; set; } = AgeRating.Unknown;
public bool DefaultIncludeUnknowns { get; set; } = false;
#endregion
/// <summary>
/// Returns true if <see cref="Authority"/> has been set
/// </summary>
public bool Enabled => !string.IsNullOrEmpty(Authority);
}

View File

@ -0,0 +1,24 @@
#nullable enable
namespace API.DTOs.Settings;
/**
* The part of the OIDC configuration that is returned by the API without authentication
*/
public record OidcPublicConfigDto
{
/// <summary>
/// Automatically redirect to the OIDC login screen
/// </summary>
public bool AutoLogin { get; set; }
/// <summary>
/// Disables password authentication for non-admin users
/// </summary>
public bool DisablePasswordAuthentication { get; set; }
/// <summary>
/// Name of your provider, used to display on the login screen
/// </summary>
/// <remarks>Defaults to OpenID Connect</remarks>
public string ProviderName { get; set; } = "OpenID Connect";
public bool Enabled { get; set; } = false;
}

View File

@ -92,6 +92,11 @@ public sealed record ServerSettingDto
/// SMTP Configuration
/// </summary>
public SmtpConfigDto SmtpConfig { get; set; }
/// <summary>
/// OIDC Configuration
/// </summary>
public OidcConfigDto OidcConfig { get; set; }
/// <summary>
/// The Date Kavita was first installed
/// </summary>

View File

@ -22,6 +22,10 @@ public sealed record LibraryStatV3
/// </summary>
public bool CreateReadingListsFromMetadata { get; set; }
/// <summary>
/// If the library has metadata turned on
/// </summary>
public bool EnabledMetadata { get; set; }
/// <summary>
/// Type of the Library
/// </summary>
public LibraryType LibraryType { get; set; }

View File

@ -131,6 +131,10 @@ public sealed record ServerInfoV3Dto
/// Is this server using Kavita+
/// </summary>
public bool ActiveKavitaPlusSubscription { get; set; }
/// <summary>
/// Is OIDC enabled
/// </summary>
public bool OidcEnabled { get; set; }
#endregion
#region Users

View File

@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using API.Data.Misc;
using API.Entities.Enums;
using API.Entities.Enums.Device;
namespace API.DTOs.Stats.V3;
@ -76,6 +77,10 @@ public sealed record UserStatV3
/// Roles for this user
/// </summary>
public ICollection<string> Roles { get; set; }
/// <summary>
/// Who manages the user (OIDC, Kavita)
/// </summary>
public IdentityProvider IdentityProvider { get; set; }
}

View File

@ -30,6 +30,8 @@ public sealed record UpdateLibraryDto
public bool AllowMetadataMatching { get; init; }
[Required]
public bool EnableMetadata { get; init; }
[Required]
public bool RemovePrefixForSortName { get; init; }
/// <summary>
/// What types of files to allow the scanner to pickup
/// </summary>

View File

@ -1,6 +1,8 @@

using System;
using System.Collections.Generic;
using API.DTOs.Account;
using API.Entities;
using API.Entities.Enums;
namespace API.DTOs;
#nullable enable
@ -9,10 +11,13 @@ public sealed record UserDto
{
public string Username { get; init; } = null!;
public string Email { get; init; } = null!;
public IList<string> Roles { get; set; } = [];
public string Token { get; set; } = null!;
public string? RefreshToken { get; set; }
public string? ApiKey { get; init; }
public UserPreferencesDto? Preferences { get; set; }
public AgeRestrictionDto? AgeRestriction { get; init; }
public string KavitaVersion { get; set; }
/// <inheritdoc cref="AppUser.IdentityProvider"/>
public IdentityProvider IdentityProvider { get; init; }
}

View File

@ -300,6 +300,10 @@ public sealed class DataContext : IdentityDbContext<AppUser, AppRole, int,
v => JsonSerializer.Deserialize<IList<MetadataSettingField>>(v, JsonSerializerOptions.Default) ?? new List<MetadataSettingField>())
.HasColumnType("TEXT")
.HasDefaultValue(new List<MetadataSettingField>());
builder.Entity<AppUser>()
.Property(user => user.IdentityProvider)
.HasDefaultValue(IdentityProvider.Kavita);
}
#nullable enable

View File

@ -0,0 +1,51 @@
using System;
using System.Threading.Tasks;
using API.Entities.History;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// v0.8.8 - If Kavita+ users already had Metadata Matching settings, ensure the new non-Kavita+ system is enabled to match
/// the existing experience
/// </summary>
public static class ManualMigrateEnableMetadataMatchingDefault
{
public static async Task Migrate(DataContext context, IUnitOfWork unitOfWork, ILogger<Program> logger)
{
if (await context.ManualMigrationHistory.AnyAsync(m => m.Name == "ManualMigrateEnableMetadataMatchingDefault"))
{
return;
}
logger.LogCritical("Running ManualMigrateEnableMetadataMatchingDefault migration - Please be patient, this may take some time. This is not an error");
var settings = await unitOfWork.SettingsRepository.GetMetadataSettingDto();
var shouldBeEnabled = settings != null && (settings.Enabled || settings.AgeRatingMappings.Count != 0 ||
settings.Blacklist.Count != 0 || settings.Whitelist.Count != 0 ||
settings.FieldMappings.Count != 0);
if (shouldBeEnabled && !settings.EnableExtendedMetadataProcessing)
{
var mSettings = await unitOfWork.SettingsRepository.GetMetadataSettings();
mSettings.EnableExtendedMetadataProcessing = shouldBeEnabled;
await unitOfWork.CommitAsync();
}
await context.ManualMigrationHistory.AddAsync(new ManualMigrationHistory()
{
Name = "ManualMigrateEnableMetadataMatchingDefault",
ProductVersion = BuildInfo.Version.ToString(),
RanAt = DateTime.UtcNow
});
await context.SaveChangesAsync();
logger.LogCritical("Running ManualMigrateEnableMetadataMatchingDefault migration - Completed. This is not an error");
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,29 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class LibraryRemoveSortPrefix : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<bool>(
name: "RemovePrefixForSortName",
table: "Library",
type: "INTEGER",
nullable: false,
defaultValue: false);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "RemovePrefixForSortName",
table: "Library");
}
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,29 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class AddEnableExtendedMetadataProcessing : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<bool>(
name: "EnableExtendedMetadataProcessing",
table: "MetadataSettings",
type: "INTEGER",
nullable: false,
defaultValue: false);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "EnableExtendedMetadataProcessing",
table: "MetadataSettings");
}
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,39 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class OpenIDConnect : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<int>(
name: "IdentityProvider",
table: "AspNetUsers",
type: "INTEGER",
nullable: false,
defaultValue: 0);
migrationBuilder.AddColumn<string>(
name: "OidcId",
table: "AspNetUsers",
type: "TEXT",
nullable: true);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "IdentityProvider",
table: "AspNetUsers");
migrationBuilder.DropColumn(
name: "OidcId",
table: "AspNetUsers");
}
}
}

View File

@ -17,7 +17,7 @@ namespace API.Data.Migrations
protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder.HasAnnotation("ProductVersion", "9.0.6");
modelBuilder.HasAnnotation("ProductVersion", "9.0.7");
modelBuilder.Entity("API.Entities.AppRole", b =>
{
@ -90,6 +90,11 @@ namespace API.Data.Migrations
b.Property<bool>("HasRunScrobbleEventGeneration")
.HasColumnType("INTEGER");
b.Property<int>("IdentityProvider")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER")
.HasDefaultValue(0);
b.Property<DateTime>("LastActive")
.HasColumnType("TEXT");
@ -116,6 +121,9 @@ namespace API.Data.Migrations
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("OidcId")
.HasColumnType("TEXT");
b.Property<string>("PasswordHash")
.HasColumnType("TEXT");
@ -1341,6 +1349,9 @@ namespace API.Data.Migrations
b.Property<string>("PrimaryColor")
.HasColumnType("TEXT");
b.Property<bool>("RemovePrefixForSortName")
.HasColumnType("INTEGER");
b.Property<string>("SecondaryColor")
.HasColumnType("TEXT");
@ -1859,6 +1870,9 @@ namespace API.Data.Migrations
.HasColumnType("INTEGER")
.HasDefaultValue(true);
b.Property<bool>("EnableExtendedMetadataProcessing")
.HasColumnType("INTEGER");
b.Property<bool>("EnableGenres")
.HasColumnType("INTEGER");
@ -3634,7 +3648,8 @@ namespace API.Data.Migrations
b.Navigation("TableOfContents");
b.Navigation("UserPreferences");
b.Navigation("UserPreferences")
.IsRequired();
b.Navigation("UserRoles");

View File

@ -107,6 +107,13 @@ public interface IUserRepository
Task<IList<AppUserSideNavStream>> GetDashboardStreamsByIds(IList<int> streamIds);
Task<IEnumerable<UserTokenInfo>> GetUserTokenInfo();
Task<AppUser?> GetUserByDeviceEmail(string deviceEmail);
/// <summary>
/// Try getting a user by the id provided by OIDC
/// </summary>
/// <param name="oidcId"></param>
/// <param name="includes"></param>
/// <returns></returns>
Task<AppUser?> GetByOidcId(string? oidcId, AppUserIncludes includes = AppUserIncludes.None);
}
public class UserRepository : IUserRepository
@ -557,6 +564,16 @@ public class UserRepository : IUserRepository
.FirstOrDefaultAsync();
}
public async Task<AppUser?> GetByOidcId(string? oidcId, AppUserIncludes includes = AppUserIncludes.None)
{
if (string.IsNullOrEmpty(oidcId)) return null;
return await _context.AppUser
.Where(u => u.OidcId == oidcId)
.Includes(includes)
.FirstOrDefaultAsync();
}
public async Task<IEnumerable<AppUser>> GetAdminUsersAsync()
{
@ -789,6 +806,7 @@ public class UserRepository : IUserRepository
LastActiveUtc = u.LastActiveUtc,
Roles = u.UserRoles.Select(r => r.Role.Name).ToList(),
IsPending = !u.EmailConfirmed,
IdentityProvider = u.IdentityProvider,
AgeRestriction = new AgeRestrictionDto()
{
AgeRating = u.AgeRestriction,
@ -800,7 +818,7 @@ public class UserRepository : IUserRepository
Type = l.Type,
LastScanned = l.LastScanned,
Folders = l.Folders.Select(x => x.Path).ToList()
}).ToList()
}).ToList(),
})
.AsSplitQuery()
.AsNoTracking()

View File

@ -5,9 +5,11 @@ using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text.Json;
using System.Threading.Tasks;
using API.Constants;
using API.Data.Repositories;
using API.DTOs.Settings;
using API.Entities;
using API.Entities.Enums;
using API.Entities.Enums.Theme;
@ -252,6 +254,7 @@ public static class Seed
new() {
Key = ServerSettingKey.CacheSize, Value = Configuration.DefaultCacheMemory + string.Empty
}, // Not used from DB, but DB is sync with appSettings.json
new() { Key = ServerSettingKey.OidcConfiguration, Value = JsonSerializer.Serialize(new OidcConfigDto())},
new() {Key = ServerSettingKey.EmailHost, Value = string.Empty},
new() {Key = ServerSettingKey.EmailPort, Value = string.Empty},
@ -289,9 +292,29 @@ public static class Seed
(await context.ServerSetting.FirstAsync(s => s.Key == ServerSettingKey.CacheSize)).Value =
Configuration.CacheSize + string.Empty;
await SetOidcSettingsFromDisk(context);
await context.SaveChangesAsync();
}
public static async Task SetOidcSettingsFromDisk(DataContext context)
{
var oidcSettingEntry = await context.ServerSetting
.FirstOrDefaultAsync(setting => setting.Key == ServerSettingKey.OidcConfiguration);
var storedOidcSettings = JsonSerializer.Deserialize<OidcConfigDto>(oidcSettingEntry!.Value)!;
var diskOidcSettings = Configuration.OidcSettings;
storedOidcSettings.Authority = diskOidcSettings.Authority;
storedOidcSettings.ClientId = diskOidcSettings.ClientId;
storedOidcSettings.Secret = diskOidcSettings.Secret;
storedOidcSettings.CustomScopes = diskOidcSettings.CustomScopes;
oidcSettingEntry.Value = JsonSerializer.Serialize(storedOidcSettings);
}
public static async Task SeedMetadataSettings(DataContext context)
{
await context.Database.EnsureCreatedAsync();

View File

@ -1,6 +1,8 @@
using System;
#nullable enable
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using API.DTOs.Settings;
using API.Entities.Enums;
using API.Entities.Interfaces;
using API.Entities.Scrobble;
@ -89,6 +91,15 @@ public class AppUser : IdentityUser<int>, IHasConcurrencyToken
/// <remarks>Kavita+ only</remarks>
public DateTime ScrobbleEventGenerationRan { get; set; }
/// <summary>
/// The sub returned by the OIDC provider
/// </summary>
public string? OidcId { get; set; }
/// <summary>
/// The IdentityProvider for the user, defaults to <see cref="Enums.IdentityProvider.Kavita"/>
/// </summary>
public IdentityProvider IdentityProvider { get; set; } = IdentityProvider.Kavita;
/// <summary>
/// A list of Series the user doesn't want scrobbling for

View File

@ -0,0 +1,14 @@
using System.ComponentModel;
namespace API.Entities.Enums;
/// <summary>
/// Who provides the identity of the user
/// </summary>
public enum IdentityProvider
{
[Description("Kavita")]
Kavita = 0,
[Description("OpenID Connect")]
OpenIdConnect = 1,
}

View File

@ -197,4 +197,10 @@ public enum ServerSettingKey
/// </summary>
[Description("FirstInstallVersion")]
FirstInstallVersion = 39,
/// <summary>
/// A JSON object of type <see cref="API.DTOs.Settings.OidcConfigDto"/>
/// </summary>
[Description("OidcConfiguration")]
OidcConfiguration = 40,
}

View File

@ -52,6 +52,10 @@ public class Library : IEntityDate, IHasCoverImage
/// Should Kavita read metadata files from the library
/// </summary>
public bool EnableMetadata { get; set; } = true;
/// <summary>
/// Should Kavita remove sort articles (e.g. "The") when generating the sort name
/// </summary>
public bool RemovePrefixForSortName { get; set; } = false;
public DateTime Created { get; set; }

View File

@ -14,6 +14,11 @@ public class MetadataSettings
/// </summary>
public bool Enabled { get; set; }
/// <summary>
/// Enable processing of metadata outside K+, e.g. from disk and via the API
/// </summary>
public bool EnableExtendedMetadataProcessing { get; set; }
#region Series Metadata
/// <summary>

View File

@ -4,12 +4,14 @@ using API.Data;
using API.Helpers;
using API.Services;
using API.Services.Plus;
using API.Services.Store;
using API.Services.Tasks;
using API.Services.Tasks.Metadata;
using API.Services.Tasks.Scanner;
using API.SignalR;
using API.SignalR.Presence;
using Kavita.Common;
using Microsoft.AspNetCore.Authentication.Cookies;
using Microsoft.AspNetCore.Hosting;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Diagnostics;
@ -83,6 +85,8 @@ public static class ApplicationServiceExtensions
services.AddScoped<ISmartCollectionSyncService, SmartCollectionSyncService>();
services.AddScoped<IWantToReadSyncService, WantToReadSyncService>();
services.AddScoped<IOidcService, OidcService>();
services.AddSqLite();
services.AddSignalR(opt => opt.EnableDetailedErrors = true);
@ -106,6 +110,7 @@ public static class ApplicationServiceExtensions
options.SizeLimit = Configuration.CacheSize * 1024 * 1024; // 75 MB
options.CompactionPercentage = 0.1; // LRU compaction (10%)
});
services.AddSingleton<ITicketStore, CustomTicketStore>();
services.AddSwaggerGen(g =>
{

View File

@ -1,4 +1,7 @@
using System.Security.Claims;
using System.Collections.Generic;
using System.Linq;
using System.Security.Claims;
using API.Constants;
using Kavita.Common;
using JwtRegisteredClaimNames = Microsoft.IdentityModel.JsonWebTokens.JwtRegisteredClaimNames;
@ -8,6 +11,8 @@ namespace API.Extensions;
public static class ClaimsPrincipalExtensions
{
private const string NotAuthenticatedMessage = "User is not authenticated";
private const string EmailVerifiedClaimType = "email_verified";
/// <summary>
/// Gets the authenticated user's username
/// </summary>
@ -26,4 +31,26 @@ public static class ClaimsPrincipalExtensions
var userClaim = user.FindFirst(ClaimTypes.NameIdentifier) ?? throw new KavitaException(NotAuthenticatedMessage);
return int.Parse(userClaim.Value);
}
public static bool HasVerifiedEmail(this ClaimsPrincipal user)
{
var emailVerified = user.FindFirst(EmailVerifiedClaimType);
if (emailVerified == null) return false;
if (!bool.TryParse(emailVerified.Value, out bool emailVerifiedValue) || !emailVerifiedValue)
{
return false;
}
return true;
}
public static IList<string> GetClaimsWithPrefix(this ClaimsPrincipal claimsPrincipal, string claimType, string prefix)
{
return claimsPrincipal
.FindAll(claimType)
.Where(c => c.Value.StartsWith(prefix))
.Select(c => c.Value.TrimPrefix(prefix))
.ToList();
}
}
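
A sketch of how the new claim helpers above might be used during OIDC sync; `principal` is a hypothetical ClaimsPrincipal and the "kavita-" prefix mirrors OidcConfigDto.RolesPrefix, both assumptions for illustration.

```csharp
// Refuse to provision an account when the provider did not verify the email
if (!principal.HasVerifiedEmail())
{
    // e.g. abort when RequireVerifiedEmail is enabled
}

// Collect prefixed role claims and strip the prefix for Kavita role names
var kavitaRoles = principal.GetClaimsWithPrefix(ClaimTypes.Role, "kavita-");
// ["kavita-Admin", "kavita-Download"] -> ["Admin", "Download"]
```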

View File

@ -0,0 +1,43 @@
#nullable enable
using System;
using System.ComponentModel;
using System.Reflection;
namespace API.Extensions;
public static class EnumExtensions
{
/// <summary>
/// Extension on Enum.TryParse which also tries matching on the description attribute
/// </summary>
/// <returns>True if a match was found</returns>
/// <remarks>First tries Enum.TryParse, then falls back to the more expensive operation</remarks>
public static bool TryParse<TEnum>(string? value, out TEnum result) where TEnum : struct, Enum
{
result = default;
if (string.IsNullOrEmpty(value))
{
return false;
}
if (Enum.TryParse(value, out result))
{
return true;
}
foreach (var field in typeof(TEnum).GetFields(BindingFlags.Public | BindingFlags.Static))
{
var description = field.GetCustomAttribute<DescriptionAttribute>()?.Description;
if (!string.IsNullOrEmpty(description) &&
string.Equals(description, value, StringComparison.OrdinalIgnoreCase))
{
result = (TEnum)field.GetValue(null)!;
return true;
}
}
return false;
}
}
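
A sketch of the two resolution paths, using the IdentityProvider enum added in this commit; the expected values follow from its Description attributes.

```csharp
// Fast path: matches the enum member name via Enum.TryParse
EnumExtensions.TryParse<IdentityProvider>("OpenIdConnect", out var byName);
// byName == IdentityProvider.OpenIdConnect

// Fallback path: matches the [Description("OpenID Connect")] attribute
EnumExtensions.TryParse<IdentityProvider>("OpenID Connect", out var byDescription);
// byDescription == IdentityProvider.OpenIdConnect

var found = EnumExtensions.TryParse<IdentityProvider>("SAML", out _);
// found == false
```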

View File

@ -6,6 +6,7 @@ using API.Data.Misc;
using API.Entities;
using API.Entities.Enums;
using API.Entities.Metadata;
using Microsoft.AspNetCore.Identity;
namespace API.Extensions;
#nullable enable

View File

@ -1,21 +1,43 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Claims;
using System.Text;
using System.Threading.Tasks;
using API.Constants;
using API.Data;
using API.Entities;
using API.Services;
using Kavita.Common;
using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Authentication.Cookies;
using Microsoft.AspNetCore.Authentication.JwtBearer;
using Microsoft.AspNetCore.Authentication.OpenIdConnect;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.IdentityModel.Protocols.OpenIdConnect;
using Microsoft.IdentityModel.Tokens;
using MessageReceivedContext = Microsoft.AspNetCore.Authentication.JwtBearer.MessageReceivedContext;
using TokenValidatedContext = Microsoft.AspNetCore.Authentication.OpenIdConnect.TokenValidatedContext;
namespace API.Extensions;
#nullable enable
public static class IdentityServiceExtensions
{
public static IServiceCollection AddIdentityServices(this IServiceCollection services, IConfiguration config)
private const string DynamicHybrid = nameof(DynamicHybrid);
public const string OpenIdConnect = nameof(OpenIdConnect);
private const string LocalIdentity = nameof(LocalIdentity);
private const string OidcCallback = "/signin-oidc";
private const string OidcLogoutCallback = "/signout-callback-oidc";
public static IServiceCollection AddIdentityServices(this IServiceCollection services, IConfiguration config, IWebHostEnvironment environment)
{
services.Configure<IdentityOptions>(options =>
{
@ -47,42 +69,264 @@ public static class IdentityServiceExtensions
.AddRoleValidator<RoleValidator<AppRole>>()
.AddEntityFrameworkStores<DataContext>();
var oidcSettings = Configuration.OidcSettings;
services.AddAuthentication(JwtBearerDefaults.AuthenticationScheme)
.AddJwtBearer(options =>
var auth = services.AddAuthentication(DynamicHybrid)
.AddPolicyScheme(DynamicHybrid, JwtBearerDefaults.AuthenticationScheme, options =>
{
options.TokenValidationParameters = new TokenValidationParameters()
{
ValidateIssuerSigningKey = true,
IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(config["TokenKey"]!)),
ValidateIssuer = false,
ValidateAudience = false,
ValidIssuer = "Kavita"
};
var enabled = oidcSettings.Enabled;
options.Events = new JwtBearerEvents()
options.ForwardDefaultSelector = ctx =>
{
OnMessageReceived = context =>
if (!enabled) return LocalIdentity;
if (ctx.Request.Path.StartsWithSegments(OidcCallback) ||
ctx.Request.Path.StartsWithSegments(OidcLogoutCallback))
{
var accessToken = context.Request.Query["access_token"];
var path = context.HttpContext.Request.Path;
// Only use query string based token on SignalR hubs
if (!string.IsNullOrEmpty(accessToken) && path.StartsWithSegments("/hubs"))
{
context.Token = accessToken;
}
return Task.CompletedTask;
return OpenIdConnect;
}
if (ctx.Request.Headers.Authorization.Count != 0)
{
return LocalIdentity;
}
if (ctx.Request.Cookies.ContainsKey(OidcService.CookieName))
{
return OpenIdConnect;
}
return LocalIdentity;
};
});
services.AddAuthorization(opt =>
if (oidcSettings.Enabled)
{
opt.AddPolicy("RequireAdminRole", policy => policy.RequireRole(PolicyConstants.AdminRole));
opt.AddPolicy("RequireDownloadRole", policy => policy.RequireRole(PolicyConstants.DownloadRole, PolicyConstants.AdminRole));
opt.AddPolicy("RequireChangePasswordRole", policy => policy.RequireRole(PolicyConstants.ChangePasswordRole, PolicyConstants.AdminRole));
services.SetupOpenIdConnectAuthentication(auth, oidcSettings, environment);
}
auth.AddJwtBearer(LocalIdentity, options =>
{
options.TokenValidationParameters = new TokenValidationParameters
{
ValidateIssuerSigningKey = true,
IssuerSigningKey = new SymmetricSecurityKey(Encoding.UTF8.GetBytes(config["TokenKey"]!)),
ValidateIssuer = false,
ValidateAudience = false,
ValidIssuer = "Kavita",
};
options.Events = new JwtBearerEvents
{
OnMessageReceived = SetTokenFromQuery,
};
});
services.AddAuthorizationBuilder()
.AddPolicy("RequireAdminRole", policy => policy.RequireRole(PolicyConstants.AdminRole))
.AddPolicy("RequireDownloadRole", policy => policy.RequireRole(PolicyConstants.DownloadRole, PolicyConstants.AdminRole))
.AddPolicy("RequireChangePasswordRole", policy => policy.RequireRole(PolicyConstants.ChangePasswordRole, PolicyConstants.AdminRole));
return services;
}
private static void SetupOpenIdConnectAuthentication(this IServiceCollection services, AuthenticationBuilder auth,
Configuration.OpenIdConnectSettings settings, IWebHostEnvironment environment)
{
var isDevelopment = environment.IsEnvironment(Environments.Development);
var baseUrl = Configuration.BaseUrl;
var apiPrefix = baseUrl + "api";
var hubsPrefix = baseUrl + "hubs";
services.AddOptions<CookieAuthenticationOptions>(CookieAuthenticationDefaults.AuthenticationScheme).Configure<ITicketStore>((options, store) =>
{
options.ExpireTimeSpan = TimeSpan.FromDays(7);
options.SlidingExpiration = true;
options.Cookie.HttpOnly = true;
options.Cookie.IsEssential = true;
options.Cookie.MaxAge = TimeSpan.FromDays(7);
options.SessionStore = store;
if (isDevelopment)
{
options.Cookie.Domain = null;
}
options.Events = new CookieAuthenticationEvents
{
OnValidatePrincipal = async ctx =>
{
var oidcService = ctx.HttpContext.RequestServices.GetRequiredService<IOidcService>();
var user = await oidcService.RefreshCookieToken(ctx);
if (user != null)
{
var claims = await OidcService.ConstructNewClaimsList(ctx.HttpContext.RequestServices, ctx.Principal, user!, false);
ctx.ReplacePrincipal(new ClaimsPrincipal(new ClaimsIdentity(claims, ctx.Scheme.Name)));
}
},
OnRedirectToAccessDenied = ctx =>
{
ctx.Response.StatusCode = StatusCodes.Status401Unauthorized;
return Task.CompletedTask;
},
};
});
auth.AddCookie(CookieAuthenticationDefaults.AuthenticationScheme);
auth.AddOpenIdConnect(OpenIdConnect, options =>
{
options.Authority = settings.Authority;
options.ClientId = settings.ClientId;
options.ClientSecret = settings.Secret;
options.RequireHttpsMetadata = options.Authority.StartsWith("https://");
options.SignInScheme = CookieAuthenticationDefaults.AuthenticationScheme;
options.ResponseType = OpenIdConnectResponseType.Code;
options.CallbackPath = OidcCallback;
options.SignedOutCallbackPath = OidcLogoutCallback;
options.SaveTokens = true;
options.GetClaimsFromUserInfoEndpoint = true;
options.Scope.Clear();
options.Scope.Add("openid");
options.Scope.Add("profile");
options.Scope.Add("offline_access");
options.Scope.Add("roles");
options.Scope.Add("email");
foreach (var customScope in settings.CustomScopes)
{
options.Scope.Add(customScope);
}
options.Events = new OpenIdConnectEvents
{
OnTokenValidated = OidcClaimsPrincipalConverter,
OnAuthenticationFailed = ctx =>
{
ctx.Response.Redirect(baseUrl + "login?skipAutoLogin=true&error=" + Uri.EscapeDataString(ctx.Exception.Message));
ctx.HandleResponse();
return Task.CompletedTask;
},
OnRedirectToIdentityProviderForSignOut = ctx =>
{
if (!isDevelopment && !string.IsNullOrEmpty(ctx.ProtocolMessage.PostLogoutRedirectUri))
{
ctx.ProtocolMessage.PostLogoutRedirectUri = ctx.ProtocolMessage.PostLogoutRedirectUri.Replace("http://", "https://");
}
return Task.CompletedTask;
},
OnRedirectToIdentityProvider = ctx =>
{
// Intercept redirects on API requests and instead return 401
// These redirects are automatic logins triggered when .NET finds a cookie it can't match in the cookie store, e.g. after a restart
if (ctx.Request.Path.StartsWithSegments(apiPrefix) || ctx.Request.Path.StartsWithSegments(hubsPrefix))
{
ctx.Response.StatusCode = StatusCodes.Status401Unauthorized;
ctx.HandleResponse();
return Task.CompletedTask;
}
if (!isDevelopment && !string.IsNullOrEmpty(ctx.ProtocolMessage.RedirectUri))
{
ctx.ProtocolMessage.RedirectUri = ctx.ProtocolMessage.RedirectUri.Replace("http://", "https://");
}
return Task.CompletedTask;
},
};
});
}
/// <summary>
/// Called after the redirect from the OIDC provider; tries to match the user and updates the principal
/// to have the correct claims and properties. This is required to later auto-refresh the token and to ensure .NET knows which
/// Kavita roles the user has
/// </summary>
/// <param name="ctx"></param>
private static async Task OidcClaimsPrincipalConverter(TokenValidatedContext ctx)
{
if (ctx.Principal == null) return;
var oidcService = ctx.HttpContext.RequestServices.GetRequiredService<IOidcService>();
var user = await oidcService.LoginOrCreate(ctx.Request, ctx.Principal);
if (user == null)
{
throw new KavitaException("errors.oidc.no-account");
}
var claims = await OidcService.ConstructNewClaimsList(ctx.HttpContext.RequestServices, ctx.Principal, user);
var tokens = CopyOidcTokens(ctx);
var identity = new ClaimsIdentity(claims, ctx.Scheme.Name);
var principal = new ClaimsPrincipal(identity);
ctx.Properties ??= new AuthenticationProperties();
ctx.Properties.StoreTokens(tokens);
ctx.HttpContext.User = principal;
ctx.Principal = principal;
ctx.Success();
}
/// <summary>
/// Copy tokens returned by the OIDC provider that we require later
/// </summary>
/// <param name="ctx"></param>
/// <returns></returns>
private static List<AuthenticationToken> CopyOidcTokens(TokenValidatedContext ctx)
{
if (ctx.TokenEndpointResponse == null)
{
return [];
}
var tokens = new List<AuthenticationToken>();
if (!string.IsNullOrEmpty(ctx.TokenEndpointResponse.RefreshToken))
{
tokens.Add(new AuthenticationToken { Name = OidcService.RefreshToken, Value = ctx.TokenEndpointResponse.RefreshToken });
}
else
{
var logger = ctx.HttpContext.RequestServices.GetRequiredService<ILogger<OidcService>>();
logger.LogWarning("OIDC login without refresh token, automatic sync will not work for this user");
}
if (!string.IsNullOrEmpty(ctx.TokenEndpointResponse.IdToken))
{
tokens.Add(new AuthenticationToken { Name = OidcService.IdToken, Value = ctx.TokenEndpointResponse.IdToken });
}
if (!string.IsNullOrEmpty(ctx.TokenEndpointResponse.ExpiresIn))
{
var expiresAt = DateTimeOffset.UtcNow.AddSeconds(double.Parse(ctx.TokenEndpointResponse.ExpiresIn));
tokens.Add(new AuthenticationToken { Name = OidcService.ExpiresAt, Value = expiresAt.ToString("o") });
}
return tokens;
}
private static Task SetTokenFromQuery(MessageReceivedContext context)
{
var accessToken = context.Request.Query["access_token"];
var path = context.HttpContext.Request.Path;
// Only use query string based token on SignalR hubs
if (!string.IsNullOrEmpty(accessToken) && path.StartsWithSegments("/hubs"))
{
context.Token = accessToken;
}
return Task.CompletedTask;
}
}

View File

@ -52,4 +52,33 @@ public static class StringExtensions
{
return string.IsNullOrEmpty(value) ? defaultValue : double.Parse(value, CultureInfo.InvariantCulture);
}
public static string TrimPrefix(this string? value, string prefix)
{
if (string.IsNullOrEmpty(value)) return string.Empty;
if (!value.StartsWith(prefix)) return value;
return value.Substring(prefix.Length);
}
/// <summary>
/// Censor the input string by masking all but the first character.
/// </summary>
/// <param name="input"></param>
/// <returns></returns>
/// <remarks>If the input is an email (contains @), the domain will remain untouched</remarks>
public static string Censor(this string? input)
{
if (string.IsNullOrWhiteSpace(input)) return input ?? string.Empty;
var atIdx = input.IndexOf('@');
if (atIdx == -1)
{
return $"{input[0]}{new string('*', input.Length - 1)}";
}
return input[0] + new string('*', atIdx - 1) + input[atIdx..];
}
}
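
A behaviour sketch for the two helpers above; all inputs are made up.

```csharp
// Censor masks everything after the first character, keeping the domain for emails
"kavita".Censor();                    // "k*****"
"admin@example.com".Censor();         // "a****@example.com"
((string?)null).Censor();             // ""

// TrimPrefix only strips the prefix when it is actually present
"kavita-Admin".TrimPrefix("kavita-"); // "Admin"
"Admin".TrimPrefix("kavita-");        // "Admin" (prefix absent, value returned as-is)
```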

View File

@ -386,7 +386,6 @@ public class AutoMapperProfiles : Profile
.ForMember(dest => dest.Overrides, opt => opt.MapFrom(src => src.Overrides ?? new List<MetadataSettingField>()))
.ForMember(dest => dest.AgeRatingMappings, opt => opt.MapFrom(src => src.AgeRatingMappings ?? new Dictionary<string, AgeRating>()));
CreateMap<OidcConfigDto, OidcPublicConfigDto>();
}
}

View File

@ -0,0 +1,101 @@
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
namespace API.Helpers;
/// <summary>
/// Responsible for parsing book titles such as "The man on the street" and removing the prefix, producing "man on the street".
/// </summary>
/// <remarks>This code is performance sensitive</remarks>
public static class BookSortTitlePrefixHelper
{
private static readonly Dictionary<string, byte> PrefixLookup;
private static readonly Dictionary<char, List<string>> PrefixesByFirstChar;
static BookSortTitlePrefixHelper()
{
var prefixes = new[]
{
// English
"the", "a", "an",
// Spanish
"el", "la", "los", "las", "un", "una", "unos", "unas",
// French
"le", "la", "les", "un", "une", "des",
// German
"der", "die", "das", "den", "dem", "ein", "eine", "einen", "einer",
// Italian
"il", "lo", "la", "gli", "le", "un", "uno", "una",
// Portuguese
"o", "a", "os", "as", "um", "uma", "uns", "umas",
// Russian (common short prepositions)
"в", "на", "с", "к", "от", "для",
};
// Build lookup structures
PrefixLookup = new Dictionary<string, byte>(prefixes.Length, StringComparer.OrdinalIgnoreCase);
PrefixesByFirstChar = new Dictionary<char, List<string>>();
foreach (var prefix in prefixes)
{
PrefixLookup[prefix] = 1;
var firstChar = char.ToLowerInvariant(prefix[0]);
if (!PrefixesByFirstChar.TryGetValue(firstChar, out var list))
{
list = [];
PrefixesByFirstChar[firstChar] = list;
}
list.Add(prefix);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static ReadOnlySpan<char> GetSortTitle(ReadOnlySpan<char> title)
{
if (title.IsEmpty) return title;
// Fast detection of script type by first character
var firstChar = title[0];
// CJK Unicode ranges - no processing needed for most cases
if ((firstChar >= 0x4E00 && firstChar <= 0x9FFF) || // CJK Unified
(firstChar >= 0x3040 && firstChar <= 0x309F) || // Hiragana
(firstChar >= 0x30A0 && firstChar <= 0x30FF)) // Katakana
{
return title;
}
var firstSpaceIndex = title.IndexOf(' ');
if (firstSpaceIndex <= 0) return title;
var potentialPrefix = title.Slice(0, firstSpaceIndex);
// Fast path: check if first character could match any prefix
firstChar = char.ToLowerInvariant(potentialPrefix[0]);
if (!PrefixesByFirstChar.ContainsKey(firstChar))
return title;
// Only do the expensive lookup if first character matches
if (PrefixLookup.ContainsKey(potentialPrefix.ToString()))
{
var remainder = title.Slice(firstSpaceIndex + 1);
return remainder.IsEmpty ? title : remainder;
}
return title;
}
/// <summary>
/// Removes the sort prefix
/// </summary>
/// <param name="title"></param>
/// <returns></returns>
public static string GetSortTitle(string title)
{
var result = GetSortTitle(title.AsSpan());
return result.ToString();
}
}
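
A minimal usage sketch of the helper above; the inputs are illustrative and the expected results follow the prefix table as diffed.

```csharp
// Prefix removed when the first word matches the lookup table
var sorted = BookSortTitlePrefixHelper.GetSortTitle("The Man on the Street");
// sorted == "Man on the Street"

// Single-word and CJK titles pass through unchanged
var unchanged = BookSortTitlePrefixHelper.GetSortTitle("Dune");
// unchanged == "Dune"
```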

View File

@ -2,6 +2,7 @@
using System.Linq;
using API.Data;
using API.Entities;
using API.Entities.Enums;
using Kavita.Common;
namespace API.Helpers.Builders;
@ -68,4 +69,10 @@ public class AppUserBuilder : IEntityBuilder<AppUser>
_appUser.UserRoles.Add(new AppUserRole() {Role = new AppRole() {Name = role}});
return this;
}
public AppUserBuilder WithIdentityProvider(IdentityProvider identityProvider)
{
_appUser.IdentityProvider = identityProvider;
return this;
}
}

View File

@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Text.Json;
using API.DTOs.Settings;
using API.Entities;
using API.Entities.Enums;
@ -129,6 +130,9 @@ public class ServerSettingConverter : ITypeConverter<IEnumerable<ServerSetting>,
case ServerSettingKey.FirstInstallVersion:
destination.FirstInstallVersion = row.Value;
break;
case ServerSettingKey.OidcConfiguration:
destination.OidcConfig = JsonSerializer.Deserialize<OidcConfigDto>(row.Value)!;
break;
case ServerSettingKey.LicenseKey:
case ServerSettingKey.EnableAuthentication:
case ServerSettingKey.EmailServiceUrl:

View File

@ -130,7 +130,7 @@ public class PdfComicInfoExtractor : IPdfComicInfoExtractor
{
try
{
var extractor = new PdfMetadataExtractor(_logger, filePath);
using var extractor = new PdfMetadataExtractor(_logger, filePath);
return GetComicInfoFromMetadata(extractor.GetMetadata(), filePath);
}
@ -138,9 +138,12 @@ public class PdfComicInfoExtractor : IPdfComicInfoExtractor
{
_logger.LogWarning(ex, "[GetComicInfo] There was an exception parsing PDF metadata for {File}", filePath);
_mediaErrorService.ReportMediaIssue(filePath, MediaErrorProducer.BookService,
"There was an exception parsing PDF metadata", ex);
ex.Message == "Encryption not supported"
? "Encrypted PDFs are not supported"
: "There was an exception parsing PDF metadata", ex);
}
return null;
}
}

View File

@ -1,23 +1,51 @@
/**
* Contributed by https://github.com/microtherion
*
* All references to the "PDF Spec" (section numbers, etc) refer to the
* PDF 1.7 Specification a.k.a. PDF32000-1:2008
* https://opensource.adobe.com/dc-acrobat-sdk-docs/pdfstandards/PDF32000_2008.pdf
*/
using System;
using System.Collections.Generic;
using System.IO.Compression;
using System.Text;
using System.Xml;
using System.IO;
using Microsoft.Extensions.Logging;
using API.Services;
using Microsoft.Extensions.Logging;
namespace API.Helpers;
#nullable enable
/**
* Contributed by https://github.com/microtherion
*
* All references to the "PDF Spec" (section numbers, etc.) refer to the
* PDF 1.7 Specification a.k.a. PDF32000-1:2008
* https://opensource.adobe.com/dc-acrobat-sdk-docs/pdfstandards/PDF32000_2008.pdf
*/
/**
* Reference for PDF Metadata Format
%PDF-1.4 Header
Object 1 0 obj Objects containing content
<< /Type /Catalog ... >>
endobj
Object 2 0 obj
<< /Type /Info ... >>
endobj
...more objects...
xref Cross-reference table
0 6
0000000000 65535 f
0000000015 00000 n Object 1 is at byte offset 15
0000000109 00000 n Object 2 is at byte offset 109
...
trailer Trailer dictionary
<< /Size 6 /Root 1 0 R /Info 2 0 R >>
startxref
1234 Byte offset where xref starts
%%EOF
*/
/// <summary>
/// Parse PDF file and try to extract as much metadata as possible.
/// Supports both text based XRef tables and compressed XRef streams (Deflate only).
@ -41,17 +69,17 @@ public class PdfMetadataExtractorException : Exception
}
}
public interface IPdfMetadataExtractor
public interface IPdfMetadataExtractor : IDisposable
{
Dictionary<String, String> GetMetadata();
Dictionary<string, string> GetMetadata();
}
class PdfStringBuilder
internal class PdfStringBuilder
{
private readonly StringBuilder _builder = new();
private bool _secondByte = false;
private byte _prevByte = 0;
private bool _isUnicode = false;
private bool _secondByte;
private byte _prevByte;
private bool _isUnicode;
// PDFDocEncoding defined in PDF Spec D.1
@ -71,11 +99,11 @@ class PdfStringBuilder
private void AppendPdfDocByte(byte b)
{
if (b >= 0x18 && b < 0x20)
if (b is >= 0x18 and < 0x20)
{
_builder.Append(_pdfDocMappingLow[b - 0x18]);
}
else if (b >= 0x80 && b < 0xA1)
else if (b is >= 0x80 and < 0xA1)
{
_builder.Append(_pdfDocMappingHigh[b - 0x80]);
}
@ -95,28 +123,24 @@ class PdfStringBuilder
// PDF Spec 7.9.2.1: Strings are either UTF-16BE or PDFDocEncoded
if (_builder.Length == 0 && !_isUnicode)
{
// Unicode strings are prefixed by a big endian BOM \uFEFF
if (_secondByte)
switch (_secondByte)
{
if (b == 0xFF)
{
// Unicode strings are prefixed by a big endian BOM \uFEFF
case true when b == 0xFF:
_isUnicode = true;
_secondByte = false;
}
else
{
break;
case true:
AppendPdfDocByte(_prevByte);
AppendPdfDocByte(b);
}
}
else if (!_secondByte && b == 0xFE)
{
_secondByte = true;
_prevByte = b;
}
else
{
AppendPdfDocByte(b);
break;
case false when b == 0xFE:
_secondByte = true;
_prevByte = b;
break;
default:
AppendPdfDocByte(b);
break;
}
}
else if (_isUnicode)
@ -138,7 +162,7 @@ class PdfStringBuilder
}
}
override public string ToString()
public override string ToString()
{
if (_builder.Length == 0 && _secondByte)
{
@ -153,8 +177,8 @@ internal class PdfLexer(Stream stream)
{
private const int BufferSize = 1024;
private readonly byte[] _buffer = new byte[BufferSize];
private int _pos = 0;
private int _valid = 0;
private int _pos;
private int _valid;
public enum TokenType
{
@ -353,10 +377,8 @@ internal class PdfLexer(Stream stream)
{
return (long)token.Value;
}
else
{
throw new PdfMetadataExtractorException("Expected integer after startxref keyword");
}
throw new PdfMetadataExtractorException("Expected integer after startxref keyword");
}
continue;
@ -367,10 +389,18 @@ internal class PdfLexer(Stream stream)
}
}
public bool NextXRefEntry(ref long obj, ref int generation)
/// <summary>
/// Reads the next 20-byte cross-reference table entry and reports whether the referenced object is in use.
/// </summary>
/// <example>
/// 0000000015 00000 n ← offset=15, generation=0, in-use
/// 0000000109 00000 n ← offset=109, generation=0, in-use
/// 0000000000 65535 f ← offset=0, generation=65535, free
/// </example>
/// <remarks>Cross-reference table entry as per PDF Spec 7.5.4</remarks>
/// <exception cref="PdfMetadataExtractorException"></exception>
public bool NextXRefEntry(out long offset, out int generation)
{
// Cross-reference table entry as per PDF Spec 7.5.4
WantLookahead(20);
if (_valid - _pos < 20)
@ -378,14 +408,11 @@ internal class PdfLexer(Stream stream)
throw new PdfMetadataExtractorException("End of stream");
}
var inUse = true;
// Parse the 20-byte XRef entry: "nnnnnnnnnn ggggg n/f \r\n"
offset = Convert.ToInt64(Encoding.ASCII.GetString(_buffer, _pos, 10).Trim());
generation = Convert.ToInt32(Encoding.ASCII.GetString(_buffer, _pos + 11, 5).Trim());
if (obj == 0)
{
obj = Convert.ToInt64(Encoding.ASCII.GetString(_buffer, _pos, 10));
generation = Convert.ToInt32(Encoding.ASCII.GetString(_buffer, _pos + 11, 5));
inUse = _buffer[_pos + 17] == 'n';
}
var inUse = _buffer[_pos + 17] == 'n';
_pos += 20;
@ -503,7 +530,7 @@ internal class PdfLexer(Stream stream)
{
StringBuilder sb = new();
var hasDot = LastByte() == '.';
var followedBySpace = false;
bool followedBySpace;
sb.Append((char)LastByte());
@ -647,7 +674,8 @@ internal class PdfLexer(Stream stream)
case '(':
parenLevel++;
goto default;
sb.AppendByte(b);
break;
case ')':
if (--parenLevel == 0)
@ -655,7 +683,8 @@ internal class PdfLexer(Stream stream)
return new Token(TokenType.String, sb.ToString());
}
goto default;
sb.AppendByte(b);
break;
case '\\':
b = NextByte();
@ -688,7 +717,6 @@ internal class PdfLexer(Stream stream)
break;
case >= '0' and <= '7':
var b1 = b;
var b2 = NextByte();
var b3 = NextByte();
@ -697,7 +725,7 @@ internal class PdfLexer(Stream stream)
throw new PdfMetadataExtractorException("Invalid octal escape, got {b1}{b2}{b3}");
}
sb.AppendByte((byte)((b1 - '0') << 6 | (b2 - '0') << 3 | (b3 - '0')));
sb.AppendByte((byte)((b - '0') << 6 | (b2 - '0') << 3 | (b3 - '0')));
break;
}
@ -763,26 +791,15 @@ internal class PdfLexer(Stream stream)
}
}
switch (sb.ToString())
return sb.ToString() switch
{
case "true":
return new Token(TokenType.Bool, true);
case "false":
return new Token(TokenType.Bool, false);
case "stream":
return new Token(TokenType.StreamStart, true);
case "endstream":
return new Token(TokenType.StreamEnd, true);
case "endobj":
return new Token(TokenType.ObjectEnd, true);
default:
return new Token(TokenType.Keyword, sb.ToString());
}
"true" => new Token(TokenType.Bool, true),
"false" => new Token(TokenType.Bool, false),
"stream" => new Token(TokenType.StreamStart, true),
"endstream" => new Token(TokenType.StreamEnd, true),
"endobj" => new Token(TokenType.ObjectEnd, true),
_ => new Token(TokenType.Keyword, sb.ToString())
};
}
}
@ -791,9 +808,10 @@ internal class PdfMetadataExtractor : IPdfMetadataExtractor
private readonly ILogger<BookService> _logger;
private readonly PdfLexer _lexer;
private readonly FileStream _stream;
private long[] _objectOffsets = new long[0];
private readonly Dictionary<long, long> _objectOffsets = [];
private readonly Dictionary<string, string> _metadata = [];
private readonly Stack<MetadataRef> _metadataRef = new();
private bool _disposed;
private struct MetadataRef(long root, long info)
{
@ -801,7 +819,7 @@ internal class PdfMetadataExtractor : IPdfMetadataExtractor
public long Info = info;
}
private struct XRefSection(long first, long count)
private readonly struct XRefSection(long first, long count)
{
public readonly long First = first;
public readonly long Count = count;
@ -822,7 +840,9 @@ internal class PdfMetadataExtractor : IPdfMetadataExtractor
return _metadata;
}
#pragma warning disable S1144
private void LogMetadata(string filename)
#pragma warning restore S1144
{
_logger.LogTrace("Metadata for {Path}:", filename);
@ -854,14 +874,11 @@ internal class PdfMetadataExtractor : IPdfMetadataExtractor
if (!_lexer.TestByte((byte)'x'))
{
// Cross-reference stream (PDF Spec 7.5.8)
ReadXRefStream();
return;
}
// Cross-reference table (PDF Spec 7.5.4)
var token = _lexer.NextToken();
if (token.Type != PdfLexer.TokenType.Keyword || (string)token.Value != "xref")
@ -885,23 +902,17 @@ internal class PdfMetadataExtractor : IPdfMetadataExtractor
var numObj = (long)token.Value;
if (_objectOffsets.Length < startObj + numObj)
{
Array.Resize(ref _objectOffsets, (int)(startObj + numObj));
}
_lexer.ExpectNewline();
var generation = 0;
for (var obj = startObj; obj < startObj + numObj; ++obj)
{
var inUse = _lexer.NextXRefEntry(ref _objectOffsets[obj], ref generation);
var inUse = _lexer.NextXRefEntry(out var offset, out var generation);
if (!inUse)
if (inUse && offset > 0)
{
_objectOffsets[obj] = 0;
_objectOffsets[obj] = offset;
}
// Free objects (inUse == false) are not stored in the dictionary
}
}
else if (token.Type == PdfLexer.TokenType.Keyword && (string)token.Value == "trailer")
@ -1105,11 +1116,6 @@ internal class PdfMetadataExtractor : IPdfMetadataExtractor
{
var section = sections.Dequeue();
if (_objectOffsets.Length < size)
{
Array.Resize(ref _objectOffsets, (int)size);
}
for (var i = section.First; i < section.First + section.Count; ++i)
{
long type = 0;
@ -1136,9 +1142,9 @@ internal class PdfMetadataExtractor : IPdfMetadataExtractor
generation = (generation << 8) | (ushort)stream.ReadByte();
}
if (type == 1 && _objectOffsets[i] == 0)
if (type == 1)
{
_objectOffsets[i] = offset;
_objectOffsets.TryAdd(i, offset);
}
}
}
@ -1253,7 +1259,7 @@ internal class PdfMetadataExtractor : IPdfMetadataExtractor
{
var meta = _metadataRef.Pop();
//_logger.LogTrace("DocumentCatalog for {Path}: {Root}, Info: {Info}", filename, meta.root, meta.info);
_logger.LogTrace("DocumentCatalog for {Path}: {Root}, Info: {Info}", filename, meta.Root, meta.Info);
ReadMetadataFromInfo(meta.Info);
ReadMetadataFromXml(MetadataObjInObjectCatalog(meta.Root));
@ -1265,7 +1271,7 @@ internal class PdfMetadataExtractor : IPdfMetadataExtractor
// Document information dictionary (PDF Spec 14.3.3)
// We treat this as less authoritative than the Metadata stream.
if (infoObj < 1 || infoObj >= _objectOffsets.Length || _objectOffsets[infoObj] == 0)
if (!HasObject(infoObj))
{
return;
}
@ -1338,7 +1344,7 @@ internal class PdfMetadataExtractor : IPdfMetadataExtractor
{
// Look for /Metadata entry in document catalog (PDF Spec 7.7.2)
if (rootObj < 1 || rootObj >= _objectOffsets.Length || _objectOffsets[rootObj] == 0)
if (!HasObject(rootObj))
{
return -1;
}
@ -1416,7 +1422,7 @@ internal class PdfMetadataExtractor : IPdfMetadataExtractor
private void ReadMetadataFromXml(long meta)
{
if (meta < 1 || meta >= _objectOffsets.Length || _objectOffsets[meta] == 0) return;
if (!HasObject(meta)) return;
_stream.Seek(_objectOffsets[meta], SeekOrigin.Begin);
_lexer.ResetBuffer();
@ -1634,4 +1640,28 @@ internal class PdfMetadataExtractor : IPdfMetadataExtractor
SkipValue();
}
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (_disposed || !disposing) return;
_stream.Dispose();
_disposed = true;
}
private bool HasObject(long objNum)
{
return _objectOffsets.TryGetValue(objNum, out var offset) && offset > 0;
}
private long GetObjectOffset(long objNum)
{
return _objectOffsets.TryGetValue(objNum, out var offset) ? offset : 0;
}
}

View File

@ -2,6 +2,7 @@
"confirm-email": "You must confirm your email first",
"locked-out": "You've been locked out from too many authorization attempts. Please wait 10 minutes.",
"disabled-account": "Your account is disabled. Contact the server admin.",
"password-authentication-disabled": "Password authentication has been disabled, login via OpenID Connect",
"register-user": "Something went wrong when registering user",
"validate-email": "There was an issue validating your email: {0}",
"confirm-token-gen": "There was an issue generating a confirmation token",
@ -17,6 +18,8 @@
"generate-token": "There was an issue generating a confirmation email token. See logs",
"age-restriction-update": "There was an error updating the age restriction",
"no-user": "User does not exist",
"oidc-managed": "Users managed by OIDC cannot be edited.",
"cannot-change-identity-provider-original-user": "Identity Provider of the original admin account cannot be changed",
"username-taken": "Username already taken",
"email-taken": "Email already in use",
"user-already-confirmed": "User is already confirmed",
@ -42,6 +45,7 @@
"email-not-enabled": "Email is not enabled on this server. You cannot perform this action.",
"account-email-invalid": "The email on file for the admin account is not a valid email. Cannot send test email.",
"email-settings-invalid": "Email settings missing information. Ensure all email settings are saved.",
"oidc-invalid-authority": "OIDC authority is invalid",
"chapter-doesnt-exist": "Chapter does not exist",
"file-missing": "File was not found in book",

View File

@ -207,5 +207,7 @@
"sidenav-stream-only-delete-smart-filter": "Seuls les flux de filtres intelligents peuvent être supprimés de la SideNav",
"dashboard-stream-only-delete-smart-filter": "Seuls les flux de filtres intelligents peuvent être supprimés du tableau de bord",
"smart-filter-name-required": "Nom du filtre intelligent requis",
"smart-filter-system-name": "Vous ne pouvez pas utiliser le nom d'un flux fourni par le système"
"smart-filter-system-name": "Vous ne pouvez pas utiliser le nom d'un flux fourni par le système",
"aliases-have-overlap": "Un ou plusieurs alias se chevauchent avec d'autres personnes et ne peuvent pas être mis à jour",
"generated-reading-profile-name": "Généré à partir de {0}"
}

View File

@ -1,5 +1,5 @@
{
"disabled-account": "Váš účet je zakázaný. Kontaktujte správcu servera.",
"disabled-account": "Váš účet je deaktivovaný. Kontaktujte správcu servera.",
"register-user": "Niečo sa pokazilo pri registrácii užívateľa",
"confirm-email": "Najprv musíte potvrdiť svoj e-mail",
"locked-out": "Boli ste zamknutí z dôvodu veľkého počtu neúspešných pokusov o prihlásenie. Počkajte 10 minút.",
@ -88,5 +88,126 @@
"generic-device-create": "Pri vytváraní zariadenia sa vyskytla chyba",
"series-doesnt-exist": "Séria neexistuje",
"volume-doesnt-exist": "Zväzok neexistuje",
"library-name-exists": "Názov knižnice už existuje. Prosím, vyberte si pre daný server jedinečný názov."
"library-name-exists": "Názov knižnice už existuje. Prosím, vyberte si pre daný server jedinečný názov.",
"cache-file-find": "Nepodarilo sa nájsť obrázok vo vyrovnávacej pamäti. Znova načítajte a skúste to znova.",
"name-required": "Názov nemôže byť prázdny",
"valid-number": "Musí to byť platné číslo strany",
"duplicate-bookmark": "Duplicitný záznam záložky už existuje",
"reading-list-permission": "Nemáte povolenia na tento zoznam na čítanie alebo zoznam neexistuje",
"reading-list-position": "Nepodarilo sa aktualizovať pozíciu",
"reading-list-updated": "Aktualizované",
"reading-list-item-delete": "Položku(y) sa nepodarilo odstrániť",
"reading-list-deleted": "Zoznam na čítanie bol odstránený",
"generic-reading-list-delete": "Pri odstraňovaní zoznamu na čítanie sa vyskytol problém",
"generic-reading-list-update": "Pri aktualizácii zoznamu na čítanie sa vyskytol problém",
"generic-reading-list-create": "Pri vytváraní zoznamu na čítanie sa vyskytol problém",
"reading-list-doesnt-exist": "Zoznam na čítanie neexistuje",
"series-restricted": "Používateľ nemá prístup k tejto sérii",
"generic-scrobble-hold": "Pri pauznutí funkcie sa vyskytla chyba",
"libraries-restricted": "Používateľ nemá prístup k žiadnym knižniciam",
"no-series": "Nepodarilo sa získať sériu pre knižnicu",
"no-series-collection": "Nepodarilo sa získať sériu pre kolekciu",
"generic-series-delete": "Pri odstraňovaní série sa vyskytol problém",
"generic-series-update": "Pri aktualizácii série sa vyskytla chyba",
"series-updated": "Úspešne aktualizované",
"update-metadata-fail": "Nepodarilo sa aktualizovať metadáta",
"age-restriction-not-applicable": "Bez obmedzenia",
"generic-relationship": "Pri aktualizácii vzťahov sa vyskytol problém",
"job-already-running": "Úloha už beží",
"encode-as-warning": "Nedá sa konvertovať do formátu PNG. Pre obaly použite možnosť Obnoviť obaly. Záložky a favicony sa nedajú spätne zakódovať.",
"ip-address-invalid": "IP adresa „{0}“ je neplatná",
"bookmark-dir-permissions": "Adresár záložiek nemá správne povolenia pre použitie v aplikácii Kavita",
"total-backups": "Celkový počet záloh musí byť medzi 1 a 30",
"total-logs": "Celkový počet protokolov musí byť medzi 1 a 30",
"stats-permission-denied": "Nemáte oprávnenie zobraziť si štatistiky iného používateľa",
"url-not-valid": "URL nevracia platný obrázok alebo vyžaduje autorizáciu",
"url-required": "Na použitie musíte zadať URL adresu",
"generic-cover-series-save": "Obrázok obálky sa nepodarilo uložiť do série",
"generic-cover-collection-save": "Obrázok obálky sa nepodarilo uložiť do kolekcie",
"generic-cover-reading-list-save": "Obrázok obálky sa nepodarilo uložiť do zoznamu na čítanie",
"generic-cover-chapter-save": "Obrázok obálky sa nepodarilo uložiť do kapitoly",
"generic-cover-library-save": "Obrázok obálky sa nepodarilo uložiť do knižnice",
"generic-cover-person-save": "Obrázok obálky sa nepodarilo uložiť k tejto osobe",
"generic-cover-volume-save": "Obrázok obálky sa nepodarilo uložiť do zväzku",
"access-denied": "Nemáte prístup",
"reset-chapter-lock": "Nepodarilo sa resetovať zámok obalu pre kapitolu",
"generic-user-delete": "Používateľa sa nepodarilo odstrániť",
"generic-user-pref": "Pri ukladaní predvolieb sa vyskytol problém",
"opds-disabled": "OPDS nie je na tomto serveri povolený",
"on-deck": "Pokračovať v čítaní",
"browse-on-deck": "Prehliadať pokračovanie v čítaní",
"recently-added": "Nedávno pridané",
"want-to-read": "Chcem čítať",
"browse-want-to-read": "Prehliadať Chcem si prečítať",
"browse-recently-added": "Prehliadať nedávno pridané",
"reading-lists": "Zoznamy na čítanie",
"browse-reading-lists": "Prehliadať podľa zoznamov na čítanie",
"libraries": "Všetky knižnice",
"browse-libraries": "Prehliadať podľa knižníc",
"collections": "Všetky kolekcie",
"browse-collections": "Prehliadať podľa kolekcií",
"more-in-genre": "Viac v žánri {0}",
"browse-more-in-genre": "Prezrite si viac v {0}",
"recently-updated": "Nedávno aktualizované",
"browse-recently-updated": "Prehliadať nedávno aktualizované",
"smart-filters": "Inteligentné filtre",
"external-sources": "Externé zdroje",
"browse-external-sources": "Prehliadať externé zdroje",
"browse-smart-filters": "Prehliadať podľa inteligentných filtrov",
"reading-list-restricted": "Zoznam na čítanie neexistuje alebo k nemu nemáte prístup",
"query-required": "Musíte zadať parameter dopytu",
"search": "Hľadať",
"search-description": "Vyhľadávanie sérií, zbierok alebo zoznamov na čítanie",
"favicon-doesnt-exist": "Favicon neexistuje",
"smart-filter-doesnt-exist": "Inteligentný filter neexistuje",
"smart-filter-already-in-use": "Existuje existujúci stream s týmto inteligentným filtrom",
"dashboard-stream-doesnt-exist": "Stream dashboardu neexistuje",
"sidenav-stream-doesnt-exist": "SideNav Stream neexistuje",
"external-source-already-exists": "Externý zdroj už existuje",
"external-source-required": "Vyžaduje sa kľúč API a Host",
"external-source-doesnt-exist": "Externý zdroj neexistuje",
"external-source-already-in-use": "S týmto externým zdrojom existuje stream",
"sidenav-stream-only-delete-smart-filter": "Z bočného panela SideNav je možné odstrániť iba streamy inteligentných filtrov",
"dashboard-stream-only-delete-smart-filter": "Z ovládacieho panela je možné odstrániť iba streamy inteligentných filtrov",
"smart-filter-name-required": "Názov inteligentného filtra je povinný",
"smart-filter-system-name": "Nemôžete použiť názov streamu poskytnutého systémom",
"not-authenticated": "Používateľ nie je overený",
"unable-to-register-k+": "Licenciu sa nepodarilo zaregistrovať z dôvodu chyby. Kontaktujte podporu Kavita+",
"unable-to-reset-k+": "Licenciu Kavita+ sa nepodarilo resetovať z dôvodu chyby. Kontaktujte podporu Kavita+",
"anilist-cred-expired": "Prihlasovacie údaje AniList vypršali alebo chýbajú",
"scrobble-bad-payload": "Nesprávne údaje od poskytovateľa Scrobblovania",
"theme-doesnt-exist": "Súbor témy chýba alebo je neplatný",
"bad-copy-files-for-download": "Súbory sa nepodarilo skopírovať do dočasného adresára na stiahnutie archívu.",
"generic-create-temp-archive": "Pri vytváraní dočasného archívu sa vyskytla chyba",
"epub-malformed": "Súbor je nesprávne naformátovaný! Nedá sa prečítať.",
"epub-html-missing": "Zodpovedajúci súbor HTML pre túto stránku sa nenašiel",
"collection-tag-title-required": "Názov kolekcie nemôže byť prázdny",
"reading-list-title-required": "Názov zoznamu na čítanie nemôže byť prázdny",
"collection-tag-duplicate": "Kolekcia s týmto názvom už existuje",
"device-duplicate": "Zariadenie s týmto názvom už existuje",
"device-not-created": "Toto zariadenie ešte neexistuje. Najprv ho vytvorte",
"send-to-permission": "Nie je možné odoslať súbory iné ako EPUB alebo PDF na zariadenia, pretože nie sú podporované na Kindle",
"progress-must-exist": "Pokrok musí byť u používateľa k dispozícii",
"reading-list-name-exists": "Zoznam na prečítanie s týmto menom už existuje",
"user-no-access-library-from-series": "Používateľ nemá prístup do knižnice, do ktorej táto séria patrí",
"series-restricted-age-restriction": "Používateľ si nemôže pozrieť túto sériu z dôvodu vekového obmedzenia",
"kavitaplus-restricted": "Toto je obmedzené iba na Kavita+",
"aliases-have-overlap": "Jeden alebo viacero aliasov sa prekrýva s inými osobami, nie je možné ich aktualizovať",
"volume-num": "Zväzok {0}",
"book-num": "Kniha {0}",
"issue-num": "Problém {0}{1}",
"chapter-num": "Kapitola {0}",
"check-updates": "Skontrolovať aktualizácie",
"license-check": "Kontrola licencie",
"process-scrobbling-events": "Udalosti procesu scrobblovania",
"report-stats": "Štatistiky hlásení",
"check-scrobbling-tokens": "Skontrolujte Tokeny Scrobblingu",
"cleanup": "Čistenie",
"process-processed-scrobbling-events": "Spracovať udalosti scrobblovania",
"remove-from-want-to-read": "Upratanie listu Chcem si prečítať",
"scan-libraries": "Skenovanie knižníc",
"kavita+-data-refresh": "Obnovenie údajov Kavita+",
"backup": "Záloha",
"update-yearly-stats": "Aktualizovať ročné štatistiky",
"generated-reading-profile-name": "Vygenerované z {0}"
}

View File

@ -1,19 +1,22 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using System.Web;
using API.Constants;
using API.Data;
using API.Data.Repositories;
using API.DTOs.Account;
using API.Entities;
using API.Entities.Enums;
using API.Errors;
using API.Extensions;
using API.Helpers.Builders;
using API.SignalR;
using AutoMapper;
using Kavita.Common;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Identity;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
namespace API.Services;
@ -24,25 +27,56 @@ public interface IAccountService
{
Task<IEnumerable<ApiException>> ChangeUserPassword(AppUser user, string newPassword);
Task<IEnumerable<ApiException>> ValidatePassword(AppUser user, string password);
Task<IEnumerable<ApiException>> ValidateUsername(string username);
Task<IEnumerable<ApiException>> ValidateUsername(string? username);
Task<IEnumerable<ApiException>> ValidateEmail(string email);
Task<bool> HasBookmarkPermission(AppUser? user);
Task<bool> HasDownloadPermission(AppUser? user);
Task<bool> CanChangeAgeRestriction(AppUser? user);
/// <summary>
/// Changes the identity provider for the given user, enforcing the OIDC sync rules
/// </summary>
/// <param name="actingUserId">The user who is changing the identity</param>
/// <param name="user">the user being changed</param>
/// <param name="identityProvider"> the provider being changed to</param>
/// <returns>If true, user should not be updated by kavita (anymore)</returns>
/// <exception cref="KavitaException">Throws if invalid actions are being performed</exception>
Task<bool> ChangeIdentityProvider(int actingUserId, AppUser user, IdentityProvider identityProvider);
/// <summary>
/// Removes access to all libraries, then grants access to the given libraries, or to all libraries if the user is an admin.
/// Creates side nav streams as well
/// </summary>
/// <param name="user"></param>
/// <param name="librariesIds"></param>
/// <param name="hasAdminRole"></param>
/// <returns></returns>
/// <remarks>Ensure that the users SideNavStreams are loaded</remarks>
/// <remarks>Does NOT commit</remarks>
Task UpdateLibrariesForUser(AppUser user, IList<int> librariesIds, bool hasAdminRole);
Task<IEnumerable<IdentityError>> UpdateRolesForUser(AppUser user, IList<string> roles);
void AddDefaultStreamsToUser(AppUser user);
Task AddDefaultReadingProfileToUser(AppUser user);
}
public class AccountService : IAccountService
public partial class AccountService : IAccountService
{
private readonly ILocalizationService _localizationService;
private readonly UserManager<AppUser> _userManager;
private readonly ILogger<AccountService> _logger;
private readonly IUnitOfWork _unitOfWork;
private readonly IMapper _mapper;
public const string DefaultPassword = "[k.2@RZ!mxCQkJzE";
public static readonly Regex AllowedUsernameRegex = AllowedUsernameRegexAttr();
public AccountService(UserManager<AppUser> userManager, ILogger<AccountService> logger, IUnitOfWork unitOfWork)
public AccountService(UserManager<AppUser> userManager, ILogger<AccountService> logger, IUnitOfWork unitOfWork,
IMapper mapper, ILocalizationService localizationService)
{
_localizationService = localizationService;
_userManager = userManager;
_logger = logger;
_unitOfWork = unitOfWork;
_mapper = mapper;
}
public async Task<IEnumerable<ApiException>> ChangeUserPassword(AppUser user, string newPassword)
@ -77,8 +111,13 @@ public class AccountService : IAccountService
return Array.Empty<ApiException>();
}
public async Task<IEnumerable<ApiException>> ValidateUsername(string username)
public async Task<IEnumerable<ApiException>> ValidateUsername(string? username)
{
if (string.IsNullOrWhiteSpace(username) || !AllowedUsernameRegex.IsMatch(username))
{
return [new ApiException(400, "Invalid username")];
}
// Reverted because of https://go.microsoft.com/fwlink/?linkid=2129535
if (await _userManager.Users.AnyAsync(x => x.NormalizedUserName != null
&& x.NormalizedUserName == username.ToUpper()))
@ -143,4 +182,113 @@ public class AccountService : IAccountService
return roles.Contains(PolicyConstants.ChangePasswordRole) || roles.Contains(PolicyConstants.AdminRole);
}
public async Task<bool> ChangeIdentityProvider(int actingUserId, AppUser user, IdentityProvider identityProvider)
{
var defaultAdminUser = await _unitOfWork.UserRepository.GetDefaultAdminUser();
if (user.Id == defaultAdminUser.Id)
{
throw new KavitaException(await _localizationService.Translate(actingUserId, "cannot-change-identity-provider-original-user"));
}
// Allow changes if users aren't being synced
var oidcSettings = (await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).OidcConfig;
if (!oidcSettings.SyncUserSettings)
{
user.IdentityProvider = identityProvider;
await _unitOfWork.CommitAsync();
return false;
}
// Don't allow changes to the user if they're managed by OIDC and their identity provider isn't being changed to something else
if (user.IdentityProvider == IdentityProvider.OpenIdConnect && identityProvider == IdentityProvider.OpenIdConnect)
{
throw new KavitaException(await _localizationService.Translate(actingUserId, "oidc-managed"));
}
user.IdentityProvider = identityProvider;
await _unitOfWork.CommitAsync();
return user.IdentityProvider == IdentityProvider.OpenIdConnect;
}
public async Task UpdateLibrariesForUser(AppUser user, IList<int> librariesIds, bool hasAdminRole)
{
var allLibraries = (await _unitOfWork.LibraryRepository.GetLibrariesAsync(LibraryIncludes.AppUser)).ToList();
var currentLibrary = allLibraries.Where(l => l.AppUsers.Contains(user)).ToList();
List<Library> libraries;
if (hasAdminRole)
{
_logger.LogDebug("{UserId} is admin. Granting access to all libraries", user.Id);
libraries = allLibraries;
}
else
{
libraries = allLibraries.Where(lib => librariesIds.Contains(lib.Id)).ToList();
}
var toRemove = currentLibrary.Except(libraries);
var toAdd = libraries.Except(currentLibrary);
foreach (var lib in toRemove)
{
lib.AppUsers ??= [];
lib.AppUsers.Remove(user);
user.RemoveSideNavFromLibrary(lib);
}
foreach (var lib in toAdd)
{
lib.AppUsers ??= [];
lib.AppUsers.Add(user);
user.CreateSideNavFromLibrary(lib);
}
}
public async Task<IEnumerable<IdentityError>> UpdateRolesForUser(AppUser user, IList<string> roles)
{
var existingRoles = await _userManager.GetRolesAsync(user);
var hasAdminRole = roles.Contains(PolicyConstants.AdminRole);
if (!hasAdminRole)
{
roles.Add(PolicyConstants.PlebRole);
}
if (existingRoles.Except(roles).Any() || roles.Except(existingRoles).Any())
{
var roleResult = await _userManager.RemoveFromRolesAsync(user, existingRoles);
if (!roleResult.Succeeded) return roleResult.Errors;
roleResult = await _userManager.AddToRolesAsync(user, roles);
if (!roleResult.Succeeded) return roleResult.Errors;
}
return [];
}
public void AddDefaultStreamsToUser(AppUser user)
{
foreach (var newStream in Seed.DefaultStreams.Select(_mapper.Map<AppUserDashboardStream, AppUserDashboardStream>))
{
user.DashboardStreams.Add(newStream);
}
foreach (var stream in Seed.DefaultSideNavStreams.Select(_mapper.Map<AppUserSideNavStream, AppUserSideNavStream>))
{
user.SideNavStreams.Add(stream);
}
}
public async Task AddDefaultReadingProfileToUser(AppUser user)
{
var profile = new AppUserReadingProfileBuilder(user.Id)
.WithName("Default Profile")
.WithKind(ReadingProfileKind.Default)
.Build();
_unitOfWork.AppUserReadingProfileRepository.Add(profile);
await _unitOfWork.CommitAsync();
}
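// Illustrative examples only: the pattern below accepts values such as "amelia.reads",
// "user@example.com" or "scans+kavita", and rejects names containing spaces or characters like '#'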
[GeneratedRegex(@"^[a-zA-Z0-9\-._@+/]*$")]
private static partial Regex AllowedUsernameRegexAttr();
}

View File

@ -679,10 +679,8 @@ public class BookService : IBookService
{
return _pdfComicInfoExtractor.GetComicInfo(filePath);
}
else
{
return GetEpubComicInfo(filePath);
}
return GetEpubComicInfo(filePath);
}
private static void ExtractSortTitle(EpubMetadataMeta metadataItem, EpubBookRef epubBook, ComicInfo info)

View File

@ -30,8 +30,9 @@ public interface IImageService
/// <param name="fileName"></param>
/// <param name="encodeFormat">Convert and save as encoding format</param>
/// <param name="thumbnailWidth">Width of thumbnail</param>
/// <returns>File name with extension of the file. This will always write to <see cref="DirectoryService.CoverImageDirectory"/></returns>
string CreateThumbnailFromBase64(string encodedImage, string fileName, EncodeFormat encodeFormat, int thumbnailWidth = 320);
/// <param name="targetDirectory">If null, will write to <see cref="DirectoryService.CoverImageDirectory"/></param>
/// <returns>File name with extension of the file. </returns>
string CreateThumbnailFromBase64(string encodedImage, string fileName, EncodeFormat encodeFormat, int thumbnailWidth = 320, string? targetDirectory = null);
/// <summary>
/// Writes out a thumbnail by stream input
/// </summary>
@ -576,14 +577,16 @@ public class ImageService : IImageService
/// <inheritdoc />
public string CreateThumbnailFromBase64(string encodedImage, string fileName, EncodeFormat encodeFormat, int thumbnailWidth = ThumbnailWidth)
public string CreateThumbnailFromBase64(string encodedImage, string fileName, EncodeFormat encodeFormat, int thumbnailWidth = ThumbnailWidth, string? targetDirectory = null)
{
// TODO: This code has no concept of cropping nor Thumbnail Size
try
{
targetDirectory ??= _directoryService.CoverImageDirectory;
using var thumbnail = Image.ThumbnailBuffer(Convert.FromBase64String(encodedImage), thumbnailWidth);
fileName += encodeFormat.GetExtension();
thumbnail.WriteToFile(_directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, fileName));
thumbnail.WriteToFile(_directoryService.FileSystem.Path.Join(targetDirectory, fileName));
return fileName;
}
catch (Exception e)

679
API/Services/OidcService.cs Normal file
View File

@ -0,0 +1,679 @@
#nullable enable
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Globalization;
using System.IdentityModel.Tokens.Jwt;
using System.Linq;
using System.Security.Claims;
using System.Threading.Tasks;
using API.Constants;
using API.Data;
using API.Data.Repositories;
using API.DTOs.Email;
using API.DTOs.Settings;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers.Builders;
using Hangfire;
using Flurl.Http;
using Kavita.Common;
using Kavita.Common.EnvironmentInfo;
using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Authentication.Cookies;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.IdentityModel.Protocols;
using Microsoft.IdentityModel.Protocols.OpenIdConnect;
using Microsoft.IdentityModel.Tokens;
namespace API.Services;
public interface IOidcService
{
/// <summary>
/// Returns the user authenticated with OpenID Connect
/// </summary>
/// <param name="request"></param>
/// <param name="principal"></param>
/// <returns></returns>
/// <exception cref="KavitaException">if any requirements aren't met</exception>
Task<AppUser?> LoginOrCreate(HttpRequest request, ClaimsPrincipal principal);
/// <summary>
/// Refresh the token inside the cookie when it's close to expiring. And sync the user
/// </summary>
/// <param name="ctx"></param>
/// <returns></returns>
/// <remarks>If the token is refreshed successfully, updates the last active time of the user</remarks>
Task<AppUser?> RefreshCookieToken(CookieValidatePrincipalContext ctx);
/// <summary>
/// Remove <see cref="AppUser.OidcId"/> from all users
/// </summary>
/// <returns></returns>
Task ClearOidcIds();
}
public class OidcService(ILogger<OidcService> logger, UserManager<AppUser> userManager,
IUnitOfWork unitOfWork, IAccountService accountService, IEmailService emailService): IOidcService
{
public const string LibraryAccessPrefix = "library-";
public const string AgeRestrictionPrefix = "age-restriction-";
public const string IncludeUnknowns = "include-unknowns";
public const string RefreshToken = "refresh_token";
public const string IdToken = "id_token";
public const string ExpiresAt = "expires_at";
/// <summary>The name of the Auth Cookie set by .NET</summary>
public const string CookieName = ".AspNetCore.Cookies";
/// <summary>
/// The ConfigurationManager will refresh the configuration periodically to ensure the data stays up to date
/// We can store the same one indefinitely as the authority does not change unless Kavita is restarted
/// </summary>
/// <remarks>The ConfigurationManager has its own lock; it loads data in a thread-safe manner</remarks>
private static readonly ConfigurationManager<OpenIdConnectConfiguration> OidcConfigurationManager;
private static readonly ConcurrentDictionary<string, bool> RefreshInProgress = new();
private static readonly ConcurrentDictionary<string, DateTimeOffset> LastFailedRefresh = new();
#pragma warning disable S3963
static OidcService()
{
var authority = Configuration.OidcSettings.Authority;
var hasTrailingSlash = authority.EndsWith('/');
var url = authority + (hasTrailingSlash ? string.Empty : "/") + ".well-known/openid-configuration";
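// e.g. an authority of "https://auth.example.com" (illustrative) resolves to
// "https://auth.example.com/.well-known/openid-configuration"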
OidcConfigurationManager = new ConfigurationManager<OpenIdConnectConfiguration>(
url,
new OpenIdConnectConfigurationRetriever(),
new HttpDocumentRetriever { RequireHttps = url.StartsWith("https") }
);
}
#pragma warning restore S3963
public async Task<AppUser?> LoginOrCreate(HttpRequest request, ClaimsPrincipal principal)
{
var settings = (await unitOfWork.SettingsRepository.GetSettingsDtoAsync()).OidcConfig;
var oidcId = principal.FindFirstValue(ClaimTypes.NameIdentifier);
if (string.IsNullOrEmpty(oidcId))
{
throw new KavitaException("errors.oidc.missing-external-id");
}
var user = await unitOfWork.UserRepository.GetByOidcId(oidcId, AppUserIncludes.UserPreferences);
if (user != null)
{
await SyncUserSettings(request, settings, principal, user);
return user;
}
var email = principal.FindFirstValue(ClaimTypes.Email);
if (string.IsNullOrEmpty(email))
{
throw new KavitaException("errors.oidc.missing-email");
}
if (settings.RequireVerifiedEmail && !principal.HasVerifiedEmail())
{
throw new KavitaException("errors.oidc.email-not-verified");
}
user = await unitOfWork.UserRepository.GetUserByEmailAsync(email, AppUserIncludes.UserPreferences | AppUserIncludes.SideNavStreams);
if (user != null)
{
// Don't allow taking over accounts
// This could happen if the user changes their email in OIDC, and then someone else uses the old one
if (!string.IsNullOrEmpty(user.OidcId))
{
throw new KavitaException("errors.oidc.email-in-use");
}
logger.LogDebug("User {UserName} has matched on email to {OidcId}", user.Id, oidcId);
user.OidcId = oidcId;
await unitOfWork.CommitAsync();
await SyncUserSettings(request, settings, principal, user);
return user;
}
return await CreateNewAccount(request, principal, settings, oidcId);
}
public async Task<AppUser?> RefreshCookieToken(CookieValidatePrincipalContext ctx)
{
if (ctx.Principal == null) return null;
var user = await unitOfWork.UserRepository.GetUserByIdAsync(ctx.Principal.GetUserId()) ?? throw new UnauthorizedAccessException();
var key = ctx.Principal.GetUsername();
var refreshToken = ctx.Properties.GetTokenValue(RefreshToken);
if (string.IsNullOrEmpty(refreshToken)) return user;
var expiresAt = ctx.Properties.GetTokenValue(ExpiresAt);
if (string.IsNullOrEmpty(expiresAt)) return user;
// Do not spam refresh if it failed
if (LastFailedRefresh.TryGetValue(key, out var time) && time.AddMinutes(30) < DateTimeOffset.UtcNow) return user;
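// expires_at is stored in the round-trip ("o") format, e.g. "2030-01-01T00:00:00.0000000+00:00" (illustrative value)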
var tokenExpiry = DateTimeOffset.ParseExact(expiresAt, "o", CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind);
if (tokenExpiry >= DateTimeOffset.UtcNow.AddSeconds(30)) return user;
// Ensure we're not refreshing twice
if (!RefreshInProgress.TryAdd(key, true)) return user;
try
{
var settings = (await unitOfWork.SettingsRepository.GetSettingsDtoAsync()).OidcConfig;
var tokenResponse = await RefreshTokenAsync(settings, refreshToken);
if (!string.IsNullOrEmpty(tokenResponse.Error))
{
logger.LogTrace("Failed to refresh token : {Error} - {Description}", tokenResponse.Error, tokenResponse.ErrorDescription);
LastFailedRefresh.TryAdd(key, DateTimeOffset.UtcNow);
return user;
}
var newExpiresAt = DateTimeOffset.UtcNow.AddSeconds(double.Parse(tokenResponse.ExpiresIn));
ctx.Properties.UpdateTokenValue(ExpiresAt, newExpiresAt.ToString("o"));
ctx.Properties.UpdateTokenValue(RefreshToken, tokenResponse.RefreshToken);
ctx.Properties.UpdateTokenValue(IdToken, tokenResponse.IdToken);
ctx.ShouldRenew = true;
try
{
user.UpdateLastActive();
if (unitOfWork.HasChanges())
{
await unitOfWork.CommitAsync();
}
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to update last active for {UserName}", user.UserName);
}
if (string.IsNullOrEmpty(tokenResponse.IdToken))
{
logger.LogTrace("The OIDC provider did not return an id token in the refresh response, continuous sync is not supported");
return user;
}
await SyncUserSettings(ctx, settings, tokenResponse.IdToken, user);
logger.LogTrace("Automatically refreshed token for user {UserId}", ctx.Principal?.GetUserId());
}
finally
{
RefreshInProgress.TryRemove(key, out _);
LastFailedRefresh.TryRemove(key, out _);
}
return user;
}
public async Task ClearOidcIds()
{
var users = await unitOfWork.UserRepository.GetAllUsersAsync();
foreach (var user in users)
{
user.OidcId = null;
}
await unitOfWork.CommitAsync();
}
/// <summary>
/// Tries to construct a new account from the OIDC Principal, may fail if required conditions aren't met
/// </summary>
/// <param name="request"></param>
/// <param name="principal"></param>
/// <param name="settings"></param>
/// <param name="oidcId"></param>
/// <returns></returns>
/// <exception cref="KavitaException"></exception>
private async Task<AppUser?> CreateNewAccount(HttpRequest request, ClaimsPrincipal principal, OidcConfigDto settings, string oidcId)
{
var accessRoles = principal.GetClaimsWithPrefix(settings.RolesClaim, settings.RolesPrefix)
.Where(s => PolicyConstants.ValidRoles.Contains(s)).ToList();
if (settings.SyncUserSettings && accessRoles.Count == 0)
{
throw new KavitaException("errors.oidc.role-not-assigned");
}
AppUser? user;
try
{
user = await NewUserFromOpenIdConnect(request, settings, principal, oidcId);
}
catch (KavitaException)
{
throw;
}
catch (Exception e)
{
logger.LogError(e, "An error occured creating a new user");
throw new KavitaException("errors.oidc.creating-user");
}
if (user == null) return null;
var roles = await userManager.GetRolesAsync(user);
if (roles.Count == 0 || (!roles.Contains(PolicyConstants.LoginRole) && !roles.Contains(PolicyConstants.AdminRole)))
{
throw new KavitaException("errors.oidc.disabled-account");
}
return user;
}
/// <summary>
/// Find the best available name from claims
/// </summary>
/// <param name="claimsPrincipal"></param>
/// <param name="orEqualTo">Also return if the claim is equal to this value</param>
/// <returns></returns>
public async Task<string?> FindBestAvailableName(ClaimsPrincipal claimsPrincipal, string? orEqualTo = null)
{
var nameCandidates = new[]
{
claimsPrincipal.FindFirstValue(JwtRegisteredClaimNames.PreferredUsername),
claimsPrincipal.FindFirstValue(ClaimTypes.Name),
claimsPrincipal.FindFirstValue(ClaimTypes.GivenName),
claimsPrincipal.FindFirstValue(ClaimTypes.Surname)
};
foreach (var name in nameCandidates.Where(n => !string.IsNullOrEmpty(n)))
{
if (name == orEqualTo || await IsNameAvailable(name))
{
return name;
}
}
return null;
}
private async Task<bool> IsNameAvailable(string? name)
{
return !(await accountService.ValidateUsername(name)).Any();
}
private async Task<AppUser?> NewUserFromOpenIdConnect(HttpRequest request, OidcConfigDto settings, ClaimsPrincipal claimsPrincipal, string externalId)
{
if (!settings.ProvisionAccounts) return null;
var emailClaim = claimsPrincipal.FindFirst(ClaimTypes.Email);
if (string.IsNullOrWhiteSpace(emailClaim?.Value)) return null;
var name = await FindBestAvailableName(claimsPrincipal) ?? emailClaim.Value;
logger.LogInformation("Creating new user from OIDC: {Name} - {ExternalId}", name.Censor(), externalId);
var user = new AppUserBuilder(name, emailClaim.Value,
await unitOfWork.SiteThemeRepository.GetDefaultTheme()).Build();
var res = await userManager.CreateAsync(user);
if (!res.Succeeded)
{
logger.LogError("Failed to create new user from OIDC: {Errors}",
res.Errors.Select(x => x.Description).ToList());
throw new KavitaException("errors.oidc.creating-user");
}
if (claimsPrincipal.HasVerifiedEmail())
{
var token = await userManager.GenerateEmailConfirmationTokenAsync(user);
await userManager.ConfirmEmailAsync(user, token);
}
user.OidcId = externalId;
user.IdentityProvider = IdentityProvider.OpenIdConnect;
accountService.AddDefaultStreamsToUser(user);
await accountService.AddDefaultReadingProfileToUser(user);
await SyncUserSettings(request, settings, claimsPrincipal, user);
await SetDefaults(settings, user);
await unitOfWork.CommitAsync();
return user;
}
/// <summary>
/// Assign configured defaults (libraries, age ratings, roles) to the newly created user
/// </summary>
private async Task SetDefaults(OidcConfigDto settings, AppUser user)
{
if (settings.SyncUserSettings) return;
logger.LogDebug("Assigning defaults to newly created user; Roles: {Roles}, Libraries: {Libraries}, AgeRating: {AgeRating}, IncludeUnknowns: {IncludeUnknowns}",
settings.DefaultRoles, settings.DefaultLibraries, settings.DefaultAgeRestriction, settings.DefaultIncludeUnknowns);
// Assign roles
var errors = await accountService.UpdateRolesForUser(user, settings.DefaultRoles);
if (errors.Any()) throw new KavitaException("errors.oidc.syncing-user");
// Assign libraries
await accountService.UpdateLibrariesForUser(user, settings.DefaultLibraries, settings.DefaultRoles.Contains(PolicyConstants.AdminRole));
// Assign age rating
user.AgeRestriction = settings.DefaultAgeRestriction;
user.AgeRestrictionIncludeUnknowns = settings.DefaultIncludeUnknowns;
await unitOfWork.CommitAsync();
}
/// <summary>
/// Syncs the given user to the principal found in the id token
/// </summary>
/// <param name="ctx"></param>
/// <param name="settings"></param>
/// <param name="idToken"></param>
/// <param name="user"></param>
/// <exception cref="UnauthorizedAccessException">If syncing fails</exception>
private async Task SyncUserSettings(CookieValidatePrincipalContext ctx, OidcConfigDto settings, string idToken, AppUser user)
{
if (!settings.SyncUserSettings || user.IdentityProvider != IdentityProvider.OpenIdConnect) return;
try
{
var newPrincipal = await ParseIdToken(settings, idToken);
await SyncUserSettings(ctx.HttpContext.Request, settings, newPrincipal, user);
}
catch (KavitaException ex)
{
logger.LogError(ex, "Failed to sync user after token refresh");
throw new UnauthorizedAccessException(ex.Message);
}
}
/// <summary>
/// Updates roles, library access and age rating restriction. Will not modify the default admin
/// </summary>
/// <param name="request"></param>
/// <param name="settings"></param>
/// <param name="claimsPrincipal"></param>
/// <param name="user"></param>
public async Task SyncUserSettings(HttpRequest request, OidcConfigDto settings, ClaimsPrincipal claimsPrincipal, AppUser user)
{
if (!settings.SyncUserSettings || user.IdentityProvider != IdentityProvider.OpenIdConnect) return;
// Never sync the default user
var defaultAdminUser = await unitOfWork.UserRepository.GetDefaultAdminUser();
if (defaultAdminUser.Id == user.Id) return;
logger.LogDebug("Syncing user {UserId} from OIDC", user.Id);
try
{
await SyncEmail(request, settings, claimsPrincipal, user);
await SyncUsername(claimsPrincipal, user);
await SyncRoles(settings, claimsPrincipal, user);
await SyncLibraries(settings, claimsPrincipal, user);
await SyncAgeRestriction(settings, claimsPrincipal, user);
if (unitOfWork.HasChanges())
{
await unitOfWork.CommitAsync();
}
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to sync user {UserId} from OIDC", user.Id);
await unitOfWork.RollbackAsync();
throw new KavitaException("errors.oidc.syncing-user", ex);
}
}
private async Task SyncEmail(HttpRequest request, OidcConfigDto settings, ClaimsPrincipal claimsPrincipal, AppUser user)
{
var email = claimsPrincipal.FindFirstValue(ClaimTypes.Email);
if (string.IsNullOrEmpty(email) || user.Email == email) return;
if (settings.RequireVerifiedEmail && !claimsPrincipal.HasVerifiedEmail())
{
throw new KavitaException("errors.oidc.email-not-verified");
}
// Ensure no other user uses this email
var other = await userManager.FindByEmailAsync(email);
if (other != null)
{
throw new KavitaException("errors.oidc.email-in-use");
}
// The email is verified, we can go ahead and change & confirm it
if (claimsPrincipal.HasVerifiedEmail())
{
var res = await userManager.SetEmailAsync(user, email);
if (!res.Succeeded)
{
logger.LogError("Failed to update email for user {UserId} from OIDC {Errors}", user.Id, res.Errors.Select(x => x.Description).ToList());
throw new KavitaException("errors.oidc.failed-to-update-email");
}
user.EmailConfirmed = true;
await userManager.UpdateAsync(user);
return;
}
var token = await userManager.GenerateEmailConfirmationTokenAsync(user);
var isValidEmailAddress = !string.IsNullOrEmpty(user.Email) && emailService.IsValidEmail(user.Email);
var isEmailSetup = (await unitOfWork.SettingsRepository.GetSettingsDtoAsync()).IsEmailSetup();
var shouldEmailUser = isEmailSetup || !isValidEmailAddress;
user.EmailConfirmed = !shouldEmailUser;
user.ConfirmationToken = token;
await userManager.UpdateAsync(user);
var emailLink = await emailService.GenerateEmailLink(request, user.ConfirmationToken, "confirm-email-update", email);
logger.LogCritical("[Update Email]: Automatic email update after OIDC sync, email Link for {UserId}: {Link}", user.Id, emailLink);
if (!shouldEmailUser)
{
logger.LogInformation("Cannot email admin, email not setup or admin email invalid");
return;
}
if (!isValidEmailAddress)
{
logger.LogCritical("[Update Email]: User is trying to update their email, but their existing email ({Email}) isn't valid. No email will be send", user.Email.Censor());
return;
}
try
{
var invitingUser = await unitOfWork.UserRepository.GetDefaultAdminUser();
BackgroundJob.Enqueue(() => emailService.SendEmailChangeEmail(new ConfirmationEmailDto()
{
EmailAddress = string.IsNullOrEmpty(user.Email) ? email : user.Email,
InstallId = BuildInfo.Version.ToString(),
InvitingUser = invitingUser.UserName,
ServerConfirmationLink = emailLink,
}));
}
catch (Exception)
{
/* Swallow exception */
}
}
private async Task SyncUsername(ClaimsPrincipal claimsPrincipal, AppUser user)
{
var bestName = await FindBestAvailableName(claimsPrincipal, user.UserName);
if (bestName == null || bestName == user.UserName) return;
var res = await userManager.SetUserNameAsync(user, bestName);
if (!res.Succeeded)
{
logger.LogError("Failed to update username for user {UserId} to {NewUserName} from OIDC {Errors}", user.Id,
bestName.Censor(), res.Errors.Select(x => x.Description).ToList());
throw new KavitaException("errors.oidc.failed-to-update-username");
}
}
private async Task SyncRoles(OidcConfigDto settings, ClaimsPrincipal claimsPrincipal, AppUser user)
{
var roles = claimsPrincipal.GetClaimsWithPrefix(settings.RolesClaim, settings.RolesPrefix)
.Where(s => PolicyConstants.ValidRoles.Contains(s)).ToList();
logger.LogDebug("Syncing access roles for user {UserId}, found roles {Roles}", user.Id, roles);
var errors = (await accountService.UpdateRolesForUser(user, roles)).ToList();
if (errors.Any())
{
logger.LogError("Failed to sync roles {Errors}", errors.Select(x => x.Description).ToList());
throw new KavitaException("errors.oidc.syncing-user");
}
}
private async Task SyncLibraries(OidcConfigDto settings, ClaimsPrincipal claimsPrincipal, AppUser user)
{
var libraryAccessPrefix = settings.RolesPrefix + LibraryAccessPrefix;
var libraryAccess = claimsPrincipal.GetClaimsWithPrefix(settings.RolesClaim, libraryAccessPrefix);
logger.LogDebug("Syncing libraries for user {UserId}, found library roles {Roles}", user.Id, libraryAccess);
var allLibraries = (await unitOfWork.LibraryRepository.GetLibrariesAsync()).ToList();
// Distinct to ensure each library (id) is only present once
var librariesIds = allLibraries.Where(l => libraryAccess.Contains(l.Name)).Select(l => l.Id).Distinct().ToList();
var hasAdminRole = await userManager.IsInRoleAsync(user, PolicyConstants.AdminRole);
await accountService.UpdateLibrariesForUser(user, librariesIds, hasAdminRole);
}
private async Task SyncAgeRestriction(OidcConfigDto settings, ClaimsPrincipal claimsPrincipal, AppUser user)
{
if (await userManager.IsInRoleAsync(user, PolicyConstants.AdminRole))
{
logger.LogDebug("User {UserId} is admin, granting access to all age ratings", user.Id);
user.AgeRestriction = AgeRating.NotApplicable;
user.AgeRestrictionIncludeUnknowns = true;
return;
}
var ageRatingPrefix = settings.RolesPrefix + AgeRestrictionPrefix;
var ageRatings = claimsPrincipal.GetClaimsWithPrefix(settings.RolesClaim, ageRatingPrefix);
logger.LogDebug("Syncing age restriction for user {UserId}, found restrictions {Restrictions}", user.Id, ageRatings);
if (ageRatings.Count == 0 || (ageRatings.Count == 1 && ageRatings.Contains(IncludeUnknowns)))
{
logger.LogDebug("No age restriction found in roles, setting to NotApplicable and Include Unknowns: {IncludeUnknowns}", settings.DefaultIncludeUnknowns);
user.AgeRestriction = AgeRating.NotApplicable;
user.AgeRestrictionIncludeUnknowns = true;
return;
}
var highestAgeRestriction = AgeRating.NotApplicable;
foreach (var ar in ageRatings)
{
if (!EnumExtensions.TryParse(ar, out AgeRating ageRating))
{
logger.LogDebug("Age Restriction role configured that failed to map to a known age rating: {RoleName}", AgeRestrictionPrefix+ar);
continue;
}
if (ageRating > highestAgeRestriction)
{
highestAgeRestriction = ageRating;
}
}
user.AgeRestriction = highestAgeRestriction;
user.AgeRestrictionIncludeUnknowns = ageRatings.Contains(IncludeUnknowns);
logger.LogDebug("Synced age restriction for user {UserId}, AgeRestriction {AgeRestriction}, IncludeUnknowns: {IncludeUnknowns}",
user.Id, user.AgeRestriction, user.AgeRestrictionIncludeUnknowns);
}
/// <summary>
/// Loads the discovery document if not already loaded, then refreshes the tokens for the user
/// </summary>
/// <param name="dto"></param>
/// <param name="refreshToken"></param>
/// <returns></returns>
/// <exception cref="InvalidOperationException"></exception>
private static async Task<OpenIdConnectMessage> RefreshTokenAsync(OidcConfigDto dto, string refreshToken)
{
var discoveryDocument = await OidcConfigurationManager.GetConfigurationAsync();
var msg = new
{
grant_type = RefreshToken,
refresh_token = refreshToken,
client_id = dto.ClientId,
client_secret = dto.Secret,
};
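// This is a standard OAuth2 refresh_token grant; PostUrlEncodedAsync sends it as a form-encoded body like
// grant_type=refresh_token&refresh_token=...&client_id=...&client_secret=... (values elided)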
var json = await discoveryDocument.TokenEndpoint
.AllowAnyHttpStatus()
.PostUrlEncodedAsync(msg)
.ReceiveString();
return new OpenIdConnectMessage(json);
}
/// <summary>
/// Loads the discovery document if not already loaded, then parses the given id token securely
/// </summary>
/// <param name="dto"></param>
/// <param name="idToken"></param>
/// <returns></returns>
/// <exception cref="InvalidOperationException"></exception>
private static async Task<ClaimsPrincipal> ParseIdToken(OidcConfigDto dto, string idToken)
{
var discoveryDocument = await OidcConfigurationManager.GetConfigurationAsync();
var tokenValidationParameters = new TokenValidationParameters
{
ValidIssuer = discoveryDocument.Issuer,
ValidAudience = dto.ClientId,
IssuerSigningKeys = discoveryDocument.SigningKeys,
ValidateIssuerSigningKey = true,
};
var handler = new JwtSecurityTokenHandler();
var principal = handler.ValidateToken(idToken, tokenValidationParameters, out _);
return principal;
}
/// <summary>
/// Return a list of claims in the same way the NativeJWT token would map them.
/// Optionally include original claims if the claims are needed later in the pipeline
/// </summary>
/// <param name="services"></param>
/// <param name="principal"></param>
/// <param name="user"></param>
/// <param name="includeOriginalClaims"></param>
/// <returns></returns>
public static async Task<List<Claim>> ConstructNewClaimsList(IServiceProvider services, ClaimsPrincipal? principal, AppUser user, bool includeOriginalClaims = true)
{
var claims = new List<Claim>
{
new(ClaimTypes.NameIdentifier, user.Id.ToString()),
new(JwtRegisteredClaimNames.Name, user.UserName ?? string.Empty),
new(ClaimTypes.Name, user.UserName ?? string.Empty),
};
var userManager = services.GetRequiredService<UserManager<AppUser>>();
var roles = await userManager.GetRolesAsync(user);
claims.AddRange(roles.Select(role => new Claim(ClaimTypes.Role, role)));
if (includeOriginalClaims)
{
claims.AddRange(principal?.Claims ?? []);
}
return claims;
}
}

View File

@ -681,13 +681,34 @@ public class ExternalMetadataService : IExternalMetadataService
return [.. staff];
}
/// <summary>
/// Helper overload that extracts genre and tag names from <paramref name="externalMetadata"/> and calls the list-based <see cref="GenerateGenreAndTagLists"/> overload
/// </summary>
/// <param name="externalMetadata"></param>
/// <param name="settings"></param>
/// <param name="processedTags"></param>
/// <param name="processedGenres"></param>
private static void GenerateGenreAndTagLists(ExternalSeriesDetailDto externalMetadata, MetadataSettingsDto settings,
ref List<string> processedTags, ref List<string> processedGenres)
{
externalMetadata.Tags ??= [];
externalMetadata.Genres ??= [];
GenerateGenreAndTagLists(externalMetadata.Genres, externalMetadata.Tags.Select(t => t.Name).ToList(),
settings, ref processedTags, ref processedGenres);
}
var mappings = ApplyFieldMappings(externalMetadata.Tags.Select(t => t.Name), MetadataFieldType.Tag, settings.FieldMappings);
/// <summary>
/// Run all genres and tags through the Metadata settings
/// </summary>
/// <param name="genres">Genres to process</param>
/// <param name="tags">Tags to process</param>
/// <param name="settings"></param>
/// <param name="processedTags"></param>
/// <param name="processedGenres"></param>
private static void GenerateGenreAndTagLists(IList<string> genres, IList<string> tags, MetadataSettingsDto settings,
ref List<string> processedTags, ref List<string> processedGenres)
{
var mappings = ApplyFieldMappings(tags, MetadataFieldType.Tag, settings.FieldMappings);
if (mappings.TryGetValue(MetadataFieldType.Tag, out var tagsToTags))
{
processedTags.AddRange(tagsToTags);
@ -697,7 +718,7 @@ public class ExternalMetadataService : IExternalMetadataService
processedGenres.AddRange(tagsToGenres);
}
mappings = ApplyFieldMappings(externalMetadata.Genres, MetadataFieldType.Genre, settings.FieldMappings);
mappings = ApplyFieldMappings(genres, MetadataFieldType.Genre, settings.FieldMappings);
if (mappings.TryGetValue(MetadataFieldType.Tag, out var genresToTags))
{
processedTags.AddRange(genresToTags);
@ -711,6 +732,30 @@ public class ExternalMetadataService : IExternalMetadataService
processedGenres = ApplyBlackWhiteList(settings, MetadataFieldType.Genre, processedGenres);
}
/// <summary>
/// Processes the given tags and genres only if <see cref="MetadataSettingsDto.EnableExtendedMetadataProcessing"/>
/// is true, else return without change
/// </summary>
/// <param name="genres"></param>
/// <param name="tags"></param>
/// <param name="settings"></param>
/// <param name="processedTags"></param>
/// <param name="processedGenres"></param>
public static void GenerateExternalGenreAndTagsList(IList<string> genres, IList<string> tags,
MetadataSettingsDto settings, out List<string> processedTags, out List<string> processedGenres)
{
if (!settings.EnableExtendedMetadataProcessing)
{
processedTags = [..tags];
processedGenres = [..genres];
return;
}
processedTags = [];
processedGenres = [];
GenerateGenreAndTagLists(genres, tags, settings, ref processedTags, ref processedGenres);
}
private async Task<bool> UpdateRelationships(Series series, MetadataSettingsDto settings, IList<SeriesRelationship>? externalMetadataRelations, AppUser defaultAdmin)
{
if (!settings.EnableRelationships) return false;
@ -1003,16 +1048,19 @@ public class ExternalMetadataService : IExternalMetadataService
private static List<string> ApplyBlackWhiteList(MetadataSettingsDto settings, MetadataFieldType fieldType, List<string> processedStrings)
{
var whiteList = settings.Whitelist.Select(t => t.ToNormalized()).ToList();
var blackList = settings.Blacklist.Select(t => t.ToNormalized()).ToList();
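// Comparisons happen on normalized forms, so entries that differ only in casing (e.g. "Mature" vs "mature")
// are treated as the same value; the exact folding depends on ToNormalized()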
return fieldType switch
{
MetadataFieldType.Genre => processedStrings.Distinct()
.Where(g => settings.Blacklist.Count == 0 || !settings.Blacklist.Contains(g))
.Where(g => blackList.Count == 0 || !blackList.Contains(g.ToNormalized()))
.ToList(),
MetadataFieldType.Tag => processedStrings.Distinct()
.Where(g => settings.Blacklist.Count == 0 || !settings.Blacklist.Contains(g))
.Where(g => settings.Whitelist.Count == 0 || settings.Whitelist.Contains(g))
.Where(g => blackList.Count == 0 || !blackList.Contains(g.ToNormalized()))
.Where(g => whiteList.Count == 0 || whiteList.Contains(g.ToNormalized()))
.ToList(),
_ => throw new ArgumentOutOfRangeException(nameof(fieldType), fieldType, null)
_ => throw new ArgumentOutOfRangeException(nameof(fieldType), fieldType, null),
};
}
@ -1718,24 +1766,22 @@ public class ExternalMetadataService : IExternalMetadataService
foreach (var value in values)
{
var mapping = mappings.FirstOrDefault(m =>
var matchingMappings = mappings.Where(m =>
m.SourceType == sourceType &&
m.SourceValue.Equals(value, StringComparison.OrdinalIgnoreCase));
m.SourceValue.ToNormalized().Equals(value.ToNormalized()));
if (mapping != null && !string.IsNullOrWhiteSpace(mapping.DestinationValue))
var keepOriginal = true;
foreach (var mapping in matchingMappings.Where(mapping => !string.IsNullOrWhiteSpace(mapping.DestinationValue)))
{
var targetType = mapping.DestinationType;
result[mapping.DestinationType].Add(mapping.DestinationValue);
if (!mapping.ExcludeFromSource)
{
result[sourceType].Add(mapping.SourceValue);
}
result[targetType].Add(mapping.DestinationValue);
// Only keep the original value if none of the matching mappings exclude it from the source
keepOriginal = keepOriginal && !mapping.ExcludeFromSource;
}
else
if (keepOriginal)
{
// If no mapping, keep the original value
result[sourceType].Add(value);
}
}
@ -1760,9 +1806,15 @@ public class ExternalMetadataService : IExternalMetadataService
{
// Find highest age rating from mappings
mappings ??= new Dictionary<string, AgeRating>();
mappings = mappings
.GroupBy(m => m.Key.ToNormalized())
.ToDictionary(
g => g.Key,
g => g.Max(m => m.Value)
);
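// e.g. hypothetical mappings { "Mature": Mature, "MATURE": R18Plus } collapse to one normalized key holding the higher rating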
return values
.Select(v => mappings.TryGetValue(v, out var mapping) ? mapping : AgeRating.Unknown)
.Select(v => mappings.GetValueOrDefault(v.ToNormalized(), AgeRating.Unknown))
.DefaultIfEmpty(AgeRating.Unknown)
.Max();
}

View File

@ -209,12 +209,17 @@ public class SeriesService : ISeriesService
{
var metadataSettings = await _unitOfWork.SettingsRepository.GetMetadataSettingDto();
var allTags = series.Metadata.Tags.Select(t => t.Title).Concat(series.Metadata.Genres.Select(g => g.Title));
var updatedRating = ExternalMetadataService.DetermineAgeRating(allTags, metadataSettings.AgeRatingMappings);
if (updatedRating > series.Metadata.AgeRating)
if (metadataSettings.EnableExtendedMetadataProcessing)
{
series.Metadata.AgeRating = updatedRating;
series.Metadata.KPlusOverrides.Remove(MetadataSettingField.AgeRating);
var updatedRating = ExternalMetadataService.DetermineAgeRating(allTags, metadataSettings.AgeRatingMappings);
if (updatedRating > series.Metadata.AgeRating)
{
series.Metadata.AgeRating = updatedRating;
series.Metadata.KPlusOverrides.Remove(MetadataSettingField.AgeRating);
}
}
}
}

View File

@ -1,28 +1,48 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Security.Claims;
using System.Text.Json;
using System.Threading.Tasks;
using API.Data;
using API.DTOs;
using API.DTOs.KavitaPlus.Metadata;
using API.DTOs.Settings;
using API.Entities;
using API.Entities.Enums;
using API.Entities.MetadataMatching;
using API.Extensions;
using API.Logging;
using API.Services.Tasks.Scanner;
using Flurl.Http;
using Hangfire;
using Kavita.Common;
using Kavita.Common.EnvironmentInfo;
using Kavita.Common.Helpers;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using Microsoft.IdentityModel.Protocols.OpenIdConnect;
namespace API.Services;
public interface ISettingsService
{
Task<MetadataSettingsDto> UpdateMetadataSettings(MetadataSettingsDto dto);
/// <summary>
/// Update <see cref="MetadataSettings.Whitelist"/>, <see cref="MetadataSettings.Blacklist"/>, <see cref="MetadataSettings.AgeRatingMappings"/>, <see cref="MetadataSettings.FieldMappings"/>
/// with data from the given dto.
/// </summary>
/// <param name="dto"></param>
/// <param name="settings"></param>
/// <returns></returns>
Task<FieldMappingsImportResultDto> ImportFieldMappings(FieldMappingsDto dto, ImportSettingsDto settings);
Task<ServerSettingDto> UpdateSettings(ServerSettingDto updateSettingsDto);
/// <summary>
/// Check if the server can reach the authority at the given uri
/// </summary>
/// <param name="authority"></param>
/// <returns></returns>
Task<bool> IsValidAuthority(string authority);
}
@ -33,16 +53,18 @@ public class SettingsService : ISettingsService
private readonly ILibraryWatcher _libraryWatcher;
private readonly ITaskScheduler _taskScheduler;
private readonly ILogger<SettingsService> _logger;
private readonly IOidcService _oidcService;
public SettingsService(IUnitOfWork unitOfWork, IDirectoryService directoryService,
ILibraryWatcher libraryWatcher, ITaskScheduler taskScheduler,
ILogger<SettingsService> logger)
ILogger<SettingsService> logger, IOidcService oidcService)
{
_unitOfWork = unitOfWork;
_directoryService = directoryService;
_libraryWatcher = libraryWatcher;
_taskScheduler = taskScheduler;
_logger = logger;
_oidcService = oidcService;
}
/// <summary>
@ -54,6 +76,7 @@ public class SettingsService : ISettingsService
{
var existingMetadataSetting = await _unitOfWork.SettingsRepository.GetMetadataSettings();
existingMetadataSetting.Enabled = dto.Enabled;
existingMetadataSetting.EnableExtendedMetadataProcessing = dto.EnableExtendedMetadataProcessing;
existingMetadataSetting.EnableSummary = dto.EnableSummary;
existingMetadataSetting.EnableLocalizedName = dto.EnableLocalizedName;
existingMetadataSetting.EnablePublicationStatus = dto.EnablePublicationStatus;
@ -108,6 +131,150 @@ public class SettingsService : ISettingsService
return await _unitOfWork.SettingsRepository.GetMetadataSettingDto();
}
public async Task<FieldMappingsImportResultDto> ImportFieldMappings(FieldMappingsDto dto, ImportSettingsDto settings)
{
if (dto.AgeRatingMappings.Keys.Distinct().Count() != dto.AgeRatingMappings.Count)
{
throw new KavitaException("errors.import-fields.non-unique-age-ratings");
}
if (dto.FieldMappings.DistinctBy(f => f.Id).Count() != dto.FieldMappings.Count)
{
throw new KavitaException("errors.import-fields.non-unique-fields");
}
return settings.ImportMode switch
{
ImportMode.Merge => await MergeFieldMappings(dto, settings),
ImportMode.Replace => await ReplaceFieldMappings(dto, settings),
_ => throw new ArgumentOutOfRangeException(nameof(settings), $"Invalid import mode {settings.ImportMode}")
};
}
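
A hedged sketch of how a caller (for example, a controller action) might drive this import; the endpoint, `_settingsService`, and the response helpers are hypothetical, and only DTO members visible in this diff are used:

```csharp
// Hypothetical controller action; ISettingsService and the DTOs are the ones added in this diff.
[HttpPost("import-field-mappings")]
public async Task<ActionResult<MetadataSettingsDto>> ImportFieldMappings(FieldMappingsDto dto)
{
    var settings = new ImportSettingsDto
    {
        ImportMode = ImportMode.Merge,
        AgeRatings = true,
        FieldMappings = true,
        Resolution = ConflictResolution.Manual, // ask the user when a mapped key already differs
    };

    var result = await _settingsService.ImportFieldMappings(dto, settings);
    if (!result.Success)
    {
        // Each entry is an age-rating key that needs a per-key entry in
        // AgeRatingConflictResolutions before the import is retried.
        return Conflict(result.AgeRatingConflicts);
    }

    return Ok(result.ResultingMetadataSettings);
}
```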
/// <summary>
/// Fully replaces any enabled fields; always succeeds
/// </summary>
/// <param name="dto"></param>
/// <param name="settings"></param>
/// <returns></returns>
private async Task<FieldMappingsImportResultDto> ReplaceFieldMappings(FieldMappingsDto dto, ImportSettingsDto settings)
{
var existingMetadataSetting = await _unitOfWork.SettingsRepository.GetMetadataSettingDto();
if (settings.Whitelist)
{
existingMetadataSetting.Whitelist = dto.Whitelist;
}
if (settings.Blacklist)
{
existingMetadataSetting.Blacklist = dto.Blacklist;
}
if (settings.AgeRatings)
{
existingMetadataSetting.AgeRatingMappings = dto.AgeRatingMappings;
}
if (settings.FieldMappings)
{
existingMetadataSetting.FieldMappings = dto.FieldMappings;
}
return new FieldMappingsImportResultDto
{
Success = true,
ResultingMetadataSettings = existingMetadataSetting,
AgeRatingConflicts = [],
};
}
/// <summary>
/// Tries to merge all enabled fields; fails if any conflict requires manual resolution. Always processes all items so every conflict is collected
/// </summary>
/// <param name="dto"></param>
/// <param name="settings"></param>
/// <returns></returns>
private async Task<FieldMappingsImportResultDto> MergeFieldMappings(FieldMappingsDto dto, ImportSettingsDto settings)
{
var existingMetadataSetting = await _unitOfWork.SettingsRepository.GetMetadataSettingDto();
if (settings.Whitelist)
{
existingMetadataSetting.Whitelist = existingMetadataSetting.Whitelist.Union(dto.Whitelist).DistinctBy(d => d.ToNormalized()).ToList();
}
if (settings.Blacklist)
{
existingMetadataSetting.Blacklist = existingMetadataSetting.Blacklist.Union(dto.Blacklist).DistinctBy(d => d.ToNormalized()).ToList();
}
List<string> ageRatingConflicts = [];
if (settings.AgeRatings)
{
foreach (var arm in dto.AgeRatingMappings)
{
if (!existingMetadataSetting.AgeRatingMappings.TryGetValue(arm.Key, out var mapping))
{
existingMetadataSetting.AgeRatingMappings.Add(arm.Key, arm.Value);
continue;
}
if (arm.Value == mapping)
{
continue;
}
var resolution = settings.AgeRatingConflictResolutions.GetValueOrDefault(arm.Key, settings.Resolution);
switch (resolution)
{
case ConflictResolution.Keep: continue;
case ConflictResolution.Replace:
existingMetadataSetting.AgeRatingMappings[arm.Key] = arm.Value;
break;
case ConflictResolution.Manual:
ageRatingConflicts.Add(arm.Key);
break;
default:
throw new ArgumentOutOfRangeException(nameof(settings), $"Invalid conflict resolution {resolution}.")
}
}
}
if (settings.FieldMappings)
{
existingMetadataSetting.FieldMappings = existingMetadataSetting.FieldMappings
.Union(dto.FieldMappings)
.DistinctBy(fm => new
{
fm.SourceType,
SourceValue = fm.SourceValue.ToNormalized(),
fm.DestinationType,
DestinationValue = fm.DestinationValue.ToNormalized(),
})
.ToList();
}
if (ageRatingConflicts.Count > 0)
{
return new FieldMappingsImportResultDto
{
Success = false,
AgeRatingConflicts = ageRatingConflicts,
};
}
return new FieldMappingsImportResultDto
{
Success = true,
ResultingMetadataSettings = existingMetadataSetting,
AgeRatingConflicts = [],
};
}
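
The age-rating handling above boils down to a per-key three-way policy; a minimal standalone sketch with simplified types (the value type and names are hypothetical):

```csharp
using System;
using System.Collections.Generic;

// Simplified three-way merge mirroring the Keep/Replace/Manual handling above.
enum Resolution { Keep, Replace, Manual }

static class MergeSketch
{
    // Returns the keys that still need manual resolution (non-empty => Success = false).
    public static List<string> Merge(
        Dictionary<string, int> existing,
        Dictionary<string, int> incoming,
        Func<string, Resolution> resolutionFor)
    {
        var conflicts = new List<string>();
        foreach (var (key, value) in incoming)
        {
            if (!existing.TryGetValue(key, out var current))
            {
                existing[key] = value; // new key, no conflict
                continue;
            }
            if (current == value) continue; // identical value, nothing to do

            switch (resolutionFor(key))
            {
                case Resolution.Keep: break;                            // existing wins
                case Resolution.Replace: existing[key] = value; break;  // incoming wins
                case Resolution.Manual: conflicts.Add(key); break;      // report back to the user
            }
        }

        return conflicts;
    }
}
```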
/// <summary>
/// Update Server Settings
/// </summary>
@ -135,6 +302,7 @@ public class SettingsService : ISettingsService
}
var updateTask = false;
var updatedOidcSettings = false;
foreach (var setting in currentSettings)
{
if (setting.Key == ServerSettingKey.OnDeckProgressDays &&
@ -172,7 +340,7 @@ public class SettingsService : ISettingsService
updateTask = updateTask || UpdateSchedulingSettings(setting, updateSettingsDto);
UpdateEmailSettings(setting, updateSettingsDto);
updatedOidcSettings = await UpdateOidcSettings(setting, updateSettingsDto) || updatedOidcSettings;
if (setting.Key == ServerSettingKey.IpAddresses && updateSettingsDto.IpAddresses != setting.Value)
@ -324,6 +492,17 @@ public class SettingsService : ISettingsService
BackgroundJob.Enqueue(() => _taskScheduler.ScheduleTasks());
}
if (updatedOidcSettings)
{
Configuration.OidcSettings = new Configuration.OpenIdConnectSettings
{
Authority = updateSettingsDto.OidcConfig.Authority,
ClientId = updateSettingsDto.OidcConfig.ClientId,
Secret = updateSettingsDto.OidcConfig.Secret,
CustomScopes = updateSettingsDto.OidcConfig.CustomScopes,
};
}
if (updateSettingsDto.EnableFolderWatching)
{
BackgroundJob.Enqueue(() => _libraryWatcher.StartWatching());
@ -346,6 +525,29 @@ public class SettingsService : ISettingsService
return updateSettingsDto;
}
public async Task<bool> IsValidAuthority(string authority)
{
if (string.IsNullOrEmpty(authority))
{
return false;
}
try
{
var hasTrailingSlash = authority.EndsWith('/');
var url = authority + (hasTrailingSlash ? string.Empty : "/") + ".well-known/openid-configuration";
var json = await url.GetStringAsync();
var config = OpenIdConnectConfiguration.Create(json);
return config.Issuer == authority;
}
catch (Exception e)
{
_logger.LogDebug(e, "OpenIdConfiguration failed: {Reason}", e.Message);
return false;
}
}
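
For reference, the same discovery check as a self-contained sketch using plain HttpClient and System.Text.Json instead of the Flurl and IdentityModel helpers used above; the class name and slash handling are illustrative only:

```csharp
using System;
using System.Net.Http;
using System.Text.Json;
using System.Threading.Tasks;

// Sketch: fetch .well-known/openid-configuration and verify the advertised issuer
// matches the configured authority.
static class OidcDiscoverySketch
{
    private static readonly HttpClient Client = new();

    public static async Task<bool> IsValidAuthority(string authority)
    {
        if (string.IsNullOrEmpty(authority)) return false;

        try
        {
            var url = authority.TrimEnd('/') + "/.well-known/openid-configuration";
            var json = await Client.GetStringAsync(url);

            using var doc = JsonDocument.Parse(json);
            var issuer = doc.RootElement.GetProperty("issuer").GetString();

            return issuer == authority;
        }
        catch (Exception)
        {
            return false; // unreachable, not JSON, or no issuer => not a usable authority
        }
    }
}
```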
private void UpdateBookmarkDirectory(string originalBookmarkDirectory, string bookmarkDirectory)
{
_directoryService.ExistOrCreate(bookmarkDirectory);
@ -379,6 +581,45 @@ public class SettingsService : ISettingsService
return false;
}
/// <summary>
/// Updates OIDC settings and returns true if a change was made
/// </summary>
/// <param name="setting"></param>
/// <param name="updateSettingsDto"></param>
/// <returns></returns>
/// <remarks>Does not commit any changes</remarks>
/// <exception cref="KavitaException">If the authority is invalid</exception>
private async Task<bool> UpdateOidcSettings(ServerSetting setting, ServerSettingDto updateSettingsDto)
{
if (setting.Key != ServerSettingKey.OidcConfiguration) return false;
if (updateSettingsDto.OidcConfig.RolesClaim.Trim() == string.Empty)
{
updateSettingsDto.OidcConfig.RolesClaim = ClaimTypes.Role;
}
var newValue = JsonSerializer.Serialize(updateSettingsDto.OidcConfig);
if (setting.Value == newValue) return false;
var currentConfig = JsonSerializer.Deserialize<OidcConfigDto>(setting.Value)!;
if (currentConfig.Authority != updateSettingsDto.OidcConfig.Authority)
{
if (!await IsValidAuthority(updateSettingsDto.OidcConfig.Authority))
{
throw new KavitaException("oidc-invalid-authority");
}
_logger.LogWarning("OIDC Authority is changing, clearing all external ids");
await _oidcService.ClearOidcIds();
}
setting.Value = newValue;
_unitOfWork.SettingsRepository.Update(setting);
return true;
}
private void UpdateEmailSettings(ServerSetting setting, ServerSettingDto updateSettingsDto)
{
if (setting.Key == ServerSettingKey.EmailHost &&

View File

@ -0,0 +1,59 @@
using System;
using System.Security.Cryptography;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Authentication.Cookies;
using Microsoft.Extensions.Caching.Memory;
namespace API.Services.Store;
public class CustomTicketStore(IMemoryCache cache): ITicketStore
{
public async Task<string> StoreAsync(AuthenticationTicket ticket)
{
// Note: A cryptographically random key may not be strictly necessary here, but better safe than sorry
var bytes = new byte[32];
RandomNumberGenerator.Fill(bytes);
var key = Convert.ToBase64String(bytes);
await RenewAsync(key, ticket);
return key;
}
public Task RenewAsync(string key, AuthenticationTicket ticket)
{
var options = new MemoryCacheEntryOptions
{
Priority = CacheItemPriority.NeverRemove,
Size = 1,
};
var expiresUtc = ticket.Properties.ExpiresUtc;
if (expiresUtc.HasValue)
{
options.AbsoluteExpiration = expiresUtc.Value;
}
else
{
options.SlidingExpiration = TimeSpan.FromDays(7);
}
cache.Set(key, ticket, options);
return Task.CompletedTask;
}
public Task<AuthenticationTicket> RetrieveAsync(string key)
{
return Task.FromResult(cache.Get<AuthenticationTicket>(key));
}
public Task RemoveAsync(string key)
{
cache.Remove(key);
return Task.CompletedTask;
}
}
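
For context, a hedged sketch of how a store like this is typically wired up; the host setup below is hypothetical and not part of this diff. CookieAuthenticationOptions.SessionStore is the standard ITicketStore hook, and because entries set Size = 1 the backing cache needs a SizeLimit:

```csharp
using API.Services.Store;
using Microsoft.AspNetCore.Authentication.Cookies;
using Microsoft.AspNetCore.Builder;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.DependencyInjection;

var builder = WebApplication.CreateBuilder(args);

// Entries in CustomTicketStore set Size = 1, so the backing cache must declare a SizeLimit.
var ticketCache = new MemoryCache(new MemoryCacheOptions { SizeLimit = 10_000 });

builder.Services
    .AddAuthentication(CookieAuthenticationDefaults.AuthenticationScheme)
    .AddCookie(options =>
    {
        // The cookie only carries the lookup key; the full AuthenticationTicket lives server-side.
        options.SessionStore = new CustomTicketStore(ticketCache);
    });

var app = builder.Build();
app.Run();
```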

Some files were not shown because too many files have changed in this diff.