New Scanner + People Pages (#3286)

Co-authored-by: Robbie Davis <robbie@therobbiedavis.com>
Joe Milazzo 2024-10-23 15:11:18 -07:00 committed by GitHub
parent 1ed0eae22d
commit ba20ad4ecc
142 changed files with 17529 additions and 3038 deletions

View File

@ -29,7 +29,6 @@
<ItemGroup>
<Folder Include="Services\Test Data\ArchiveService\ComicInfos" />
<Folder Include="Services\Test Data\ImageService\Covers\" />
<Folder Include="Services\Test Data\ScannerService\Manga" />
</ItemGroup>
<ItemGroup>

View File

@ -126,28 +126,45 @@ public class QueryableExtensionsTests
[InlineData(false, 1)]
public void RestrictAgainstAgeRestriction_Person_ShouldRestrictEverythingAboveTeen(bool includeUnknowns, int expectedCount)
{
var items = new List<Person>()
// Arrange
var items = new List<Person>
{
new PersonBuilder("Test", PersonRole.Character)
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
.Build(),
new PersonBuilder("Test", PersonRole.Character)
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Unknown).Build())
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.Teen).Build())
.Build(),
new PersonBuilder("Test", PersonRole.Character)
.WithSeriesMetadata(new SeriesMetadataBuilder().WithAgeRating(AgeRating.X18Plus).Build())
.Build(),
CreatePersonWithSeriesMetadata("Test1", AgeRating.Teen),
CreatePersonWithSeriesMetadata("Test2", AgeRating.Unknown, AgeRating.Teen),
CreatePersonWithSeriesMetadata("Test3", AgeRating.X18Plus)
};
var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(new AgeRestriction()
var ageRestriction = new AgeRestriction
{
AgeRating = AgeRating.Teen,
IncludeUnknowns = includeUnknowns
});
};
// Act
var filtered = items.AsQueryable().RestrictAgainstAgeRestriction(ageRestriction);
// Assert
Assert.Equal(expectedCount, filtered.Count());
}
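// The InlineData above pins down the restriction semantics: every series a
// person appears on must sit at or below the cap, and any Unknown-rated
// series disqualifies the person unless IncludeUnknowns is set. A hedged
// sketch of an equivalent in-memory filter (assumes AgeRating.Unknown orders
// below AgeRating.Teen in the enum; Kavita's real query-side implementation
// may differ):
private static IQueryable<Person> RestrictToTeenSketch(IQueryable<Person> people, bool includeUnknowns)
{
    return people.Where(p =>
        p.SeriesMetadataPeople.All(smp => smp.SeriesMetadata.AgeRating <= AgeRating.Teen)
        && (includeUnknowns || p.SeriesMetadataPeople.All(smp =>
            smp.SeriesMetadata.AgeRating != AgeRating.Unknown)));
}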
private static Person CreatePersonWithSeriesMetadata(string name, params AgeRating[] ageRatings)
{
var person = new PersonBuilder(name).Build();
foreach (var ageRating in ageRatings)
{
var seriesMetadata = new SeriesMetadataBuilder().WithAgeRating(ageRating).Build();
person.SeriesMetadataPeople.Add(new SeriesMetadataPeople
{
SeriesMetadata = seriesMetadata,
Person = person,
Role = PersonRole.Character // Role is now part of the relationship
});
}
return person;
}
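// A hedged sketch of the join entity the helper above populates. Only the
// members actually used in this file (SeriesMetadata, Person, Role) come from
// the source; the Id/FK properties are assumptions added for illustration.
public class SeriesMetadataPeopleSketch
{
    public int SeriesMetadataId { get; set; }            // assumed FK
    public SeriesMetadata SeriesMetadata { get; set; } = null!;
    public int PersonId { get; set; }                    // assumed FK
    public Person Person { get; set; } = null!;
    public PersonRole Role { get; set; }                 // the role now lives on the join row
}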
[Theory]
[InlineData(true, 2)]
[InlineData(false, 1)]

View File

@ -185,6 +185,35 @@ public class SeriesExtensionsTests
Assert.Equal("Volume 1 Chapter 1", series.GetCoverImage());
}
[Fact]
public void GetCoverImage_JustVolumes_ButVolume0()
{
var series = new SeriesBuilder("Test 1")
.WithFormat(MangaFormat.Archive)
.WithVolume(new VolumeBuilder("0")
.WithName("Volume 0")
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithCoverImage("Volume 0")
.Build())
.Build())
.WithVolume(new VolumeBuilder("1")
.WithName("Volume 1")
.WithChapter(new ChapterBuilder(Parser.DefaultChapter)
.WithCoverImage("Volume 1")
.Build())
.Build())
.Build();
foreach (var vol in series.Volumes)
{
vol.CoverImage = vol.Chapters.MinBy(x => x.SortOrder, ChapterSortComparerDefaultFirst.Default)?.CoverImage;
}
Assert.Equal("Volume 1", series.GetCoverImage());
}
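// Hedged sketch of the rule this assert encodes: prefer the first real
// volume's cover and fall back to the catch-all Volume 0 only when nothing
// else exists. "MinNumber" is an assumed volume property; GetCoverImage's
// actual logic is broader than this.
private static string? PickSeriesCoverSketch(Series series)
{
    return series.Volumes
        .Where(v => v.MinNumber >= 1)        // skip the Volume 0 bucket
        .OrderBy(v => v.MinNumber)
        .FirstOrDefault()?.CoverImage
        ?? series.Volumes.FirstOrDefault()?.CoverImage;
}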
[Fact]
public void GetCoverImage_JustSpecials_WithDecimal()
{

View File

@ -1,128 +0,0 @@
using System.Collections.Generic;
using API.Data;
using API.Entities;
using API.Extensions;
using API.Helpers;
using API.Helpers.Builders;
using Xunit;
namespace API.Tests.Helpers;
public class GenreHelperTests
{
[Fact]
public void UpdateGenre_ShouldAddNewGenre()
{
var allGenres = new Dictionary<string, Genre>
{
{"Action".ToNormalized(), new GenreBuilder("Action").Build()},
{"Sci-fi".ToNormalized(), new GenreBuilder("Sci-fi").Build()}
};
var genreAdded = new List<Genre>();
var addedCount = 0;
GenreHelper.UpdateGenre(allGenres, new[] {"Action", "Adventure"}, (genre, isNew) =>
{
if (isNew)
{
addedCount++;
}
genreAdded.Add(genre);
});
Assert.Equal(2, genreAdded.Count);
Assert.Equal(1, addedCount);
Assert.Equal(3, allGenres.Count);
}
[Fact]
public void UpdateGenre_ShouldNotAddDuplicateGenre()
{
var allGenres = new Dictionary<string, Genre>
{
{"Action".ToNormalized(), new GenreBuilder("Action").Build()},
{"Sci-fi".ToNormalized(), new GenreBuilder("Sci-fi").Build()}
};
var genreAdded = new List<Genre>();
var addedCount = 0;
GenreHelper.UpdateGenre(allGenres, new[] {"Action", "Scifi"}, (genre, isNew) =>
{
if (isNew)
{
addedCount++;
}
genreAdded.Add(genre);
});
Assert.Equal(0, addedCount);
Assert.Equal(2, genreAdded.Count);
Assert.Equal(2, allGenres.Count);
}
[Fact]
public void AddGenre_ShouldAddOnlyNonExistingGenre()
{
var existingGenres = new List<Genre>
{
new GenreBuilder("Action").Build(),
new GenreBuilder("action").Build(),
new GenreBuilder("Sci-fi").Build(),
};
GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("Action").Build());
Assert.Equal(3, existingGenres.Count);
GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("action").Build());
Assert.Equal(3, existingGenres.Count);
GenreHelper.AddGenreIfNotExists(existingGenres, new GenreBuilder("Shonen").Build());
Assert.Equal(4, existingGenres.Count);
}
[Fact]
public void KeepOnlySamePeopleBetweenLists()
{
var existingGenres = new List<Genre>
{
new GenreBuilder("Action").Build(),
new GenreBuilder("Sci-fi").Build(),
};
var peopleFromChapters = new List<Genre>
{
new GenreBuilder("Action").Build(),
};
var genreRemoved = new List<Genre>();
GenreHelper.KeepOnlySameGenreBetweenLists(existingGenres,
peopleFromChapters, genre =>
{
genreRemoved.Add(genre);
});
Assert.Single(genreRemoved);
}
[Fact]
public void RemoveEveryoneIfNothingInRemoveAllExcept()
{
var existingGenres = new List<Genre>
{
new GenreBuilder("Action").Build(),
new GenreBuilder("Sci-fi").Build(),
};
var peopleFromChapters = new List<Genre>();
var genreRemoved = new List<Genre>();
GenreHelper.KeepOnlySameGenreBetweenLists(existingGenres,
peopleFromChapters, genre =>
{
genreRemoved.Add(genre);
});
Assert.Equal(2, genreRemoved.Count);
}
}

View File

@ -1,415 +1,143 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.DTOs;
using API.Entities;
using API.Entities.Enums;
using API.Helpers;
using API.Helpers.Builders;
using API.Services.Tasks.Scanner.Parser;
using Xunit;
namespace API.Tests.Helpers;
public class PersonHelperTests
public class PersonHelperTests : AbstractDbTest
{
#region UpdatePeople
[Fact]
public void UpdatePeople_ShouldAddNewPeople()
protected override async Task ResetDb()
{
var allPeople = new List<Person>
{
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
};
var peopleAdded = new List<Person>();
PersonHelper.UpdatePeople(allPeople, new[] {"Joseph Shmo", "Sally Ann"}, PersonRole.Writer, person =>
{
peopleAdded.Add(person);
});
Assert.Equal(2, peopleAdded.Count);
Assert.Equal(4, allPeople.Count);
_context.Series.RemoveRange(_context.Series.ToList());
await _context.SaveChangesAsync();
}
[Fact]
public void UpdatePeople_ShouldNotAddDuplicatePeople()
{
var allPeople = new List<Person>
{
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
new PersonBuilder("Sally Ann", PersonRole.CoverArtist).Build(),
};
var peopleAdded = new List<Person>();
PersonHelper.UpdatePeople(allPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.CoverArtist, person =>
{
peopleAdded.Add(person);
});
Assert.Equal(3, allPeople.Count);
}
#endregion
#region UpdatePeopleList
[Fact]
public void UpdatePeopleList_NullTags_NoChanges()
{
// Arrange
ICollection<PersonDto> tags = null;
var series = new SeriesBuilder("Test Series").Build();
var allTags = new List<Person>();
var handleAddCalled = false;
var onModifiedCalled = false;
// Act
PersonHelper.UpdatePeopleList(PersonRole.Writer, tags, series, allTags, p => handleAddCalled = true, () => onModifiedCalled = true);
// Assert
Assert.False(handleAddCalled);
Assert.False(onModifiedCalled);
}
[Fact]
public void UpdatePeopleList_AddNewTag_TagAddedAndOnModifiedCalled()
{
// Arrange
const PersonRole role = PersonRole.Writer;
var tags = new List<PersonDto>
{
new PersonDto { Id = 1, Name = "John Doe", Role = role }
};
var series = new SeriesBuilder("Test Series").Build();
var allTags = new List<Person>();
var handleAddCalled = false;
var onModifiedCalled = false;
// Act
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
{
handleAddCalled = true;
series.Metadata.People.Add(p);
}, () => onModifiedCalled = true);
// Assert
Assert.True(handleAddCalled);
Assert.True(onModifiedCalled);
Assert.Single(series.Metadata.People);
Assert.Equal("John Doe", series.Metadata.People.First().Name);
}
[Fact]
public void UpdatePeopleList_RemoveExistingTag_TagRemovedAndOnModifiedCalled()
{
// Arrange
const PersonRole role = PersonRole.Writer;
var tags = new List<PersonDto>();
var series = new SeriesBuilder("Test Series").Build();
var person = new PersonBuilder("John Doe", role).Build();
person.Id = 1;
series.Metadata.People.Add(person);
var allTags = new List<Person>
{
person
};
var handleAddCalled = false;
var onModifiedCalled = false;
// Act
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
{
handleAddCalled = true;
series.Metadata.People.Add(p);
}, () => onModifiedCalled = true);
// Assert
Assert.False(handleAddCalled);
Assert.True(onModifiedCalled);
Assert.Empty(series.Metadata.People);
}
[Fact]
public void UpdatePeopleList_UpdateExistingTag_OnModifiedCalled()
{
// Arrange
const PersonRole role = PersonRole.Writer;
var tags = new List<PersonDto>
{
new PersonDto { Id = 1, Name = "John Doe", Role = role }
};
var series = new SeriesBuilder("Test Series").Build();
var person = new PersonBuilder("John Doe", role).Build();
person.Id = 1;
series.Metadata.People.Add(person);
var allTags = new List<Person>
{
person
};
var handleAddCalled = false;
var onModifiedCalled = false;
// Act
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
{
handleAddCalled = true;
series.Metadata.People.Add(p);
}, () => onModifiedCalled = true);
// Assert
Assert.False(handleAddCalled);
Assert.False(onModifiedCalled);
Assert.Single(series.Metadata.People);
Assert.Equal("John Doe", series.Metadata.People.First().Name);
}
[Fact]
public void UpdatePeopleList_NoChanges_HandleAddAndOnModifiedNotCalled()
{
// Arrange
const PersonRole role = PersonRole.Writer;
var tags = new List<PersonDto>
{
new PersonDto { Id = 1, Name = "John Doe", Role = role }
};
var series = new SeriesBuilder("Test Series").Build();
var person = new PersonBuilder("John Doe", role).Build();
person.Id = 1;
series.Metadata.People.Add(person);
var allTags = new List<Person>
{
new PersonBuilder("John Doe", role).Build()
};
var handleAddCalled = false;
var onModifiedCalled = false;
// Act
PersonHelper.UpdatePeopleList(role, tags, series, allTags, p =>
{
handleAddCalled = true;
series.Metadata.People.Add(p);
}, () => onModifiedCalled = true);
// Assert
Assert.False(handleAddCalled);
Assert.False(onModifiedCalled);
Assert.Single(series.Metadata.People);
Assert.Equal("John Doe", series.Metadata.People.First().Name);
}
#endregion
#region RemovePeople
[Fact]
public void RemovePeople_ShouldRemovePeopleOfSameRole()
{
var existingPeople = new List<Person>
{
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
};
var peopleRemoved = new List<Person>();
PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.Writer, person =>
{
peopleRemoved.Add(person);
});
Assert.NotEqual(existingPeople, peopleRemoved);
Assert.Single(peopleRemoved);
}
[Fact]
public void RemovePeople_ShouldRemovePeopleFromBothRoles()
{
var existingPeople = new List<Person>
{
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
};
var peopleRemoved = new List<Person>();
PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo", "Sally Ann"}, PersonRole.Writer, person =>
{
peopleRemoved.Add(person);
});
Assert.NotEqual(existingPeople, peopleRemoved);
Assert.Single(peopleRemoved);
PersonHelper.RemovePeople(existingPeople, new[] {"Joe Shmo"}, PersonRole.CoverArtist, person =>
{
peopleRemoved.Add(person);
});
Assert.Empty(existingPeople);
Assert.Equal(2, peopleRemoved.Count);
}
[Fact]
public void RemovePeople_ShouldRemovePeopleOfSameRole_WhenNothingPassed()
{
var existingPeople = new List<Person>
{
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
};
var peopleRemoved = new List<Person>();
PersonHelper.RemovePeople(existingPeople, new List<string>(), PersonRole.Writer, person =>
{
peopleRemoved.Add(person);
});
Assert.NotEqual(existingPeople, peopleRemoved);
Assert.Equal(2, peopleRemoved.Count);
}
#endregion
#region KeepOnlySamePeopleBetweenLists
[Fact]
public void KeepOnlySamePeopleBetweenLists()
{
var existingPeople = new List<Person>
{
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
new PersonBuilder("Sally", PersonRole.Writer).Build(),
};
var peopleFromChapters = new List<Person>
{
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
};
var peopleRemoved = new List<Person>();
PersonHelper.KeepOnlySamePeopleBetweenLists(existingPeople,
peopleFromChapters, person =>
{
peopleRemoved.Add(person);
});
Assert.Equal(2, peopleRemoved.Count);
}
#endregion
#region AddPeople
[Fact]
public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonDoesNotExist()
{
// Arrange
var metadataPeople = new List<Person>();
var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
// Act
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
// Assert
Assert.Single(metadataPeople);
Assert.Contains(person, metadataPeople);
}
[Fact]
public void AddPersonIfNotExists_ShouldNotAddPerson_WhenPersonAlreadyExists()
{
// Arrange
var metadataPeople = new List<Person>
{
new PersonBuilder("John Smith", PersonRole.Character)
.WithId(1)
.Build()
};
var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
// Act
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
// Assert
Assert.Single(metadataPeople);
Assert.NotNull(metadataPeople.SingleOrDefault(p =>
p.Name.Equals(person.Name) && p.Role == person.Role && p.NormalizedName == person.NormalizedName));
Assert.Equal(1, metadataPeople.First().Id);
}
[Fact]
public void AddPersonIfNotExists_ShouldNotAddPerson_WhenPersonNameIsNullOrEmpty()
{
// Arrange
var metadataPeople = new List<Person>();
var person2 = new PersonBuilder(string.Empty, PersonRole.Character).Build();
// Act
PersonHelper.AddPersonIfNotExists(metadataPeople, person2);
// Assert
Assert.Empty(metadataPeople);
}
[Fact]
public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonNameIsDifferentButRoleIsSame()
{
// Arrange
var metadataPeople = new List<Person>
{
new PersonBuilder("John Smith", PersonRole.Character).Build()
};
var person = new PersonBuilder("John Doe", PersonRole.Character).Build();
// Act
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
// Assert
Assert.Equal(2, metadataPeople.Count);
Assert.Contains(person, metadataPeople);
}
[Fact]
public void AddPersonIfNotExists_ShouldAddPerson_WhenPersonNameIsSameButRoleIsDifferent()
{
// Arrange
var metadataPeople = new List<Person>
{
new PersonBuilder("John Doe", PersonRole.Writer).Build()
};
var person = new PersonBuilder("John Smith", PersonRole.Character).Build();
// Act
PersonHelper.AddPersonIfNotExists(metadataPeople, person);
// Assert
Assert.Equal(2, metadataPeople.Count);
Assert.Contains(person, metadataPeople);
}
[Fact]
public void AddPeople_ShouldAddOnlyNonExistingPeople()
{
var existingPeople = new List<Person>
{
new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build(),
new PersonBuilder("Joe Shmo", PersonRole.Writer).Build(),
new PersonBuilder("Sally", PersonRole.Writer).Build(),
};
PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo", PersonRole.CoverArtist).Build());
Assert.Equal(3, existingPeople.Count);
PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo", PersonRole.Writer).Build());
Assert.Equal(3, existingPeople.Count);
PersonHelper.AddPersonIfNotExists(existingPeople, new PersonBuilder("Joe Shmo Two", PersonRole.CoverArtist).Build());
Assert.Equal(4, existingPeople.Count);
}
#endregion
//
// // 1. Test adding new people and keeping existing ones
// [Fact]
// public async Task UpdateChapterPeopleAsync_AddNewPeople_ExistingPersonRetained()
// {
// var existingPerson = new PersonBuilder("Joe Shmo").Build();
// var chapter = new ChapterBuilder("1").Build();
//
// // Create an existing person and assign them to the series with a role
// var series = new SeriesBuilder("Test 1")
// .WithFormat(MangaFormat.Archive)
// .WithMetadata(new SeriesMetadataBuilder()
// .WithPerson(existingPerson, PersonRole.Editor)
// .Build())
// .WithVolume(new VolumeBuilder("1").WithChapter(chapter).Build())
// .Build();
//
// _unitOfWork.SeriesRepository.Add(series);
// await _unitOfWork.CommitAsync();
//
// // Call UpdateChapterPeopleAsync with one existing and one new person
// await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo", "New Person" }, PersonRole.Editor, _unitOfWork);
//
// // Assert existing person retained and new person added
// var people = await _unitOfWork.PersonRepository.GetAllPeople();
// Assert.Contains(people, p => p.Name == "Joe Shmo");
// Assert.Contains(people, p => p.Name == "New Person");
//
// var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
// Assert.Contains("Joe Shmo", chapterPeople);
// Assert.Contains("New Person", chapterPeople);
// }
//
// // 2. Test removing a person no longer in the list
// [Fact]
// public async Task UpdateChapterPeopleAsync_RemovePeople()
// {
// var existingPerson1 = new PersonBuilder("Joe Shmo").Build();
// var existingPerson2 = new PersonBuilder("Jane Doe").Build();
// var chapter = new ChapterBuilder("1").Build();
//
// var series = new SeriesBuilder("Test 1")
// .WithVolume(new VolumeBuilder("1")
// .WithChapter(new ChapterBuilder("1")
// .WithPerson(existingPerson1, PersonRole.Editor)
// .WithPerson(existingPerson2, PersonRole.Editor)
// .Build())
// .Build())
// .Build();
//
// _unitOfWork.SeriesRepository.Add(series);
// await _unitOfWork.CommitAsync();
//
// // Call UpdateChapterPeopleAsync with only one person
// await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, _unitOfWork);
//
// var people = await _unitOfWork.PersonRepository.GetAllPeople();
// Assert.DoesNotContain(people, p => p.Name == "Jane Doe");
//
// var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
// Assert.Contains("Joe Shmo", chapterPeople);
// Assert.DoesNotContain("Jane Doe", chapterPeople);
// }
//
// // 3. Test no changes when the list of people is the same
// [Fact]
// public async Task UpdateChapterPeopleAsync_NoChanges()
// {
// var existingPerson = new PersonBuilder("Joe Shmo").Build();
// var chapter = new ChapterBuilder("1").Build();
//
// var series = new SeriesBuilder("Test 1")
// .WithVolume(new VolumeBuilder("1")
// .WithChapter(new ChapterBuilder("1")
// .WithPerson(existingPerson, PersonRole.Editor)
// .Build())
// .Build())
// .Build();
//
// _unitOfWork.SeriesRepository.Add(series);
// await _unitOfWork.CommitAsync();
//
// // Call UpdateChapterPeopleAsync with the same list
// await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, _unitOfWork);
//
// var people = await _unitOfWork.PersonRepository.GetAllPeople();
// Assert.Contains(people, p => p.Name == "Joe Shmo");
//
// var chapterPeople = chapter.People.Select(cp => cp.Person.Name).ToList();
// Assert.Contains("Joe Shmo", chapterPeople);
// Assert.Single(chapter.People); // No duplicate entries
// }
//
// // 4. Test multiple roles for a person
// [Fact]
// public async Task UpdateChapterPeopleAsync_MultipleRoles()
// {
// var person = new PersonBuilder("Joe Shmo").Build();
// var chapter = new ChapterBuilder("1").Build();
//
// var series = new SeriesBuilder("Test 1")
// .WithVolume(new VolumeBuilder("1")
// .WithChapter(new ChapterBuilder("1")
// .WithPerson(person, PersonRole.Writer) // Assign person as Writer
// .Build())
// .Build())
// .Build();
//
// _unitOfWork.SeriesRepository.Add(series);
// await _unitOfWork.CommitAsync();
//
// // Add same person as Editor
// await PersonHelper.UpdateChapterPeopleAsync(chapter, new List<string> { "Joe Shmo" }, PersonRole.Editor, _unitOfWork);
//
// // Ensure that the same person is assigned with two roles
// var chapterPeople = chapter.People.Where(cp => cp.Person.Name == "Joe Shmo").ToList();
// Assert.Equal(2, chapterPeople.Count); // One for each role
// Assert.Contains(chapterPeople, cp => cp.Role == PersonRole.Writer);
// Assert.Contains(chapterPeople, cp => cp.Role == PersonRole.Editor);
// }
}

View File

@ -1,128 +0,0 @@
using System.Collections.Generic;
using System.Linq;
using API.Data;
using API.Entities;
using API.Extensions;
using API.Helpers;
using API.Helpers.Builders;
using Xunit;
namespace API.Tests.Helpers;
public class TagHelperTests
{
[Fact]
public void UpdateTag_ShouldAddNewTag()
{
var allTags = new Dictionary<string, Tag>
{
{"Action".ToNormalized(), new TagBuilder("Action").Build()},
{"Sci-fi".ToNormalized(), new TagBuilder("Sci-fi").Build()}
};
var tagCalled = new List<Tag>();
var addedCount = 0;
TagHelper.UpdateTag(allTags, new[] {"Action", "Adventure"}, (tag, added) =>
{
if (added)
{
addedCount++;
}
tagCalled.Add(tag);
});
Assert.Equal(1, addedCount);
Assert.Equal(2, tagCalled.Count());
Assert.Equal(3, allTags.Count);
}
[Fact]
public void UpdateTag_ShouldNotAddDuplicateTag()
{
var allTags = new Dictionary<string, Tag>
{
{"Action".ToNormalized(), new TagBuilder("Action").Build()},
{"Sci-fi".ToNormalized(), new TagBuilder("Sci-fi").Build()}
};
var tagCalled = new List<Tag>();
var addedCount = 0;
TagHelper.UpdateTag(allTags, new[] {"Action", "Scifi"}, (tag, added) =>
{
if (added)
{
addedCount++;
}
tagCalled.Add(tag);
});
Assert.Equal(2, allTags.Count);
Assert.Equal(0, addedCount);
}
[Fact]
public void AddTag_ShouldAddOnlyNonExistingTag()
{
var existingTags = new List<Tag>
{
new TagBuilder("Action").Build(),
new TagBuilder("action").Build(),
new TagBuilder("Sci-fi").Build(),
};
TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("Action").Build());
Assert.Equal(3, existingTags.Count);
TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("action").Build());
Assert.Equal(3, existingTags.Count);
TagHelper.AddTagIfNotExists(existingTags, new TagBuilder("Shonen").Build());
Assert.Equal(4, existingTags.Count);
}
[Fact]
public void KeepOnlySamePeopleBetweenLists()
{
var existingTags = new List<Tag>
{
new TagBuilder("Action").Build(),
new TagBuilder("Sci-fi").Build(),
};
var peopleFromChapters = new List<Tag>
{
new TagBuilder("Action").Build(),
};
var tagRemoved = new List<Tag>();
TagHelper.KeepOnlySameTagBetweenLists(existingTags,
peopleFromChapters, tag =>
{
tagRemoved.Add(tag);
});
Assert.Single(tagRemoved);
}
[Fact]
public void RemoveEveryoneIfNothingInRemoveAllExcept()
{
var existingTags = new List<Tag>
{
new TagBuilder("Action").Build(),
new TagBuilder("Sci-fi").Build(),
};
var peopleFromChapters = new List<Tag>();
var tagRemoved = new List<Tag>();
TagHelper.KeepOnlySameTagBetweenLists(existingTags,
peopleFromChapters, tag =>
{
tagRemoved.Add(tag);
});
Assert.Equal(2, tagRemoved.Count);
}
}

View File

@ -138,13 +138,31 @@ public class BasicParserTests
[Fact]
public void Parse_MangaLibrary_SpecialInFilename()
{
var actual = _parser.Parse("C:/Books/Summer Time Rendering/Specials/Volume Omake.cbr",
var actual = _parser.Parse("C:/Books/Summer Time Rendering/Volume SP01.cbr",
"C:/Books/Summer Time Rendering/",
RootDirectory, LibraryType.Manga, null);
Assert.NotNull(actual);
Assert.Equal("Summer Time Rendering", actual.Series);
Assert.Equal("Volume Omake", actual.Title);
Assert.Equal("Volume SP01", actual.Title);
Assert.Equal(Parser.SpecialVolume, actual.Volumes);
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
Assert.True(actual.IsSpecial);
}
/// <summary>
/// Tests that when the filename parses as a special, it is parsed appropriately
/// </summary>
[Fact]
public void Parse_MangaLibrary_SpecialInFilename2()
{
var actual = _parser.Parse("M:/Kimi wa Midara na Boku no Joou/Specials/[Renzokusei] Special 1 SP02.zip",
"M:/Kimi wa Midara na Boku no Joou/",
RootDirectory, LibraryType.Manga, null);
Assert.NotNull(actual);
Assert.Equal("Kimi wa Midara na Boku no Joou", actual.Series);
Assert.Equal("[Renzokusei] Special 1 SP02", actual.Title);
Assert.Equal(Parser.SpecialVolume, actual.Volumes);
Assert.Equal(Parser.DefaultChapter, actual.Chapters);
Assert.True(actual.IsSpecial);

View File

@ -408,7 +408,7 @@ public class DefaultParserTests
expected = new ParserInfo
{
Series = "Foo 50", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, IsSpecial = true,
Chapters = "50", Filename = "Foo 50 SP01.cbz", Format = MangaFormat.Archive,
Chapters = Parser.DefaultChapter, Filename = "Foo 50 SP01.cbz", Format = MangaFormat.Archive,
FullFilePath = filepath
};

View File

@ -21,24 +21,4 @@ public class BookParsingTests
{
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseVolume(filename, LibraryType.Book));
}
// [Theory]
// [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA", "@font-face{font-family:'syyskuu_repaleinen';src:url(data:font/opentype;base64,AAEAAAA")]
// [InlineData("@font-face{font-family:'syyskuu_repaleinen';src:url('fonts/font.css')", "@font-face{font-family:'syyskuu_repaleinen';src:url('TEST/fonts/font.css')")]
// public void ReplaceFontSrcUrl(string input, string expected)
// {
// var apiBase = "TEST/";
// var actual = API.Parser.Parser.FontSrcUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3");
// Assert.Equal(expected, actual);
// }
//
// [Theory]
// [InlineData("@import url('font.css');", "@import url('TEST/font.css');")]
// public void ReplaceImportSrcUrl(string input, string expected)
// {
// var apiBase = "TEST/";
// var actual = API.Parser.Parser.CssImportUrlRegex.Replace(input, "$1" + apiBase + "$2" + "$3");
// Assert.Equal(expected, actual);
// }
}

View File

@ -1,11 +1,6 @@
using System.IO.Abstractions.TestingHelpers;
using API.Entities.Enums;
using API.Services;
using API.Services.Tasks.Scanner.Parser;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Parsing;
@ -73,41 +68,41 @@ public class ComicParsingTests
[InlineData("SKY WORLD สกายเวิลด์ เล่มที่ 1", "SKY WORLD สกายเวิลด์")]
public void ParseComicSeriesTest(string filename, string expected)
{
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicSeries(filename));
Assert.Equal(expected, Parser.ParseComicSeries(filename));
}
[Theory]
[InlineData("01 Spider-Man & Wolverine 01.cbr", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Batman & Catwoman - Trail of the Gun 01", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Batman & Daredevil - King of New York", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Batman & Robin the Teen Wonder #0", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Batman & Wildcat (1 of 3)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Batman And Superman World's Finest #01", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Babe 01", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("01 Spider-Man & Wolverine 01.cbr", Parser.LooseLeafVolume)]
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", Parser.LooseLeafVolume)]
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", Parser.LooseLeafVolume)]
[InlineData("Batman & Catwoman - Trail of the Gun 01", Parser.LooseLeafVolume)]
[InlineData("Batman & Daredevil - King of New York", Parser.LooseLeafVolume)]
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", Parser.LooseLeafVolume)]
[InlineData("Batman & Robin the Teen Wonder #0", Parser.LooseLeafVolume)]
[InlineData("Batman & Wildcat (1 of 3)", Parser.LooseLeafVolume)]
[InlineData("Batman And Superman World's Finest #01", Parser.LooseLeafVolume)]
[InlineData("Babe 01", Parser.LooseLeafVolume)]
[InlineData("Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life (2004)", Parser.LooseLeafVolume)]
[InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")]
[InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", Parser.LooseLeafVolume)]
[InlineData("Superman v1 024 (09-10 1943)", "1")]
[InlineData("Superman v1.5 024 (09-10 1943)", "1.5")]
[InlineData("Amazing Man Comics chapter 25", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("spawn-123", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("spawn-chapter-123", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Batman Beyond 04 (of 6) (1999)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Batman Beyond 001 (2012)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Batman Beyond 2.0 001 (2013)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Amazing Man Comics chapter 25", Parser.LooseLeafVolume)]
[InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", Parser.LooseLeafVolume)]
[InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", Parser.LooseLeafVolume)]
[InlineData("spawn-123", Parser.LooseLeafVolume)]
[InlineData("spawn-chapter-123", Parser.LooseLeafVolume)]
[InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", Parser.LooseLeafVolume)]
[InlineData("Batman Beyond 04 (of 6) (1999)", Parser.LooseLeafVolume)]
[InlineData("Batman Beyond 001 (2012)", Parser.LooseLeafVolume)]
[InlineData("Batman Beyond 2.0 001 (2013)", Parser.LooseLeafVolume)]
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", Parser.LooseLeafVolume)]
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "1")]
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", Parser.LooseLeafVolume)]
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "2000")]
[InlineData("Batgirl V2000 #57", "2000")]
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("2000 AD 0366 [1984-04-28] (flopbie)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", Parser.LooseLeafVolume)]
[InlineData("2000 AD 0366 [1984-04-28] (flopbie)", Parser.LooseLeafVolume)]
[InlineData("Daredevil - v6 - 10 - (2019)", "6")]
[InlineData("Daredevil - v6.5", "6.5")]
// Tome Tests
@ -117,25 +112,25 @@ public class ComicParsingTests
[InlineData("Conquistador_Tome_2", "2")]
[InlineData("Max_l_explorateur-_Tome_0", "0")]
[InlineData("Chevaliers d'Héliopolis T3 - Rubedo, l'oeuvre au rouge (Jodorowsky & Jérémy)", "3")]
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", Parser.LooseLeafVolume)]
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", "1")]
// Russian Tests
[InlineData("Kebab Том 1 Глава 3", "1")]
[InlineData("Манга Глава 2", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("Манга Глава 2", Parser.LooseLeafVolume)]
[InlineData("ย้อนเวลากลับมาร้าย เล่ม 1", "1")]
[InlineData("เด็กคนนี้ขอลาออกจากการเป็นเจ้าของปราสาท เล่ม 1 ตอนที่ 3", "1")]
[InlineData("วิวาห์รัก เดิมพันชีวิต ตอนที่ 2", API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume)]
[InlineData("วิวาห์รัก เดิมพันชีวิต ตอนที่ 2", Parser.LooseLeafVolume)]
public void ParseComicVolumeTest(string filename, string expected)
{
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseComicVolume(filename));
Assert.Equal(expected, Parser.ParseComicVolume(filename));
}
[Theory]
[InlineData("01 Spider-Man & Wolverine 01.cbr", "1")]
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", Parser.DefaultChapter)]
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", Parser.DefaultChapter)]
[InlineData("Batman & Catwoman - Trail of the Gun 01", "1")]
[InlineData("Batman & Daredevil - King of New York", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Batman & Daredevil - King of New York", Parser.DefaultChapter)]
[InlineData("Batman & Grendel (1996) 01 - Devil's Bones", "1")]
[InlineData("Batman & Robin the Teen Wonder #0", "0")]
[InlineData("Batman & Wildcat (1 of 3)", "1")]
@ -159,8 +154,8 @@ public class ComicParsingTests
[InlineData("Batman Beyond 001 (2012)", "1")]
[InlineData("Batman Beyond 2.0 001 (2013)", "1")]
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "1")]
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", Parser.DefaultChapter)]
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", Parser.DefaultChapter)]
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "57")]
[InlineData("Batgirl V2000 #57", "57")]
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "21")]
@ -169,7 +164,7 @@ public class ComicParsingTests
[InlineData("Daredevil - v6 - 10 - (2019)", "10")]
[InlineData("Batman Beyond 2016 - Chapter 001.cbz", "1")]
[InlineData("Adventure Time (2012)/Adventure Time #1 (2012)", "1")]
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", API.Services.Tasks.Scanner.Parser.Parser.DefaultChapter)]
[InlineData("Adventure Time TPB (2012)/Adventure Time v01 (2012).cbz", Parser.DefaultChapter)]
[InlineData("Kebab Том 1 Глава 3", "3")]
[InlineData("Манга Глава 2", "2")]
[InlineData("Манга 2 Глава", "2")]
@ -179,35 +174,35 @@ public class ComicParsingTests
[InlineData("หนึ่งความคิด นิจนิรันดร์ บทที่ 112", "112")]
public void ParseComicChapterTest(string filename, string expected)
{
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseChapter(filename, LibraryType.Comic));
Assert.Equal(expected, Parser.ParseChapter(filename, LibraryType.Comic));
}
[Theory]
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", true)]
[InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", true)]
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", false)]
[InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", false)]
[InlineData("Baldwin the Brave & Other Tales Special SP1.cbr", true)]
[InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", true)]
[InlineData("Boule et Bill - THS -Bill à disparu", true)]
[InlineData("Asterix - HS - Les 12 travaux d'Astérix", true)]
[InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", true)]
[InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", false)]
[InlineData("Boule et Bill - THS -Bill à disparu", false)]
[InlineData("Asterix - HS - Les 12 travaux d'Astérix", false)]
[InlineData("Sillage Hors Série - Le Collectionneur - Concordance-DKFR", false)]
[InlineData("laughs", false)]
[InlineData("Annual Days of Summer", true)]
[InlineData("Adventure Time 2013 Annual #001 (2013)", true)]
[InlineData("Adventure Time 2013_Annual_#001 (2013)", true)]
[InlineData("Adventure Time 2013_-_Annual #001 (2013)", true)]
[InlineData("Annual Days of Summer", false)]
[InlineData("Adventure Time 2013 Annual #001 (2013)", false)]
[InlineData("Adventure Time 2013_Annual_#001 (2013)", false)]
[InlineData("Adventure Time 2013_-_Annual #001 (2013)", false)]
[InlineData("G.I. Joe - A Real American Hero Yearbook 004 Reprint (2021)", false)]
[InlineData("Mazebook 001", false)]
[InlineData("X-23 One Shot (2010)", true)]
[InlineData("Casus Belli v1 Hors-Série 21 - Mousquetaires et Sorcellerie", true)]
[InlineData("Batman Beyond Annual", true)]
[InlineData("Batman Beyond Bonus", true)]
[InlineData("Batman Beyond OneShot", true)]
[InlineData("Batman Beyond Specials", true)]
[InlineData("Batman Beyond Omnibus (1999)", true)]
[InlineData("Batman Beyond Omnibus", true)]
[InlineData("01 Annual Batman Beyond", true)]
[InlineData("Blood Syndicate Annual #001", true)]
[InlineData("X-23 One Shot (2010)", false)]
[InlineData("Casus Belli v1 Hors-Série 21 - Mousquetaires et Sorcellerie", false)]
[InlineData("Batman Beyond Annual", false)]
[InlineData("Batman Beyond Bonus", false)]
[InlineData("Batman Beyond OneShot", false)]
[InlineData("Batman Beyond Specials", false)]
[InlineData("Batman Beyond Omnibus (1999)", false)]
[InlineData("Batman Beyond Omnibus", false)]
[InlineData("01 Annual Batman Beyond", false)]
[InlineData("Blood Syndicate Annual #001", false)]
public void IsComicSpecialTest(string input, bool expected)
{
Assert.Equal(expected, Parser.IsSpecial(input, LibraryType.Comic));
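// Hedged sketch: the flipped expectations above suggest specials are now
// detected via an explicit "SP##" marker in the filename rather than
// keywords such as Annual, Omnibus, or One Shot. The actual Parser regex is
// an assumption; this pattern merely satisfies the cases listed in this
// file (requires System.Text.RegularExpressions; case handling assumed).
var specialMarkerSketch = new Regex(@"\bSP\d+\b", RegexOptions.IgnoreCase);
bool IsSpecialSketch(string path) => specialMarkerSketch.IsMatch(path);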

View File

@ -326,18 +326,18 @@ public class MangaParsingTests
Assert.Equal(expected, API.Services.Tasks.Scanner.Parser.Parser.ParseEdition(input));
}
[Theory]
[InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)]
[InlineData("Beelzebub_Omake_June_2012_RHS", true)]
[InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", false)]
[InlineData("Beelzebub_Omake_June_2012_RHS", false)]
[InlineData("Beelzebub_Side_Story_02_RHS.zip", false)]
[InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", true)]
[InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", true)]
[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", true)]
[InlineData("Ani-Hina Art Collection.cbz", true)]
[InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", true)]
[InlineData("A Town Where You Live - Bonus Chapter.zip", true)]
[InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", false)]
[InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", false)]
[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", false)]
[InlineData("Ani-Hina Art Collection.cbz", false)]
[InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", false)]
[InlineData("A Town Where You Live - Bonus Chapter.zip", false)]
[InlineData("Yuki Merry - 4-Komga Anthology", false)]
[InlineData("Beastars - SP01", false)]
[InlineData("Beastars SP01", false)]
[InlineData("Beastars - SP01", true)]
[InlineData("Beastars SP01", true)]
[InlineData("The League of Extraordinary Gentlemen", false)]
[InlineData("The League of Extra-ordinary Gentlemen", false)]
[InlineData("Dr. Ramune - Mysterious Disease Specialist v01 (2020) (Digital) (danke-Empire)", false)]

View File

@ -6,6 +6,7 @@ using System.Linq;
using System.Text;
using System.Threading.Tasks;
using API.Services;
using Kavita.Common.Helpers;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
@ -745,6 +746,12 @@ public class DirectoryServiceTests
[InlineData(new [] {"/manga"},
new [] {"/manga/Love Hina/Vol. 01.cbz", "/manga/Love Hina/Specials/Sp01.cbz"},
"/manga/Love Hina")]
[InlineData(new [] {"/manga"},
new [] {"/manga/Love Hina/Hina/Vol. 01.cbz", "/manga/Love Hina/Specials/Sp01.cbz"},
"/manga/Love Hina")]
[InlineData(new [] {"/manga"},
new [] {"/manga/Dress Up Darling/Dress Up Darling Ch 01.cbz", "/manga/Dress Up Darling/Dress Up Darling/Dress Up Darling Vol 01.cbz"},
"/manga/Dress Up Darling")]
public void FindLowestDirectoriesFromFilesTest(string[] rootDirectories, string[] files, string expectedDirectory)
{
var fileSystem = new MockFileSystem();
@ -920,8 +927,9 @@ public class DirectoryServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
var globMatcher = new GlobMatcher();
globMatcher.AddExclude("*.*");
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
Assert.Empty(allFiles);
@ -945,7 +953,9 @@ public class DirectoryServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
var globMatcher = new GlobMatcher();
globMatcher.AddExclude("**/Accel World/*");
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
Assert.Single(allFiles); // Ignore files are not counted in files, only valid extensions
@ -974,7 +984,10 @@ public class DirectoryServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fileSystem);
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions);
var globMatcher = new GlobMatcher();
globMatcher.AddExclude("**/Accel World/*");
globMatcher.AddExclude("**/ArtBooks/*");
var allFiles = ds.ScanFiles("C:/Data/", API.Services.Tasks.Scanner.Parser.Parser.SupportedExtensions, globMatcher);
Assert.Equal(2, allFiles.Count); // Ignore files are not counted in files, only valid extensions
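// ScanFiles now takes a GlobMatcher, so the caller supplies ignore rules up
// front (a hedged reading of this diff). Only AddExclude appears in the
// source; the commented check below uses a hypothetical member name.
var matcher = new GlobMatcher();
matcher.AddExclude("**/Accel World/*");   // drop one series folder anywhere in the tree
matcher.AddExclude("**/ArtBooks/*");
// bool ignored = matcher.ExcludeMatches("C:/Data/Accel World/Vol 01.cbz"); // hypothetical API name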

View File

@ -206,24 +206,6 @@ public class ParseScannedFilesTests : AbstractDbTest
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
// var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
//
// Task TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
// {
// var skippedScan = parsedInfo.Item1;
// var parsedFiles = parsedInfo.Item2;
// if (parsedFiles.Count == 0) return Task.CompletedTask;
//
// var foundParsedSeries = new ParsedSeries()
// {
// Name = parsedFiles.First().Series,
// NormalizedName = parsedFiles.First().Series.ToNormalized(),
// Format = parsedFiles.First().Format
// };
//
// parsedSeries.Add(foundParsedSeries, parsedFiles);
// return Task.CompletedTask;
// }
var library =
await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
@ -273,7 +255,7 @@ public class ParseScannedFilesTests : AbstractDbTest
var directoriesSeen = new HashSet<string>();
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
var scanResults = await psf.ProcessFiles("C:/Data/", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
var scanResults = await psf.ScanFiles("C:/Data/", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
foreach (var scanResult in scanResults)
{
directoriesSeen.Add(scanResult.Folder);
@ -295,7 +277,7 @@ public class ParseScannedFilesTests : AbstractDbTest
Assert.NotNull(library);
var directoriesSeen = new HashSet<string>();
var scanResults = await psf.ProcessFiles("C:/Data/", false,
var scanResults = await psf.ScanFiles("C:/Data/", false,
await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
foreach (var scanResult in scanResults)
@ -328,7 +310,7 @@ public class ParseScannedFilesTests : AbstractDbTest
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
Assert.NotNull(library);
var scanResults = await psf.ProcessFiles("C:/Data", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
var scanResults = await psf.ScanFiles("C:/Data", true, await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
Assert.Equal(2, scanResults.Count);
}
@ -357,7 +339,7 @@ public class ParseScannedFilesTests : AbstractDbTest
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1,
LibraryIncludes.Folders | LibraryIncludes.FileTypes);
Assert.NotNull(library);
var scanResults = await psf.ProcessFiles("C:/Data", false,
var scanResults = await psf.ScanFiles("C:/Data", false,
await _unitOfWork.SeriesRepository.GetFolderPathMap(1), library);
Assert.Single(scanResults);

View File

@ -50,65 +50,14 @@ public class ScannerServiceTests : AbstractDbTest
await _context.SaveChangesAsync();
}
[Fact]
public void FindSeriesNotOnDisk_Should_Remove1()
{
var infos = new Dictionary<ParsedSeries, IList<ParserInfo>>();
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Archive});
//AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Volumes = "1", Format = MangaFormat.Epub});
var existingSeries = new List<Series>
{
new SeriesBuilder("Darker Than Black")
.WithFormat(MangaFormat.Epub)
.WithVolume(new VolumeBuilder("1")
.WithName("1")
.Build())
.WithLocalizedName("Darker Than Black")
.Build()
};
Assert.Single(ScannerService.FindSeriesNotOnDisk(existingSeries, infos));
}
[Fact]
public void FindSeriesNotOnDisk_Should_RemoveNothing_Test()
{
var infos = new Dictionary<ParsedSeries, IList<ParserInfo>>();
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Format = MangaFormat.Archive});
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1", Format = MangaFormat.Archive});
ParserInfoFactory.AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10", Format = MangaFormat.Archive});
var existingSeries = new List<Series>
{
new SeriesBuilder("Cage of Eden")
.WithFormat(MangaFormat.Archive)
.WithVolume(new VolumeBuilder("1")
.WithName("1")
.Build())
.WithLocalizedName("Darker Than Black")
.Build(),
new SeriesBuilder("Darker Than Black")
.WithFormat(MangaFormat.Archive)
.WithVolume(new VolumeBuilder("1")
.WithName("1")
.Build())
.WithLocalizedName("Darker Than Black")
.Build(),
};
Assert.Empty(ScannerService.FindSeriesNotOnDisk(existingSeries, infos));
}
[Fact]
public async Task ScanLibrary_ComicVine_PublisherFolder()
{
var testcase = "Publisher - ComicVine.json";
var postLib = await GenerateScannerData(testcase);
var library = await GenerateScannerData(testcase);
var scanner = CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Equal(4, postLib.Series.Count);
@ -118,18 +67,67 @@ public class ScannerServiceTests : AbstractDbTest
public async Task ScanLibrary_ShouldCombineNestedFolder()
{
var testcase = "Series and Series-Series Combined - Manga.json";
var postLib = await GenerateScannerData(testcase);
var library = await GenerateScannerData(testcase);
var scanner = CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
Assert.Single(postLib.Series);
Assert.Equal(2, postLib.Series.First().Volumes.Count);
}
[Fact]
public async Task ScanLibrary_FlatSeries()
{
var testcase = "Flat Series - Manga.json";
var library = await GenerateScannerData(testcase);
var scanner = CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
Assert.Equal(3, postLib.Series.First().Volumes.Count);
// TODO: Trigger a deletion of ch 10
}
[Fact]
public async Task ScanLibrary_FlatSeriesWithSpecialFolder()
{
var testcase = "Flat Series with Specials Folder - Manga.json";
var library = await GenerateScannerData(testcase);
var scanner = CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
Assert.Equal(4, postLib.Series.First().Volumes.Count);
Assert.NotNull(postLib.Series.First().Volumes.FirstOrDefault(v => v.Chapters.FirstOrDefault(c => c.IsSpecial) != null));
}
[Fact]
public async Task ScanLibrary_FlatSeriesWithSpecial()
{
const string testcase = "Flat Special - Manga.json";
var library = await GenerateScannerData(testcase);
var scanner = CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
Assert.Equal(3, postLib.Series.First().Volumes.Count);
Assert.NotNull(postLib.Series.First().Volumes.FirstOrDefault(v => v.Chapters.FirstOrDefault(c => c.IsSpecial) != null));
}
private async Task<Library> GenerateScannerData(string testcase)
{
var testDirectoryPath = await GenerateTestDirectory(Path.Join(_testcasesDirectory, testcase));
_testOutputHelper.WriteLine($"Test Directory Path: {testDirectoryPath}");
var (publisher, type) = SplitPublisherAndLibraryType(Path.GetFileNameWithoutExtension(testcase));
@ -145,25 +143,26 @@ public class ScannerServiceTests : AbstractDbTest
_unitOfWork.LibraryRepository.Add(library);
await _unitOfWork.CommitAsync();
return library;
}
private ScannerService CreateServices()
{
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), new FileSystem());
var mockReadingService = new MockReadingItemService(ds, Substitute.For<IBookService>());
var processSeries = new ProcessSeries(_unitOfWork, Substitute.For<ILogger<ProcessSeries>>(),
Substitute.For<IEventHub>(),
ds, Substitute.For<ICacheHelper>(), mockReadingService, Substitute.For<IFileService>(),
Substitute.For<IMetadataService>(),
Substitute.For<IWordCountAnalyzerService>(), Substitute.For<ICollectionTagService>(),
Substitute.For<IWordCountAnalyzerService>(),
Substitute.For<IReadingListService>(),
Substitute.For<IExternalMetadataService>(), new TagManagerService(_unitOfWork, Substitute.For<ILogger<TagManagerService>>()));
Substitute.For<IExternalMetadataService>());
var scanner = new ScannerService(_unitOfWork, Substitute.For<ILogger<ScannerService>>(),
Substitute.For<IMetadataService>(),
Substitute.For<ICacheService>(), Substitute.For<IEventHub>(), ds,
mockReadingService, processSeries, Substitute.For<IWordCountAnalyzerService>());
await scanner.ScanLibrary(library.Id);
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
return postLib;
return scanner;
}
private static (string Publisher, LibraryType Type) SplitPublisherAndLibraryType(string input)
@ -209,6 +208,8 @@ public class ScannerServiceTests : AbstractDbTest
// Generate the files and folders
await Scaffold(testDirectory, filePaths);
_testOutputHelper.WriteLine($"Test Directory Path: {testDirectory}");
return testDirectory;
}

View File

@ -817,12 +817,17 @@ public class SeriesServiceTests : AbstractDbTest
public async Task UpdateSeriesMetadata_ShouldAddNewPerson_NoExistingPeople()
{
await ResetDb();
var g = new PersonBuilder("Existing Person").Build();
await _context.SaveChangesAsync();
var s = new SeriesBuilder("Test")
.WithMetadata(new SeriesMetadataBuilder().Build())
.WithMetadata(new SeriesMetadataBuilder()
.WithPerson(g, PersonRole.Publisher)
.Build())
.Build();
s.Library = new LibraryBuilder("Test LIb", LibraryType.Book).Build();
var g = new PersonBuilder("Existing Person", PersonRole.Publisher).Build();
_context.Series.Add(s);
_context.Person.Add(g);
@ -833,7 +838,7 @@ public class SeriesServiceTests : AbstractDbTest
SeriesMetadata = new SeriesMetadataDto
{
SeriesId = 1,
Publishers = new List<PersonDto> {new () {Id = 0, Name = "Existing Person", Role = PersonRole.Publisher}},
Publishers = new List<PersonDto> {new () {Id = 0, Name = "Existing Person"}},
},
});
@ -842,7 +847,7 @@ public class SeriesServiceTests : AbstractDbTest
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
Assert.NotNull(series.Metadata);
Assert.True(series.Metadata.People.Select(g => g.Name).All(g => g == "Existing Person"));
Assert.True(series.Metadata.People.Select(g => g.Person.Name).All(personName => personName == "Existing Person"));
Assert.False(series.Metadata.PublisherLocked); // PublisherLocked is false unless the UI explicitly says it should be locked
}
@ -854,10 +859,14 @@ public class SeriesServiceTests : AbstractDbTest
.WithMetadata(new SeriesMetadataBuilder().Build())
.Build();
s.Library = new LibraryBuilder("Test LIb", LibraryType.Book).Build();
var g = new PersonBuilder("Existing Person", PersonRole.Publisher).Build();
s.Metadata.People = new List<Person>
{new PersonBuilder("Existing Writer", PersonRole.Writer).Build(),
new PersonBuilder("Existing Translator", PersonRole.Translator).Build(), new PersonBuilder("Existing Publisher 2", PersonRole.Publisher).Build()};
var g = new PersonBuilder("Existing Person").Build();
s.Metadata.People = new List<SeriesMetadataPeople>
{
new SeriesMetadataPeople() {Person = new PersonBuilder("Existing Writer").Build(), Role = PersonRole.Writer},
new SeriesMetadataPeople() {Person = new PersonBuilder("Existing Translator").Build(), Role = PersonRole.Translator},
new SeriesMetadataPeople() {Person = new PersonBuilder("Existing Publisher 2").Build(), Role = PersonRole.Publisher}
};
_context.Series.Add(s);
_context.Person.Add(g);
@ -868,7 +877,7 @@ public class SeriesServiceTests : AbstractDbTest
SeriesMetadata = new SeriesMetadataDto
{
SeriesId = 1,
Publishers = new List<PersonDto> {new () {Id = 0, Name = "Existing Person", Role = PersonRole.Publisher}},
Publishers = new List<PersonDto> {new () {Id = 0, Name = "Existing Person"}},
PublisherLocked = true
},
@ -878,7 +887,7 @@ public class SeriesServiceTests : AbstractDbTest
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
Assert.NotNull(series.Metadata);
Assert.True(series.Metadata.People.Select(g => g.Name).All(g => g == "Existing Person"));
Assert.True(series.Metadata.People.Select(g => g.Person.Name).All(personName => personName == "Existing Person"));
Assert.True(series.Metadata.PublisherLocked);
}
@ -891,7 +900,7 @@ public class SeriesServiceTests : AbstractDbTest
.WithMetadata(new SeriesMetadataBuilder().Build())
.Build();
s.Library = new LibraryBuilder("Test LIb", LibraryType.Book).Build();
var g = new PersonBuilder("Existing Person", PersonRole.Publisher).Build();
var g = new PersonBuilder("Existing Person").Build();
_context.Series.Add(s);
_context.Person.Add(g);

View File

@ -0,0 +1,5 @@
[
"My Dress-Up Darling/My Dress-Up Darling v01.cbz",
"My Dress-Up Darling/My Dress-Up Darling v02.cbz",
"My Dress-Up Darling/My Dress-Up Darling ch 10.cbz"
]

View File

@ -0,0 +1,6 @@
[
"My Dress-Up Darling/My Dress-Up Darling v01.cbz",
"My Dress-Up Darling/My Dress-Up Darling v02.cbz",
"My Dress-Up Darling/My Dress-Up Darling ch 10.cbz",
"My Dress-Up Darling/Specials/Official Anime Fanbook SP05 (2024) (Digital).cbz"
]

View File

@ -0,0 +1,5 @@
[
"Uzaki-chan Wants to Hang Out!\\Uzaki-chan Wants to Hang Out! - 2022 New Years Special SP01.cbz",
"Uzaki-chan Wants to Hang Out!\\Uzaki-chan Wants to Hang Out! - Ch. 103 - Kouhai and Control.cbz",
"Uzaki-chan Wants to Hang Out!\\Uzaki-chan Wants to Hang Out! v01 (2019) (Digital) (danke-Empire).cbz"
]

View File

@ -0,0 +1,4 @@
[
"My Dress-Up Darling/Chapter 1/01.cbz",
"My Dress-Up Darling/Chapter 2/02.cbz"
]

View File

@ -1,3 +1,4 @@
<wpf:ResourceDictionary xml:space="preserve" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml" xmlns:s="clr-namespace:System;assembly=mscorlib" xmlns:ss="urn:shemas-jetbrains-com:settings-storage-xaml" xmlns:wpf="http://schemas.microsoft.com/winfx/2006/xaml/presentation">
<s:Boolean x:Key="/Default/CodeInspection/NamespaceProvider/NamespaceFoldersToSkip/=covers/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeInspection/NamespaceProvider/NamespaceFoldersToSkip/=dtos_005Cperson/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeInspection/NamespaceProvider/NamespaceFoldersToSkip/=wwwroot/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>

View File

@ -79,7 +79,8 @@ public class ChapterController : BaseApiController
[HttpPost("update")]
public async Task<ActionResult> UpdateChapterMetadata(UpdateChapterDto dto)
{
var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(dto.Id, ChapterIncludes.People | ChapterIncludes.Genres | ChapterIncludes.Tags);
var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(dto.Id,
ChapterIncludes.People | ChapterIncludes.Genres | ChapterIncludes.Tags);
if (chapter == null)
return BadRequest(_localizationService.Translate(User.GetUserId(), "chapter-doesnt-exist"));
@ -135,105 +136,130 @@ public class ChapterController : BaseApiController
#region Genres
if (dto.Genres != null &&
dto.Genres.Count != 0)
if (dto.Genres is {Count: > 0})
{
var allGenres = (await _unitOfWork.GenreRepository.GetAllGenresByNamesAsync(dto.Genres.Select(t => Parser.Normalize(t.Title)))).ToList();
chapter.Genres ??= new List<Genre>();
GenreHelper.UpdateGenreList(dto.Genres, chapter, allGenres, genre =>
{
chapter.Genres.Add(genre);
}, () => chapter.GenresLocked = true);
await GenreHelper.UpdateChapterGenres(chapter, dto.Genres.Select(t => t.Title), _unitOfWork);
}
#endregion
#region Tags
if (dto.Tags is {Count: > 0})
{
var allTags = (await _unitOfWork.TagRepository
.GetAllTagsByNameAsync(dto.Tags.Select(t => Parser.Normalize(t.Title))))
.ToList();
chapter.Tags ??= new List<Tag>();
TagHelper.UpdateTagList(dto.Tags, chapter, allTags, tag =>
{
chapter.Tags.Add(tag);
}, () => chapter.TagsLocked = true);
await TagHelper.UpdateChapterTags(chapter, dto.Tags.Select(t => t.Title), _unitOfWork);
}
#endregion
#region People
if (PersonHelper.HasAnyPeople(dto))
{
void HandleAddPerson(Person person)
{
PersonHelper.AddPersonIfNotExists(chapter.People, person);
}
chapter.People ??= new List<ChapterPeople>();
chapter.People ??= new List<Person>();
var allWriters = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Writer,
dto.Writers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Writer, dto.Writers, chapter, allWriters.AsReadOnly(),
HandleAddPerson, () => chapter.WriterLocked = true);
var allCharacters = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Character,
dto.Characters.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Character, dto.Characters, chapter, allCharacters.AsReadOnly(),
HandleAddPerson, () => chapter.CharacterLocked = true);
// Update writers
await PersonHelper.UpdateChapterPeopleAsync(
chapter,
dto.Writers.Select(p => Parser.Normalize(p.Name)).ToList(),
PersonRole.Writer,
_unitOfWork
);
var allColorists = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Colorist,
dto.Colorists.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Colorist, dto.Colorists, chapter, allColorists.AsReadOnly(),
HandleAddPerson, () => chapter.ColoristLocked = true);
// Update characters
await PersonHelper.UpdateChapterPeopleAsync(
chapter,
dto.Characters.Select(p => Parser.Normalize(p.Name)).ToList(),
PersonRole.Character,
_unitOfWork
);
var allEditors = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Editor,
dto.Editors.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Editor, dto.Editors, chapter, allEditors.AsReadOnly(),
HandleAddPerson, () => chapter.EditorLocked = true);
// Update pencillers
await PersonHelper.UpdateChapterPeopleAsync(
chapter,
dto.Pencillers.Select(p => Parser.Normalize(p.Name)).ToList(),
PersonRole.Penciller,
_unitOfWork
);
var allInkers = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Inker,
dto.Inkers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Inker, dto.Inkers, chapter, allInkers.AsReadOnly(),
HandleAddPerson, () => chapter.InkerLocked = true);
// Update inkers
await PersonHelper.UpdateChapterPeopleAsync(
chapter,
dto.Inkers.Select(p => Parser.Normalize(p.Name)).ToList(),
PersonRole.Inker,
_unitOfWork
);
var allLetterers = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Letterer,
dto.Letterers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Letterer, dto.Letterers, chapter, allLetterers.AsReadOnly(),
HandleAddPerson, () => chapter.LettererLocked = true);
// Update colorists
await PersonHelper.UpdateChapterPeopleAsync(
chapter,
dto.Colorists.Select(p => Parser.Normalize(p.Name)).ToList(),
PersonRole.Colorist,
_unitOfWork
);
var allPencillers = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Penciller,
dto.Pencillers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Penciller, dto.Pencillers, chapter, allPencillers.AsReadOnly(),
HandleAddPerson, () => chapter.PencillerLocked = true);
// Update letterers
await PersonHelper.UpdateChapterPeopleAsync(
chapter,
dto.Letterers.Select(p => Parser.Normalize(p.Name)).ToList(),
PersonRole.Letterer,
_unitOfWork
);
var allPublishers = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Publisher,
dto.Publishers.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Publisher, dto.Publishers, chapter, allPublishers.AsReadOnly(),
HandleAddPerson, () => chapter.PublisherLocked = true);
// Update cover artists
await PersonHelper.UpdateChapterPeopleAsync(
chapter,
dto.CoverArtists.Select(p => Parser.Normalize(p.Name)).ToList(),
PersonRole.CoverArtist,
_unitOfWork
);
var allImprints = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Imprint,
dto.Imprints.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Imprint, dto.Imprints, chapter, allImprints.AsReadOnly(),
HandleAddPerson, () => chapter.ImprintLocked = true);
// Update editors
await PersonHelper.UpdateChapterPeopleAsync(
chapter,
dto.Editors.Select(p => Parser.Normalize(p.Name)).ToList(),
PersonRole.Editor,
_unitOfWork
);
var allTeams = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Team,
dto.Imprints.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Team, dto.Teams, chapter, allTeams.AsReadOnly(),
HandleAddPerson, () => chapter.TeamLocked = true);
// Update publishers
await PersonHelper.UpdateChapterPeopleAsync(
chapter,
dto.Publishers.Select(p => Parser.Normalize(p.Name)).ToList(),
PersonRole.Publisher,
_unitOfWork
);
var allLocations = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Location,
dto.Imprints.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Location, dto.Locations, chapter, allLocations.AsReadOnly(),
HandleAddPerson, () => chapter.LocationLocked = true);
// Update translators
await PersonHelper.UpdateChapterPeopleAsync(
chapter,
dto.Translators.Select(p => Parser.Normalize(p.Name)).ToList(),
PersonRole.Translator,
_unitOfWork
);
var allTranslators = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.Translator,
dto.Translators.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.Translator, dto.Translators, chapter, allTranslators.AsReadOnly(),
HandleAddPerson, () => chapter.TranslatorLocked = true);
// Update imprints
await PersonHelper.UpdateChapterPeopleAsync(
chapter,
dto.Imprints.Select(p => Parser.Normalize(p.Name)).ToList(),
PersonRole.Imprint,
_unitOfWork
);
var allCoverArtists = await _unitOfWork.PersonRepository.GetAllPeopleByRoleAndNames(PersonRole.CoverArtist,
dto.CoverArtists.Select(p => Parser.Normalize(p.Name)));
PersonHelper.UpdatePeopleList(PersonRole.CoverArtist, dto.CoverArtists, chapter, allCoverArtists.AsReadOnly(),
HandleAddPerson, () => chapter.CoverArtistLocked = true);
// Update teams
await PersonHelper.UpdateChapterPeopleAsync(
chapter,
dto.Teams.Select(p => Parser.Normalize(p.Name)).ToList(),
PersonRole.Team,
_unitOfWork
);
// Update locations
await PersonHelper.UpdateChapterPeopleAsync(
chapter,
dto.Locations.Select(p => Parser.Normalize(p.Name)).ToList(),
PersonRole.Location,
_unitOfWork
);
}
#endregion
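The thirteen per-role blocks above all share one shape, differing only in the role and the DTO list; a minimal sketch of how they could be table-driven, assuming UpdateChapterPeopleAsync keeps the signature used above:

// Sketch only: collapses the repeated per-role updates into one loop.
var roleUpdates = new (PersonRole Role, IEnumerable<PersonDto> People)[]
{
    (PersonRole.Writer, dto.Writers),
    (PersonRole.Character, dto.Characters),
    (PersonRole.Penciller, dto.Pencillers),
    (PersonRole.Inker, dto.Inkers),
    (PersonRole.Colorist, dto.Colorists),
    (PersonRole.Letterer, dto.Letterers),
    (PersonRole.CoverArtist, dto.CoverArtists),
    (PersonRole.Editor, dto.Editors),
    (PersonRole.Publisher, dto.Publishers),
    (PersonRole.Translator, dto.Translators),
    (PersonRole.Imprint, dto.Imprints),
    (PersonRole.Team, dto.Teams),
    (PersonRole.Location, dto.Locations)
};

foreach (var (role, people) in roleUpdates)
{
    await PersonHelper.UpdateChapterPeopleAsync(
        chapter,
        people.Select(p => Parser.Normalize(p.Name)).ToList(),
        role,
        _unitOfWork);
}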

View File

@ -55,7 +55,6 @@ public class ColorScapeController : BaseApiController
}
private ActionResult<ColorScapeDto> GetColorSpaceDto(IHasCoverImage entity)
{
if (entity == null) return Ok(ColorScapeDto.Empty);

View File

@ -45,7 +45,7 @@ public class ImageController : BaseApiController
/// <param name="chapterId"></param>
/// <returns></returns>
[HttpGet("chapter-cover")]
[ResponseCache(CacheProfileName = ResponseCacheProfiles.Images, VaryByQueryKeys = ["chapterId", "apiKey"])]
[ResponseCache(CacheProfileName = ResponseCacheProfiles.Images, VaryByQueryKeys = new []{"chapterId", "apiKey"})]
public async Task<ActionResult> GetChapterCoverImage(int chapterId, string apiKey)
{
var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey);
@ -130,6 +130,7 @@ public class ImageController : BaseApiController
{
var destFile = await GenerateCollectionCoverImage(collectionTagId);
if (string.IsNullOrEmpty(destFile)) return BadRequest(await _localizationService.Translate(userId, "no-cover-image"));
return PhysicalFile(destFile, MimeTypeMap.GetMimeType(_directoryService.FileSystem.Path.GetExtension(destFile)),
_directoryService.FileSystem.Path.GetFileName(destFile));
}
@ -170,6 +171,7 @@ public class ImageController : BaseApiController
ImageService.GetCollectionTagFormat(collectionId));
var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
destFile += settings.EncodeMediaAs.GetExtension();
if (_directoryService.FileSystem.File.Exists(destFile)) return destFile;
ImageService.CreateMergedImage(
covers.Select(c => _directoryService.FileSystem.Path.Join(_directoryService.CoverImageDirectory, c)).ToList(),
@ -282,6 +284,43 @@ public class ImageController : BaseApiController
return PhysicalFile(file.FullName, MimeTypeMap.GetMimeType(format), Path.GetFileName(file.FullName));
}
/// <summary>
/// Returns cover image for Person
/// </summary>
/// <param name="personId"></param>
/// <returns></returns>
[HttpGet("person-cover")]
[ResponseCache(CacheProfileName = ResponseCacheProfiles.Images, VaryByQueryKeys = ["personId", "apiKey"])]
public async Task<ActionResult> GetPersonCoverImage(int personId, string apiKey)
{
var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey);
if (userId == 0) return BadRequest();
var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.PersonRepository.GetCoverImageAsync(personId));
if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest(await _localizationService.Translate(userId, "no-cover-image"));
var format = _directoryService.FileSystem.Path.GetExtension(path);
return PhysicalFile(path, MimeTypeMap.GetMimeType(format), _directoryService.FileSystem.Path.GetFileName(path));
}
/// <summary>
/// Returns cover image for Person, looked up by name
/// </summary>
/// <param name="name"></param>
/// <returns></returns>
[HttpGet("person-cover-by-name")]
[ResponseCache(CacheProfileName = ResponseCacheProfiles.Images, VaryByQueryKeys = ["name", "apiKey"])]
public async Task<ActionResult> GetPersonCoverImageByName(string name, string apiKey)
{
var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey);
if (userId == 0) return BadRequest();
var path = Path.Join(_directoryService.CoverImageDirectory, await _unitOfWork.PersonRepository.GetCoverImageByNameAsync(name));
if (string.IsNullOrEmpty(path) || !_directoryService.FileSystem.File.Exists(path)) return BadRequest(await _localizationService.Translate(userId, "no-cover-image"));
var format = _directoryService.FileSystem.Path.GetExtension(path);
return PhysicalFile(path, MimeTypeMap.GetMimeType(format), _directoryService.FileSystem.Path.GetFileName(path));
}
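For reference, the two person-cover endpoints above would be called along these lines, assuming the conventional api/[controller] route from BaseApiController (id, name, and key values are placeholders):

GET /api/image/person-cover?personId=42&apiKey=4b6f...
GET /api/image/person-cover-by-name?name=Some%20Author&apiKey=4b6f...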
/// <summary>
/// Returns a temp coverupload image
/// </summary>

View File

@ -0,0 +1,116 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Data;
using API.DTOs;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers;
using API.Services;
using AutoMapper;
using Microsoft.AspNetCore.Mvc;
using Nager.ArticleNumber;
namespace API.Controllers;
public class PersonController : BaseApiController
{
private readonly IUnitOfWork _unitOfWork;
private readonly ILocalizationService _localizationService;
private readonly IMapper _mapper;
public PersonController(IUnitOfWork unitOfWork, ILocalizationService localizationService, IMapper mapper)
{
_unitOfWork = unitOfWork;
_localizationService = localizationService;
_mapper = mapper;
}
[HttpGet]
public async Task<ActionResult<PersonDto>> GetPersonByName(string name)
{
return Ok(await _unitOfWork.PersonRepository.GetPersonDtoByName(name, User.GetUserId()));
}
[HttpGet("roles")]
public async Task<ActionResult<IEnumerable<PersonRole>>> GetRolesForPersonByName(string name)
{
return Ok(await _unitOfWork.PersonRepository.GetRolesForPersonByName(name, User.GetUserId()));
}
/// <summary>
/// Returns a list of authors for browsing
/// </summary>
/// <param name="userParams"></param>
/// <returns></returns>
[HttpPost("authors")]
public async Task<ActionResult<PagedList<BrowsePersonDto>>> GetAuthorsForBrowse([FromQuery] UserParams? userParams)
{
userParams ??= UserParams.Default;
var list = await _unitOfWork.PersonRepository.GetAllWritersAndSeriesCount(User.GetUserId(), userParams);
Response.AddPaginationHeader(list.CurrentPage, list.PageSize, list.TotalCount, list.TotalPages);
return Ok(list);
}
/// <summary>
/// Updates the Person
/// </summary>
/// <param name="dto"></param>
/// <returns></returns>
[HttpPost("update")]
public async Task<ActionResult<PersonDto>> UpdatePerson(UpdatePersonDto dto)
{
// This needs to get all people and update them equally
var person = await _unitOfWork.PersonRepository.GetPersonById(dto.Id);
if (person == null) return BadRequest(_localizationService.Translate(User.GetUserId(), "person-doesnt-exist"));
dto.Description ??= string.Empty;
person.Description = dto.Description;
person.CoverImageLocked = dto.CoverImageLocked;
if (dto.MalId is > 0)
{
person.MalId = (long) dto.MalId;
}
if (dto.AniListId is > 0)
{
person.AniListId = (int) dto.AniListId;
}
if (!string.IsNullOrEmpty(dto.HardcoverId?.Trim()))
{
person.HardcoverId = dto.HardcoverId.Trim();
}
var asin = dto.Asin?.Trim();
if (!string.IsNullOrEmpty(asin) &&
(ArticleNumberHelper.IsValidIsbn10(asin) || ArticleNumberHelper.IsValidIsbn13(asin)))
{
person.Asin = asin;
}
_unitOfWork.PersonRepository.Update(person);
await _unitOfWork.CommitAsync();
return Ok(_mapper.Map<PersonDto>(person));
}
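A representative payload for the update endpoint above (all values hypothetical; fields mirror UpdatePersonDto, and note the Asin is only persisted when it validates as an ISBN-10/13):

POST /api/person/update
{
  "id": 1,
  "coverImageLocked": false,
  "description": "Mangaka and illustrator.",
  "aniListId": 96902,
  "malId": 1882,
  "hardcoverId": "example-author",
  "asin": "0316769487"
}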
/// <summary>
/// Returns the top 20 series that the "person" is known for. This will use Average Rating when applicable (Kavita+ field), else it's a random sort
/// </summary>
/// <param name="personId"></param>
/// <returns></returns>
[HttpGet("series-known-for")]
public async Task<ActionResult<IEnumerable<SeriesDto>>> GetKnownSeries(int personId)
{
return Ok(await _unitOfWork.PersonRepository.GetSeriesKnownFor(personId));
}
[HttpGet("chapters-by-role")]
public async Task<ActionResult<IEnumerable<StandaloneChapterDto>>> GetChaptersByRole(int personId, PersonRole role)
{
return Ok(await _unitOfWork.PersonRepository.GetChaptersForPersonByRole(personId, User.GetUserId(), role));
}
}

View File

@ -480,5 +480,54 @@ public class UploadController : BaseApiController
return BadRequest(await _localizationService.Translate(User.GetUserId(), "reset-chapter-lock"));
}
/// <summary>
/// Replaces person cover image and locks it with a base64 encoded image
/// </summary>
/// <param name="uploadFileDto"></param>
/// <returns></returns>
[Authorize(Policy = "RequireAdminRole")]
[RequestSizeLimit(ControllerConstants.MaxUploadSizeBytes)]
[HttpPost("person")]
public async Task<ActionResult> UploadPersonCoverImageFromUrl(UploadFileDto uploadFileDto)
{
// Check if Url is non-empty, request the image and place in temp, then ask image service to handle it.
// See if we can do this all in memory without touching underlying system
if (string.IsNullOrEmpty(uploadFileDto.Url))
{
return BadRequest(await _localizationService.Translate(User.GetUserId(), "url-required"));
}
try
{
var person = await _unitOfWork.PersonRepository.GetPersonById(uploadFileDto.Id);
if (person == null) return BadRequest(await _localizationService.Translate(User.GetUserId(), "person-doesnt-exist"));
var filePath = await CreateThumbnail(uploadFileDto, $"{ImageService.GetPersonFormat(uploadFileDto.Id)}");
if (!string.IsNullOrEmpty(filePath))
{
person.CoverImage = filePath;
person.CoverImageLocked = true;
_imageService.UpdateColorScape(person);
_unitOfWork.PersonRepository.Update(person);
}
if (_unitOfWork.HasChanges())
{
await _unitOfWork.CommitAsync();
await _eventHub.SendMessageAsync(MessageFactory.CoverUpdate,
MessageFactory.CoverUpdateEvent(person.Id, MessageFactoryEntityTypes.Person), false);
return Ok();
}
}
catch (Exception e)
{
_logger.LogError(e, "There was an issue uploading cover image for Person {Id}", uploadFileDto.Id);
await _unitOfWork.RollbackAsync();
}
return BadRequest(await _localizationService.Translate(User.GetUserId(), "generic-cover-person-save"));
}
}
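The endpoint above follows the other cover-upload endpoints: a URL plus the target id; an illustrative request, with hypothetical values and fields as read from UploadFileDto in the method:

POST /api/upload/person
{
  "id": 42,
  "url": "https://example.com/covers/author.jpg"
}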

View File

@ -0,0 +1,16 @@
namespace API.DTOs;
/// <summary>
/// Used to browse writers and click through to see their series
/// </summary>
public class BrowsePersonDto : PersonDto
{
/// <summary>
/// Number of Series this Person is the Writer for
/// </summary>
public int SeriesCount { get; set; }
/// <summary>
/// Number of Issues this Person is the Writer for
/// </summary>
public int IssueCount { get; set; }
}

View File

@ -0,0 +1,37 @@
namespace API.DTOs;
public class PersonDto
{
public int Id { get; set; }
public required string Name { get; set; }
public bool CoverImageLocked { get; set; }
public string PrimaryColor { get; set; }
public string SecondaryColor { get; set; }
public string? CoverImage { get; set; }
public string Description { get; set; }
/// <summary>
/// ASIN for person
/// </summary>
/// <remarks>Can be used for Amazon author lookup</remarks>
public string? Asin { get; set; }
/// <summary>
/// https://anilist.co/staff/{AniListId}/
/// </summary>
/// <remarks>Kavita+ Only</remarks>
public int AniListId { get; set; } = 0;
/// <summary>
/// https://myanimelist.net/people/{MalId}/
/// https://myanimelist.net/character/{MalId}/CharacterName
/// </summary>
/// <remarks>Kavita+ Only</remarks>
public long MalId { get; set; } = 0;
/// <summary>
/// https://hardcover.app/authors/{HardcoverId}
/// </summary>
/// <remarks>Kavita+ Only</remarks>
public string? HardcoverId { get; set; }
}

View File

@ -0,0 +1,17 @@
using System.ComponentModel.DataAnnotations;
namespace API.DTOs;
public class UpdatePersonDto
{
[Required]
public int Id { get; init; }
[Required]
public bool CoverImageLocked { get; set; }
public string? Description { get; set; }
public int? AniListId { get; set; }
public long? MalId { get; set; }
public string? HardcoverId { get; set; }
public string? Asin { get; set; }
}

View File

@ -1,10 +0,0 @@
using API.Entities.Enums;
namespace API.DTOs;
public class PersonDto
{
public int Id { get; set; }
public required string Name { get; set; }
public PersonRole Role { get; set; }
}

View File

@ -0,0 +1,14 @@
using API.Entities.Enums;
namespace API.DTOs;
/// <summary>
/// Used on Person Profile page
/// </summary>
public class StandaloneChapterDto : ChapterDto
{
public int SeriesId { get; set; }
public int LibraryId { get; set; }
public LibraryType LibraryType { get; set; }
public string VolumeTitle { get; set; }
}

View File

@ -66,6 +66,8 @@ public sealed class DataContext : IdentityDbContext<AppUser, AppRole, int,
public DbSet<ManualMigrationHistory> ManualMigrationHistory { get; set; } = null!;
public DbSet<SeriesBlacklist> SeriesBlacklist { get; set; } = null!;
public DbSet<AppUserCollection> AppUserCollection { get; set; } = null!;
public DbSet<ChapterPeople> ChapterPeople { get; set; } = null!;
public DbSet<SeriesMetadataPeople> SeriesMetadataPeople { get; set; } = null!;
protected override void OnModelCreating(ModelBuilder builder)
@ -155,6 +157,36 @@ public sealed class DataContext : IdentityDbContext<AppUser, AppRole, int,
builder.Entity<AppUserCollection>()
.Property(b => b.AgeRating)
.HasDefaultValue(AgeRating.Unknown);
// Configure the many-to-many relationships between Chapter/SeriesMetadata and Person
builder.Entity<ChapterPeople>()
.HasKey(cp => new { cp.ChapterId, cp.PersonId, cp.Role });
builder.Entity<ChapterPeople>()
.HasOne(cp => cp.Chapter)
.WithMany(c => c.People)
.HasForeignKey(cp => cp.ChapterId);
builder.Entity<ChapterPeople>()
.HasOne(cp => cp.Person)
.WithMany(p => p.ChapterPeople)
.HasForeignKey(cp => cp.PersonId)
.OnDelete(DeleteBehavior.Cascade);
builder.Entity<SeriesMetadataPeople>()
.HasKey(smp => new { smp.SeriesMetadataId, smp.PersonId, smp.Role });
builder.Entity<SeriesMetadataPeople>()
.HasOne(smp => smp.SeriesMetadata)
.WithMany(sm => sm.People)
.HasForeignKey(smp => smp.SeriesMetadataId);
builder.Entity<SeriesMetadataPeople>()
.HasOne(smp => smp.Person)
.WithMany(p => p.SeriesMetadataPeople)
.HasForeignKey(smp => smp.PersonId)
.OnDelete(DeleteBehavior.Cascade);
}
#nullable enable
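Because Role is part of both composite keys, the same person can be linked to one chapter (or one series) several times, once per role; a small sketch, assuming the entity shapes configured above and a chapter/person already in hand:

// Allowed: the primary key is (ChapterId, PersonId, Role), so two roles coexist.
context.ChapterPeople.Add(new ChapterPeople { Chapter = chapter, Person = person, Role = PersonRole.Writer });
context.ChapterPeople.Add(new ChapterPeople { Chapter = chapter, Person = person, Role = PersonRole.CoverArtist });
await context.SaveChangesAsync();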

View File

@ -0,0 +1,51 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Services.Tasks.Scanner.Parser;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// v0.8.3 still had a bug around LowestFolderPath. This resets it for all series.
/// </summary>
public static class MigrateLowestSeriesFolderPath2
{
public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, ILogger<Program> logger)
{
if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateLowestSeriesFolderPath2"))
{
return;
}
logger.LogCritical(
"Running MigrateLowestSeriesFolderPath2 migration - Please be patient, this may take some time. This is not an error");
var series = await dataContext.Series.Where(s => !string.IsNullOrEmpty(s.LowestFolderPath)).ToListAsync();
foreach (var s in series)
{
s.LowestFolderPath = string.Empty;
unitOfWork.SeriesRepository.Update(s);
}
// Save changes after processing all series
if (dataContext.ChangeTracker.HasChanges())
{
await dataContext.SaveChangesAsync();
}
dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
{
Name = "MigrateLowestSeriesFolderPath2",
ProductVersion = BuildInfo.Version.ToString(),
RanAt = DateTime.UtcNow
});
await dataContext.SaveChangesAsync();
logger.LogCritical(
"Running MigrateLowestSeriesFolderPath2 migration - Completed. This is not an error");
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,91 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class PersonFields : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<int>(
name: "AniListId",
table: "Person",
type: "INTEGER",
nullable: false,
defaultValue: 0);
migrationBuilder.AddColumn<string>(
name: "Asin",
table: "Person",
type: "TEXT",
nullable: true);
migrationBuilder.AddColumn<string>(
name: "CoverImage",
table: "Person",
type: "TEXT",
nullable: true);
migrationBuilder.AddColumn<bool>(
name: "CoverImageLocked",
table: "Person",
type: "INTEGER",
nullable: false,
defaultValue: false);
migrationBuilder.AddColumn<string>(
name: "Description",
table: "Person",
type: "TEXT",
nullable: true);
migrationBuilder.AddColumn<string>(
name: "HardcoverId",
table: "Person",
type: "TEXT",
nullable: true);
migrationBuilder.AddColumn<long>(
name: "MalId",
table: "Person",
type: "INTEGER",
nullable: false,
defaultValue: 0L);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "AniListId",
table: "Person");
migrationBuilder.DropColumn(
name: "Asin",
table: "Person");
migrationBuilder.DropColumn(
name: "CoverImage",
table: "Person");
migrationBuilder.DropColumn(
name: "CoverImageLocked",
table: "Person");
migrationBuilder.DropColumn(
name: "Description",
table: "Person");
migrationBuilder.DropColumn(
name: "HardcoverId",
table: "Person");
migrationBuilder.DropColumn(
name: "MalId",
table: "Person");
}
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,159 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class PeopleOverhaulPart1 : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "ChapterPerson");
migrationBuilder.DropTable(
name: "PersonSeriesMetadata");
migrationBuilder.DropColumn(
name: "Role",
table: "Person");
migrationBuilder.CreateTable(
name: "ChapterPeople",
columns: table => new
{
ChapterId = table.Column<int>(type: "INTEGER", nullable: false),
PersonId = table.Column<int>(type: "INTEGER", nullable: false),
Role = table.Column<int>(type: "INTEGER", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_ChapterPeople", x => new { x.ChapterId, x.PersonId, x.Role });
table.ForeignKey(
name: "FK_ChapterPeople_Chapter_ChapterId",
column: x => x.ChapterId,
principalTable: "Chapter",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_ChapterPeople_Person_PersonId",
column: x => x.PersonId,
principalTable: "Person",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "SeriesMetadataPeople",
columns: table => new
{
SeriesMetadataId = table.Column<int>(type: "INTEGER", nullable: false),
PersonId = table.Column<int>(type: "INTEGER", nullable: false),
Role = table.Column<int>(type: "INTEGER", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_SeriesMetadataPeople", x => new { x.SeriesMetadataId, x.PersonId, x.Role });
table.ForeignKey(
name: "FK_SeriesMetadataPeople_Person_PersonId",
column: x => x.PersonId,
principalTable: "Person",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_SeriesMetadataPeople_SeriesMetadata_SeriesMetadataId",
column: x => x.SeriesMetadataId,
principalTable: "SeriesMetadata",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateIndex(
name: "IX_ChapterPeople_PersonId",
table: "ChapterPeople",
column: "PersonId");
migrationBuilder.CreateIndex(
name: "IX_SeriesMetadataPeople_PersonId",
table: "SeriesMetadataPeople",
column: "PersonId");
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "ChapterPeople");
migrationBuilder.DropTable(
name: "SeriesMetadataPeople");
migrationBuilder.AddColumn<int>(
name: "Role",
table: "Person",
type: "INTEGER",
nullable: false,
defaultValue: 0);
migrationBuilder.CreateTable(
name: "ChapterPerson",
columns: table => new
{
ChapterMetadatasId = table.Column<int>(type: "INTEGER", nullable: false),
PeopleId = table.Column<int>(type: "INTEGER", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_ChapterPerson", x => new { x.ChapterMetadatasId, x.PeopleId });
table.ForeignKey(
name: "FK_ChapterPerson_Chapter_ChapterMetadatasId",
column: x => x.ChapterMetadatasId,
principalTable: "Chapter",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_ChapterPerson_Person_PeopleId",
column: x => x.PeopleId,
principalTable: "Person",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "PersonSeriesMetadata",
columns: table => new
{
PeopleId = table.Column<int>(type: "INTEGER", nullable: false),
SeriesMetadatasId = table.Column<int>(type: "INTEGER", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_PersonSeriesMetadata", x => new { x.PeopleId, x.SeriesMetadatasId });
table.ForeignKey(
name: "FK_PersonSeriesMetadata_Person_PeopleId",
column: x => x.PeopleId,
principalTable: "Person",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_PersonSeriesMetadata_SeriesMetadata_SeriesMetadatasId",
column: x => x.SeriesMetadatasId,
principalTable: "SeriesMetadata",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateIndex(
name: "IX_ChapterPerson_PeopleId",
table: "ChapterPerson",
column: "PeopleId");
migrationBuilder.CreateIndex(
name: "IX_PersonSeriesMetadata_SeriesMetadatasId",
table: "PersonSeriesMetadata",
column: "SeriesMetadatasId");
}
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,59 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class PeopleOverhaulPart2 : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "CoverImage",
table: "Person",
type: "TEXT",
nullable: true);
migrationBuilder.AddColumn<bool>(
name: "CoverImageLocked",
table: "Person",
type: "INTEGER",
nullable: false,
defaultValue: false);
migrationBuilder.AddColumn<string>(
name: "PrimaryColor",
table: "Person",
type: "TEXT",
nullable: true);
migrationBuilder.AddColumn<string>(
name: "SecondaryColor",
table: "Person",
type: "TEXT",
nullable: true);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "CoverImage",
table: "Person");
migrationBuilder.DropColumn(
name: "CoverImageLocked",
table: "Person");
migrationBuilder.DropColumn(
name: "PrimaryColor",
table: "Person");
migrationBuilder.DropColumn(
name: "SecondaryColor",
table: "Person");
}
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,70 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace API.Data.Migrations
{
/// <inheritdoc />
public partial class PeopleOverhaulPart3 : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<int>(
name: "AniListId",
table: "Person",
type: "INTEGER",
nullable: false,
defaultValue: 0);
migrationBuilder.AddColumn<string>(
name: "Asin",
table: "Person",
type: "TEXT",
nullable: true);
migrationBuilder.AddColumn<string>(
name: "Description",
table: "Person",
type: "TEXT",
nullable: true);
migrationBuilder.AddColumn<string>(
name: "HardcoverId",
table: "Person",
type: "TEXT",
nullable: true);
migrationBuilder.AddColumn<long>(
name: "MalId",
table: "Person",
type: "INTEGER",
nullable: false,
defaultValue: 0L);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "AniListId",
table: "Person");
migrationBuilder.DropColumn(
name: "Asin",
table: "Person");
migrationBuilder.DropColumn(
name: "Description",
table: "Person");
migrationBuilder.DropColumn(
name: "HardcoverId",
table: "Person");
migrationBuilder.DropColumn(
name: "MalId",
table: "Person");
}
}
}

View File

@ -901,6 +901,24 @@ namespace API.Data.Migrations
b.ToTable("Chapter");
});
modelBuilder.Entity("API.Entities.ChapterPeople", b =>
{
b.Property<int>("ChapterId")
.HasColumnType("INTEGER");
b.Property<int>("PersonId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.HasKey("ChapterId", "PersonId", "Role");
b.HasIndex("PersonId");
b.ToTable("ChapterPeople");
});
modelBuilder.Entity("API.Entities.CollectionTag", b =>
{
b.Property<int>("Id")
@ -1531,14 +1549,38 @@ namespace API.Data.Migrations
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AniListId")
.HasColumnType("INTEGER");
b.Property<string>("Asin")
.HasColumnType("TEXT");
b.Property<string>("CoverImage")
.HasColumnType("TEXT");
b.Property<bool>("CoverImageLocked")
.HasColumnType("INTEGER");
b.Property<string>("Description")
.HasColumnType("TEXT");
b.Property<string>("HardcoverId")
.HasColumnType("TEXT");
b.Property<long>("MalId")
.HasColumnType("INTEGER");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasColumnType("TEXT");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.Property<string>("PrimaryColor")
.HasColumnType("TEXT");
b.Property<string>("SecondaryColor")
.HasColumnType("TEXT");
b.HasKey("Id");
@ -1903,6 +1945,24 @@ namespace API.Data.Migrations
b.ToTable("Series");
});
modelBuilder.Entity("API.Entities.SeriesMetadataPeople", b =>
{
b.Property<int>("SeriesMetadataId")
.HasColumnType("INTEGER");
b.Property<int>("PersonId")
.HasColumnType("INTEGER");
b.Property<int>("Role")
.HasColumnType("INTEGER");
b.HasKey("SeriesMetadataId", "PersonId", "Role");
b.HasIndex("PersonId");
b.ToTable("SeriesMetadataPeople");
});
modelBuilder.Entity("API.Entities.ServerSetting", b =>
{
b.Property<int>("Key")
@ -2149,21 +2209,6 @@ namespace API.Data.Migrations
b.ToTable("ChapterGenre");
});
modelBuilder.Entity("ChapterPerson", b =>
{
b.Property<int>("ChapterMetadatasId")
.HasColumnType("INTEGER");
b.Property<int>("PeopleId")
.HasColumnType("INTEGER");
b.HasKey("ChapterMetadatasId", "PeopleId");
b.HasIndex("PeopleId");
b.ToTable("ChapterPerson");
});
modelBuilder.Entity("ChapterTag", b =>
{
b.Property<int>("ChaptersId")
@ -2338,21 +2383,6 @@ namespace API.Data.Migrations
b.ToTable("AspNetUserTokens", (string)null);
});
modelBuilder.Entity("PersonSeriesMetadata", b =>
{
b.Property<int>("PeopleId")
.HasColumnType("INTEGER");
b.Property<int>("SeriesMetadatasId")
.HasColumnType("INTEGER");
b.HasKey("PeopleId", "SeriesMetadatasId");
b.HasIndex("SeriesMetadatasId");
b.ToTable("PersonSeriesMetadata");
});
modelBuilder.Entity("SeriesMetadataTag", b =>
{
b.Property<int>("SeriesMetadatasId")
@ -2600,6 +2630,25 @@ namespace API.Data.Migrations
b.Navigation("Volume");
});
modelBuilder.Entity("API.Entities.ChapterPeople", b =>
{
b.HasOne("API.Entities.Chapter", "Chapter")
.WithMany("People")
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.Person", "Person")
.WithMany("ChapterPeople")
.HasForeignKey("PersonId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
b.Navigation("Person");
});
modelBuilder.Entity("API.Entities.Device", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
@ -2827,6 +2876,25 @@ namespace API.Data.Migrations
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.SeriesMetadataPeople", b =>
{
b.HasOne("API.Entities.Person", "Person")
.WithMany("SeriesMetadataPeople")
.HasForeignKey("PersonId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.Metadata.SeriesMetadata", "SeriesMetadata")
.WithMany("People")
.HasForeignKey("SeriesMetadataId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Person");
b.Navigation("SeriesMetadata");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.HasOne("API.Entities.Series", "Series")
@ -2883,21 +2951,6 @@ namespace API.Data.Migrations
.IsRequired();
});
modelBuilder.Entity("ChapterPerson", b =>
{
b.HasOne("API.Entities.Chapter", null)
.WithMany()
.HasForeignKey("ChapterMetadatasId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.Person", null)
.WithMany()
.HasForeignKey("PeopleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("ChapterTag", b =>
{
b.HasOne("API.Entities.Chapter", null)
@ -3024,21 +3077,6 @@ namespace API.Data.Migrations
.IsRequired();
});
modelBuilder.Entity("PersonSeriesMetadata", b =>
{
b.HasOne("API.Entities.Person", null)
.WithMany()
.HasForeignKey("PeopleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.Metadata.SeriesMetadata", null)
.WithMany()
.HasForeignKey("SeriesMetadatasId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("SeriesMetadataTag", b =>
{
b.HasOne("API.Entities.Metadata.SeriesMetadata", null)
@ -3096,6 +3134,8 @@ namespace API.Data.Migrations
{
b.Navigation("Files");
b.Navigation("People");
b.Navigation("UserProgress");
});
@ -3110,6 +3150,18 @@ namespace API.Data.Migrations
b.Navigation("Series");
});
modelBuilder.Entity("API.Entities.Metadata.SeriesMetadata", b =>
{
b.Navigation("People");
});
modelBuilder.Entity("API.Entities.Person", b =>
{
b.Navigation("ChapterPeople");
b.Navigation("SeriesMetadataPeople");
});
modelBuilder.Entity("API.Entities.ReadingList", b =>
{
b.Navigation("Items");

View File

@ -60,6 +60,7 @@ public interface ICollectionTagRepository
Task<IEnumerable<AppUserCollection>> GetCollectionsByIds(IEnumerable<int> tags, CollectionIncludes includes = CollectionIncludes.None);
Task<IList<AppUserCollection>> GetAllCollectionsForSyncing(DateTime expirationTime);
}
public class CollectionTagRepository : ICollectionTagRepository
{
private readonly DataContext _context;
@ -195,8 +196,10 @@ public class CollectionTagRepository : ICollectionTagRepository
.Where(t => t.Id == tag.Id)
.SelectMany(uc => uc.Items.Select(s => s.Metadata))
.Select(sm => sm.AgeRating)
.MaxAsync();
tag.AgeRating = maxAgeRating;
.ToListAsync();
tag.AgeRating = maxAgeRating.Count != 0 ? maxAgeRating.Max() : AgeRating.Unknown;
await _context.SaveChangesAsync();
}
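For context on the change above: MaxAsync over an empty, non-nullable projection throws (SQL MAX returns NULL, which cannot materialize into AgeRating), so a collection with no items would fail; materializing the list first allows the AgeRating.Unknown fallback. A minimal repro of the old failure mode, assuming the query starts from AppUserCollection as the surrounding method suggests (emptyTagId is hypothetical):

// Throws InvalidOperationException when the collection has no items:
var max = await _context.AppUserCollection
    .Where(t => t.Id == emptyTagId)
    .SelectMany(uc => uc.Items.Select(s => s.Metadata.AgeRating))
    .MaxAsync();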
@ -219,7 +222,6 @@ public class CollectionTagRepository : ICollectionTagRepository
.ToListAsync();
}
public async Task<AppUserCollection?> GetCollectionAsync(int tagId, CollectionIncludes includes = CollectionIncludes.None)
{
return await _context.AppUserCollection

View File

@ -199,6 +199,7 @@ public class ExternalSeriesMetadataRepository : IExternalSeriesMetadataRepositor
.Where(r => EF.Functions.Like(r.Name, series.Name) ||
EF.Functions.Like(r.Name, series.LocalizedName))
.ToListAsync();
foreach (var rec in recMatches)
{
rec.SeriesId = series.Id;

View File

@ -6,6 +6,7 @@ using API.DTOs.Metadata;
using API.Entities;
using API.Extensions;
using API.Extensions.QueryExtensions;
using API.Services.Tasks.Scanner.Parser;
using AutoMapper;
using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore;
@ -24,6 +25,7 @@ public interface IGenreRepository
Task<int> GetCountAsync();
Task<GenreTagDto> GetRandomGenre();
Task<GenreTagDto> GetGenreById(int id);
Task<List<string>> GetAllGenresNotInListAsync(ICollection<string> genreNames);
}
public class GenreRepository : IGenreRepository
@ -133,4 +135,33 @@ public class GenreRepository : IGenreRepository
.ProjectTo<GenreTagDto>(_mapper.ConfigurationProvider)
.ToListAsync();
}
/// <summary>
/// Gets all genres that are not already present in the system.
/// Normalizes genres for lookup, but returns non-normalized names for creation.
/// </summary>
/// <param name="genreNames">The list of genre names (non-normalized).</param>
/// <returns>A list of genre names that do not exist in the system.</returns>
public async Task<List<string>> GetAllGenresNotInListAsync(ICollection<string> genreNames)
{
// Group the genres by their normalized names, keeping track of the original names
var normalizedToOriginalMap = genreNames
.Distinct()
.GroupBy(Parser.Normalize)
.ToDictionary(group => group.Key, group => group.First()); // Take the first original name for each normalized name
var normalizedGenreNames = normalizedToOriginalMap.Keys.ToList();
// Query the database for existing genres using the normalized names
var existingGenres = await _context.Genre
.Where(g => normalizedGenreNames.Contains(g.NormalizedTitle)) // Match on the pre-normalized title field
.Select(g => g.NormalizedTitle)
.ToListAsync();
// Find the normalized genres that do not exist in the database
var missingGenres = normalizedGenreNames.Except(existingGenres).ToList();
// Return the original non-normalized genres for the missing ones
return missingGenres.Select(normalizedName => normalizedToOriginalMap[normalizedName]).ToList();
}
}
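A usage sketch of the lookup above (hypothetical call site and data): with a genre whose NormalizedTitle is "action" already in the database,

var missing = await _unitOfWork.GenreRepository
    .GetAllGenresNotInListAsync(new List<string> { "action", "Isekai", "ISEKAI" });
// missing == ["Isekai"]: the two spellings collapse to one normalized key,
// "action" already exists, and the first original spelling is returned for creation.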

View File

@ -1,4 +1,6 @@
using System.Collections.Generic;
using System.Collections;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.DTOs;
@ -6,6 +8,7 @@ using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Extensions.QueryExtensions;
using API.Helpers;
using AutoMapper;
using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore;
@ -15,7 +18,12 @@ namespace API.Data.Repositories;
public interface IPersonRepository
{
void Attach(Person person);
void Attach(IEnumerable<Person> person);
void Remove(Person person);
void Remove(ChapterPeople person);
void Remove(SeriesMetadataPeople person);
void Update(Person person);
Task<IList<Person>> GetAllPeople();
Task<IList<PersonDto>> GetAllPersonDtosAsync(int userId);
Task<IList<PersonDto>> GetAllPersonDtosByRoleAsync(int userId, PersonRole role);
@ -23,7 +31,17 @@ public interface IPersonRepository
Task<IList<PersonDto>> GetAllPeopleDtosForLibrariesAsync(int userId, List<int>? libraryIds = null);
Task<int> GetCountAsync();
Task<IList<Person>> GetAllPeopleByRoleAndNames(PersonRole role, IEnumerable<string> normalizeNames);
Task<string> GetCoverImageAsync(int personId);
Task<string?> GetCoverImageByNameAsync(string name);
Task<IEnumerable<PersonRole>> GetRolesForPersonByName(string name, int userId);
Task<PagedList<BrowsePersonDto>> GetAllWritersAndSeriesCount(int userId, UserParams userParams);
Task<Person?> GetPersonById(int personId);
Task<PersonDto?> GetPersonDtoByName(string name, int userId);
Task<Person> GetPersonByName(string name);
Task<IEnumerable<SeriesDto>> GetSeriesKnownFor(int personId);
Task<IEnumerable<StandaloneChapterDto>> GetChaptersForPersonByRole(int personId, int userId, PersonRole role);
Task<IList<Person>> GetPeopleByNames(List<string> normalizedNames);
}
public class PersonRepository : IPersonRepository
@ -42,17 +60,37 @@ public class PersonRepository : IPersonRepository
_context.Person.Attach(person);
}
public void Attach(IEnumerable<Person> person)
{
_context.Person.AttachRange(person);
}
public void Remove(Person person)
{
_context.Person.Remove(person);
}
public void Remove(ChapterPeople person)
{
_context.ChapterPeople.Remove(person);
}
public void Remove(SeriesMetadataPeople person)
{
_context.SeriesMetadataPeople.Remove(person);
}
public void Update(Person person)
{
_context.Person.Update(person);
}
public async Task RemoveAllPeopleNoLongerAssociated()
{
var peopleWithNoConnections = await _context.Person
.Include(p => p.SeriesMetadatas)
.Include(p => p.ChapterMetadatas)
.Where(p => p.SeriesMetadatas.Count == 0 && p.ChapterMetadatas.Count == 0)
.Include(p => p.SeriesMetadataPeople)
.Include(p => p.ChapterPeople)
.Where(p => p.SeriesMetadataPeople.Count == 0 && p.ChapterPeople.Count == 0)
.AsSplitQuery()
.ToListAsync();
@ -61,6 +99,7 @@ public class PersonRepository : IPersonRepository
await _context.SaveChangesAsync();
}
public async Task<IList<PersonDto>> GetAllPeopleDtosForLibrariesAsync(int userId, List<int>? libraryIds = null)
{
var ageRating = await _context.AppUser.GetUserAgeRestriction(userId);
@ -74,7 +113,7 @@ public class PersonRepository : IPersonRepository
return await _context.Series
.Where(s => userLibs.Contains(s.LibraryId))
.RestrictAgainstAgeRestriction(ageRating)
.SelectMany(s => s.Metadata.People)
.SelectMany(s => s.Metadata.People.Select(p => p.Person))
.Distinct()
.OrderBy(p => p.Name)
.AsNoTracking()
@ -88,13 +127,124 @@ public class PersonRepository : IPersonRepository
return await _context.Person.CountAsync();
}
public async Task<IList<Person>> GetAllPeopleByRoleAndNames(PersonRole role, IEnumerable<string> normalizeNames)
public async Task<string> GetCoverImageAsync(int personId)
{
return await _context.Person
.Where(p => p.Role == role && normalizeNames.Contains(p.NormalizedName))
.Where(c => c.Id == personId)
.Select(c => c.CoverImage)
.SingleOrDefaultAsync();
}
public async Task<string> GetCoverImageByNameAsync(string name)
{
var normalized = name.ToNormalized();
return await _context.Person
.Where(c => c.NormalizedName == normalized)
.Select(c => c.CoverImage)
.SingleOrDefaultAsync();
}
public async Task<IEnumerable<PersonRole>> GetRolesForPersonByName(string name, int userId)
{
// TODO: This will need to check both series and chapters (in cases where komf only updates series)
var normalized = name.ToNormalized();
var ageRating = await _context.AppUser.GetUserAgeRestriction(userId);
return await _context.Person
.Where(p => p.NormalizedName == normalized)
.RestrictAgainstAgeRestriction(ageRating)
.SelectMany(p => p.ChapterPeople.Select(cp => cp.Role))
.Distinct()
.ToListAsync();
}
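A sketch of what the TODO above might look like once series-level roles are folded in, using the same navigations as elsewhere in this repository (not part of the current method):

// Union chapter-level and series-level roles for the person.
var chapterRoles = await _context.Person
    .Where(p => p.NormalizedName == normalized)
    .RestrictAgainstAgeRestriction(ageRating)
    .SelectMany(p => p.ChapterPeople.Select(cp => cp.Role))
    .ToListAsync();
var seriesRoles = await _context.Person
    .Where(p => p.NormalizedName == normalized)
    .RestrictAgainstAgeRestriction(ageRating)
    .SelectMany(p => p.SeriesMetadataPeople.Select(smp => smp.Role))
    .ToListAsync();
return chapterRoles.Union(seriesRoles); // Union de-duplicates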
public async Task<PagedList<BrowsePersonDto>> GetAllWritersAndSeriesCount(int userId, UserParams userParams)
{
List<PersonRole> roles = [PersonRole.Writer, PersonRole.CoverArtist];
var ageRating = await _context.AppUser.GetUserAgeRestriction(userId);
var query = _context.Person
.Where(p => p.SeriesMetadataPeople.Any(smp => roles.Contains(smp.Role)) || p.ChapterPeople.Any(cmp => roles.Contains(cmp.Role)))
.RestrictAgainstAgeRestriction(ageRating)
.Select(p => new BrowsePersonDto
{
Id = p.Id,
Name = p.Name,
Description = p.Description,
SeriesCount = p.SeriesMetadataPeople
.Where(smp => roles.Contains(smp.Role))
.Select(smp => smp.SeriesMetadata.SeriesId)
.Distinct()
.Count(),
IssueCount = p.ChapterPeople
.Where(cp => roles.Contains(cp.Role))
.Select(cp => cp.Chapter.Id)
.Distinct()
.Count()
})
.OrderBy(p => p.Name);
return await PagedList<BrowsePersonDto>.CreateAsync(query, userParams.PageNumber, userParams.PageSize);
}
public async Task<Person?> GetPersonById(int personId)
{
return await _context.Person.Where(p => p.Id == personId)
.FirstOrDefaultAsync();
}
public async Task<PersonDto> GetPersonDtoByName(string name, int userId)
{
var normalized = name.ToNormalized();
var ageRating = await _context.AppUser.GetUserAgeRestriction(userId);
return await _context.Person
.Where(p => p.NormalizedName == normalized)
.RestrictAgainstAgeRestriction(ageRating)
.ProjectTo<PersonDto>(_mapper.ConfigurationProvider)
.FirstOrDefaultAsync();
}
public async Task<Person> GetPersonByName(string name)
{
return await _context.Person.FirstOrDefaultAsync(p => p.NormalizedName == name.ToNormalized());
}
public async Task<IEnumerable<SeriesDto>> GetSeriesKnownFor(int personId)
{
return await _context.Person
.Where(p => p.Id == personId)
.SelectMany(p => p.SeriesMetadataPeople)
.Select(smp => smp.SeriesMetadata)
.Select(sm => sm.Series)
.Distinct()
.OrderByDescending(s => s.ExternalSeriesMetadata.AverageExternalRating)
.Take(20)
.ProjectTo<SeriesDto>(_mapper.ConfigurationProvider)
.ToListAsync();
}
public async Task<IEnumerable<StandaloneChapterDto>> GetChaptersForPersonByRole(int personId, int userId, PersonRole role)
{
var ageRating = await _context.AppUser.GetUserAgeRestriction(userId);
return await _context.ChapterPeople
.Where(cp => cp.PersonId == personId && cp.Role == role)
.Select(cp => cp.Chapter)
.RestrictAgainstAgeRestriction(ageRating)
.OrderBy(ch => ch.SortOrder)
.Take(20)
.ProjectTo<StandaloneChapterDto>(_mapper.ConfigurationProvider)
.ToListAsync();
}
public async Task<IList<Person>> GetPeopleByNames(List<string> normalizedNames)
{
return await _context.Person
.Where(p => normalizedNames.Contains(p.NormalizedName))
.OrderBy(p => p.Name)
.ToListAsync();
}
public async Task<IList<Person>> GetAllPeople()
{
@ -106,7 +256,7 @@ public class PersonRepository : IPersonRepository
public async Task<IList<PersonDto>> GetAllPersonDtosAsync(int userId)
{
var ageRating = await _context.AppUser.GetUserAgeRestriction(userId);
var libraryIds = await _context.Library.GetUserLibraries(userId).ToListAsync();
return await _context.Person
.OrderBy(p => p.Name)
.RestrictAgainstAgeRestriction(ageRating)
@ -117,8 +267,9 @@ public class PersonRepository : IPersonRepository
public async Task<IList<PersonDto>> GetAllPersonDtosByRoleAsync(int userId, PersonRole role)
{
var ageRating = await _context.AppUser.GetUserAgeRestriction(userId);
return await _context.Person
.Where(p => p.Role == role)
.Where(p => p.SeriesMetadataPeople.Any(smp => smp.Role == role) || p.ChapterPeople.Any(cp => cp.Role == role)) // Filter by role in both series and chapters
.OrderBy(p => p.Name)
.RestrictAgainstAgeRestriction(ageRating)
.ProjectTo<PersonDto>(_mapper.ConfigurationProvider)

View File

@ -122,8 +122,10 @@ public class ReadingListRepository : IReadingListRepository
{
return _context.ReadingListItem
.Where(item => item.ReadingListId == readingListId)
.SelectMany(item => item.Chapter.People.Where(p => p.Role == PersonRole.Character))
.OrderBy(p => p.NormalizedName)
.SelectMany(item => item.Chapter.People)
.Where(p => p.Role == PersonRole.Character)
.OrderBy(p => p.Person.NormalizedName)
.Select(p => p.Person)
.Distinct()
.ProjectTo<PersonDto>(_mapper.ConfigurationProvider)
.AsEnumerable();

View File

@ -44,6 +44,9 @@ public enum SeriesIncludes
{
None = 1,
Volumes = 2,
/// <summary>
/// This will include all necessary includes
/// </summary>
Metadata = 4,
Related = 8,
Library = 16,
@ -51,8 +54,7 @@ public enum SeriesIncludes
ExternalReviews = 64,
ExternalRatings = 128,
ExternalRecommendations = 256,
ExternalMetadata = 512
ExternalMetadata = 512,
}
/// <summary>
@ -138,7 +140,7 @@ public interface ISeriesRepository
Task<IList<Series>> GetWantToReadForUserAsync(int userId);
Task<bool> IsSeriesInWantToRead(int userId, int seriesId);
Task<Series?> GetSeriesByFolderPath(string folder, SeriesIncludes includes = SeriesIncludes.None);
Task<Series?> GetSeriesThatContainsLowestFolderPath(string folder, SeriesIncludes includes = SeriesIncludes.None);
Task<Series?> GetSeriesThatContainsLowestFolderPath(string path, SeriesIncludes includes = SeriesIncludes.None);
Task<IEnumerable<Series>> GetAllSeriesByNameAsync(IList<string> normalizedNames,
int userId, SeriesIncludes includes = SeriesIncludes.None);
Task<Series?> GetFullSeriesByAnyName(string seriesName, string localizedName, int libraryId, MangaFormat format, bool withFullIncludes = true);
@ -363,11 +365,11 @@ public class SeriesRepository : ISeriesRepository
var searchQueryNormalized = searchQuery.ToNormalized();
var userRating = await _context.AppUser.GetUserAgeRestriction(userId);
var seriesIds = _context.Series
var seriesIds = await _context.Series
.Where(s => libraryIds.Contains(s.LibraryId))
.RestrictAgainstAgeRestriction(userRating)
.Select(s => s.Id)
.ToList();
.ToListAsync();
result.Libraries = await _context.Library
.Search(searchQuery, userId, libraryIds)
@ -440,6 +442,7 @@ public class SeriesRepository : ISeriesRepository
.SearchPeople(searchQuery, seriesIds)
.Take(maxRecords)
.OrderBy(t => t.NormalizedName)
.Distinct()
.ProjectTo<PersonDto>(_mapper.ConfigurationProvider)
.ToListAsync();
@ -532,14 +535,6 @@ public class SeriesRepository : ISeriesRepository
.SingleOrDefaultAsync();
}
public async Task<Series?> GetSeriesByIdForUserAsync(int seriesId, int userId, SeriesIncludes includes = SeriesIncludes.Volumes | SeriesIncludes.Metadata)
{
return await _context.Series
.Where(s => s.Id == seriesId)
.Includes(includes)
.SingleOrDefaultAsync();
}
/// <summary>
/// Returns Full Series including all external links
/// </summary>
@ -661,6 +656,7 @@ public class SeriesRepository : ISeriesRepository
.Include(m => m.Genres.OrderBy(g => g.NormalizedTitle))
.Include(m => m.Tags.OrderBy(g => g.NormalizedTitle))
.Include(m => m.People)
.ThenInclude(p => p.Person)
.AsNoTracking()
.ProjectTo<SeriesMetadataDto>(_mapper.ConfigurationProvider)
.AsSplitQuery()
@ -1273,7 +1269,7 @@ public class SeriesRepository : ISeriesRepository
var query = sQuery
.WhereIf(hasGenresFilter, s => s.Metadata.Genres.Any(g => filter.Genres.Contains(g.Id)))
.WhereIf(hasPeopleFilter, s => s.Metadata.People.Any(p => allPeopleIds.Contains(p.Id)))
.WhereIf(hasPeopleFilter, s => s.Metadata.People.Any(p => allPeopleIds.Contains(p.PersonId)))
.WhereIf(hasCollectionTagFilter,
s => s.Metadata.CollectionTags.Any(t => filter.CollectionTags.Contains(t.Id)))
.WhereIf(hasRatingFilter, s => s.Ratings.Any(r => r.Rating >= filter.Rating && r.AppUserId == userId))
@ -1302,6 +1298,7 @@ public class SeriesRepository : ISeriesRepository
.Include(m => m.Genres.OrderBy(g => g.NormalizedTitle))
.Include(m => m.Tags.OrderBy(g => g.NormalizedTitle))
.Include(m => m.People)
.ThenInclude(p => p.Person)
.AsNoTracking()
.ProjectTo<SeriesMetadataDto>(_mapper.ConfigurationProvider)
.AsSplitQuery()
@ -1606,9 +1603,24 @@ public class SeriesRepository : ISeriesRepository
.SingleOrDefaultAsync();
}
public async Task<Series?> GetSeriesThatContainsLowestFolderPath(string folder, SeriesIncludes includes = SeriesIncludes.None)
public async Task<Series?> GetSeriesThatContainsLowestFolderPath(string path, SeriesIncludes includes = SeriesIncludes.None)
{
var normalized = Services.Tasks.Scanner.Parser.Parser.NormalizePath(folder);
// Check if the path ends with a file (has a file extension)
string directoryPath;
if (Path.HasExtension(path))
{
// Remove the file part and get the directory path
directoryPath = Path.GetDirectoryName(path);
if (string.IsNullOrEmpty(directoryPath)) return null;
}
else
{
// Use the path as is if it doesn't end with a file
directoryPath = path;
}
// Normalize the directory path
var normalized = Services.Tasks.Scanner.Parser.Parser.NormalizePath(directoryPath);
if (string.IsNullOrEmpty(normalized)) return null;
normalized = normalized.TrimEnd('/');
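Concretely, Path.HasExtension keys off a dot in the final path segment only (hypothetical paths):

// "C:/Manga/Accel World/Vol 01.cbz" -> true, stripped to "C:/Manga/Accel World"
// "C:/Manga/Accel World"            -> false, used as-is
// Caveat: a folder whose name contains a dot (e.g. "Series v1.0") would also be treated as a file.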
@ -1672,6 +1684,7 @@ public class SeriesRepository : ISeriesRepository
.Include(s => s.Metadata)
.ThenInclude(m => m.People)
.ThenInclude(p => p.Person)
.Include(s => s.Metadata)
.ThenInclude(m => m.Genres)
@ -1682,6 +1695,7 @@ public class SeriesRepository : ISeriesRepository
.Include(s => s.Volumes)
.ThenInclude(v => v.Chapters)
.ThenInclude(cm => cm.People)
.ThenInclude(p => p.Person)
.Include(s => s.Volumes)
.ThenInclude(v => v.Chapters)
@ -1697,6 +1711,7 @@ public class SeriesRepository : ISeriesRepository
.AsSplitQuery();
return query.SingleOrDefaultAsync();
#nullable enable
}
@ -1705,6 +1720,7 @@ public class SeriesRepository : ISeriesRepository
var libraryIds = GetLibraryIdsForUser(userId);
var normalizedSeries = seriesName.ToNormalized();
var normalizedLocalized = localizedName.ToNormalized();
return await _context.Series
.Where(s => libraryIds.Contains(s.LibraryId))
.Where(s => formats.Contains(s.Format))
@ -1749,45 +1765,36 @@ public class SeriesRepository : ISeriesRepository
/// <param name="libraryId"></param>
public async Task<IList<Series>> RemoveSeriesNotInList(IList<ParsedSeries> seenSeries, int libraryId)
{
if (seenSeries.Count == 0) return Array.Empty<Series>();
if (!seenSeries.Any()) return Array.Empty<Series>();
// Get all series from DB in one go, based on libraryId
var dbSeries = await _context.Series
.Where(s => s.LibraryId == libraryId)
.ToListAsync();
// Get a set of matching series ids for the given parsedSeries
var ids = new HashSet<int>();
var ids = new List<int>();
foreach (var parsedSeries in seenSeries)
{
try
var matchingSeries = dbSeries
.Where(s => s.Format == parsedSeries.Format && s.NormalizedName == parsedSeries.NormalizedName)
.OrderBy(s => s.Id) // Sort to handle potential duplicates
.ToList();
// When duplicates exist, keep the last match (mirrors the previous de-duplication behavior)
if (matchingSeries.Any())
{
var seriesId = await _context.Series
.Where(s => s.Format == parsedSeries.Format && s.NormalizedName == parsedSeries.NormalizedName &&
s.LibraryId == libraryId)
.Select(s => s.Id)
.SingleOrDefaultAsync();
if (seriesId > 0)
{
ids.Add(seriesId);
}
}
catch (Exception)
{
// This is due to v0.5.6 introducing bugs where we could have multiple series get duplicated and no way to delete them
// This here will delete the 2nd one as the first is the one to likely be used.
var sId = await _context.Series
.Where(s => s.Format == parsedSeries.Format && s.NormalizedName == parsedSeries.NormalizedName &&
s.LibraryId == libraryId)
.Select(s => s.Id)
.OrderBy(s => s)
.LastAsync();
if (sId > 0)
{
ids.Add(sId);
}
ids.Add(matchingSeries.Last().Id);
}
}
var seriesToRemove = await _context.Series
.Where(s => s.LibraryId == libraryId)
// Series that were never matched against seenSeries are the ones to remove
var seriesToRemove = dbSeries
.Where(s => !ids.Contains(s.Id))
.ToListAsync();
.ToList();
// Remove series in bulk
_context.Series.RemoveRange(seriesToRemove);
return seriesToRemove;
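A hedged sketch of how the scanner might consume this; the caller is hypothetical, only RemoveSeriesNotInList comes from this PR:
var removed = await unitOfWork.SeriesRepository.RemoveSeriesNotInList(parsedSeries, library.Id);
if (removed.Count > 0) await unitOfWork.CommitAsync();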

View File

@ -5,6 +5,7 @@ using API.DTOs.Metadata;
using API.Entities;
using API.Extensions;
using API.Extensions.QueryExtensions;
using API.Services.Tasks.Scanner.Parser;
using AutoMapper;
using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore;
@ -20,6 +21,7 @@ public interface ITagRepository
Task<IList<TagDto>> GetAllTagDtosAsync(int userId);
Task RemoveAllTagNoLongerAssociated();
Task<IList<TagDto>> GetAllTagDtosForLibrariesAsync(int userId, IList<int>? libraryIds = null);
Task<List<string>> GetAllTagsNotInListAsync(ICollection<string> tags);
}
public class TagRepository : ITagRepository
@ -79,6 +81,28 @@ public class TagRepository : ITagRepository
.ToListAsync();
}
public async Task<List<string>> GetAllTagsNotInListAsync(ICollection<string> tags)
{
// Create a dictionary mapping normalized names to non-normalized names
var normalizedToOriginalMap = tags.Distinct()
.GroupBy(Parser.Normalize)
.ToDictionary(group => group.Key, group => group.First());
var normalizedTagNames = normalizedToOriginalMap.Keys.ToList();
// Query the database for existing tags using the normalized names
var existingTags = await _context.Tag
.Where(t => normalizedTagNames.Contains(t.NormalizedTitle))
.Select(t => t.NormalizedTitle)
.ToListAsync();
// Find the normalized tag names that do not exist in the database
var missingTags = normalizedTagNames.Except(existingTags).ToList();
// Return the original, non-normalized names for the missing tags
return missingTags.Select(normalizedName => normalizedToOriginalMap[normalizedName]).ToList();
}
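Hypothetical usage of the lookup above, e.g. before bulk-creating tags during a scan; comparison happens on normalized titles while the original casing is preserved in the result:
var incoming = new List<string> { "Action", " action ", "Isekai" };
var missing = await unitOfWork.TagRepository.GetAllTagsNotInListAsync(incoming);
// If only "Action" exists in the DB, missing contains just "Isekai" (one original form per normalized name).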
public async Task<IList<Tag>> GetAllTagsAsync()
{
return await _context.Tag.ToListAsync();

View File

@ -114,6 +114,14 @@ public static class Seed
Order = 5,
IsProvided = true,
Visible = true
},
new AppUserSideNavStream()
{
Name = "browse-authors",
StreamType = SideNavStreamType.BrowseAuthors,
Order = 6,
IsProvided = true,
Visible = true
});
@ -183,10 +191,10 @@ public static class Seed
var allUsers = await unitOfWork.UserRepository.GetAllUsersAsync(AppUserIncludes.SideNavStreams);
foreach (var user in allUsers)
{
if (user.SideNavStreams.Count != 0) continue;
user.SideNavStreams ??= new List<AppUserSideNavStream>();
foreach (var defaultStream in DefaultSideNavStreams)
{
if (user.SideNavStreams.Any(s => s.Name == defaultStream.Name && s.StreamType == defaultStream.StreamType)) continue;
var newStream = new AppUserSideNavStream()
{
Name = defaultStream.Name,

View File

@ -9,6 +9,7 @@ namespace API.Data;
public interface IUnitOfWork
{
DataContext DataContext { get; }
ISeriesRepository SeriesRepository { get; }
IUserRepository UserRepository { get; }
ILibraryRepository LibraryRepository { get; }
@ -36,6 +37,7 @@ public interface IUnitOfWork
bool HasChanges();
Task<bool> RollbackAsync();
}
public class UnitOfWork : IUnitOfWork
{
private readonly DataContext _context;
@ -47,33 +49,57 @@ public class UnitOfWork : IUnitOfWork
_context = context;
_mapper = mapper;
_userManager = userManager;
SeriesRepository = new SeriesRepository(_context, _mapper, _userManager);
UserRepository = new UserRepository(_context, _userManager, _mapper);
LibraryRepository = new LibraryRepository(_context, _mapper);
VolumeRepository = new VolumeRepository(_context, _mapper);
SettingsRepository = new SettingsRepository(_context, _mapper);
AppUserProgressRepository = new AppUserProgressRepository(_context, _mapper);
CollectionTagRepository = new CollectionTagRepository(_context, _mapper);
ChapterRepository = new ChapterRepository(_context, _mapper);
ReadingListRepository = new ReadingListRepository(_context, _mapper);
SeriesMetadataRepository = new SeriesMetadataRepository(_context);
PersonRepository = new PersonRepository(_context, _mapper);
GenreRepository = new GenreRepository(_context, _mapper);
TagRepository = new TagRepository(_context, _mapper);
SiteThemeRepository = new SiteThemeRepository(_context, _mapper);
MangaFileRepository = new MangaFileRepository(_context);
DeviceRepository = new DeviceRepository(_context, _mapper);
MediaErrorRepository = new MediaErrorRepository(_context, _mapper);
ScrobbleRepository = new ScrobbleRepository(_context, _mapper);
UserTableOfContentRepository = new UserTableOfContentRepository(_context, _mapper);
AppUserSmartFilterRepository = new AppUserSmartFilterRepository(_context, _mapper);
AppUserExternalSourceRepository = new AppUserExternalSourceRepository(_context, _mapper);
ExternalSeriesMetadataRepository = new ExternalSeriesMetadataRepository(_context, _mapper);
}
public ISeriesRepository SeriesRepository => new SeriesRepository(_context, _mapper, _userManager);
public IUserRepository UserRepository => new UserRepository(_context, _userManager, _mapper);
public ILibraryRepository LibraryRepository => new LibraryRepository(_context, _mapper);
public IVolumeRepository VolumeRepository => new VolumeRepository(_context, _mapper);
public ISettingsRepository SettingsRepository => new SettingsRepository(_context, _mapper);
public IAppUserProgressRepository AppUserProgressRepository => new AppUserProgressRepository(_context, _mapper);
public ICollectionTagRepository CollectionTagRepository => new CollectionTagRepository(_context, _mapper);
public IChapterRepository ChapterRepository => new ChapterRepository(_context, _mapper);
public IReadingListRepository ReadingListRepository => new ReadingListRepository(_context, _mapper);
public ISeriesMetadataRepository SeriesMetadataRepository => new SeriesMetadataRepository(_context);
public IPersonRepository PersonRepository => new PersonRepository(_context, _mapper);
public IGenreRepository GenreRepository => new GenreRepository(_context, _mapper);
public ITagRepository TagRepository => new TagRepository(_context, _mapper);
public ISiteThemeRepository SiteThemeRepository => new SiteThemeRepository(_context, _mapper);
public IMangaFileRepository MangaFileRepository => new MangaFileRepository(_context);
public IDeviceRepository DeviceRepository => new DeviceRepository(_context, _mapper);
public IMediaErrorRepository MediaErrorRepository => new MediaErrorRepository(_context, _mapper);
public IScrobbleRepository ScrobbleRepository => new ScrobbleRepository(_context, _mapper);
public IUserTableOfContentRepository UserTableOfContentRepository => new UserTableOfContentRepository(_context, _mapper);
public IAppUserSmartFilterRepository AppUserSmartFilterRepository => new AppUserSmartFilterRepository(_context, _mapper);
public IAppUserExternalSourceRepository AppUserExternalSourceRepository => new AppUserExternalSourceRepository(_context, _mapper);
public IExternalSeriesMetadataRepository ExternalSeriesMetadataRepository => new ExternalSeriesMetadataRepository(_context, _mapper);
/// <summary>
/// This is here for Scanner only. Don't use otherwise.
/// </summary>
public DataContext DataContext => _context;
public ISeriesRepository SeriesRepository { get; }
public IUserRepository UserRepository { get; }
public ILibraryRepository LibraryRepository { get; }
public IVolumeRepository VolumeRepository { get; }
public ISettingsRepository SettingsRepository { get; }
public IAppUserProgressRepository AppUserProgressRepository { get; }
public ICollectionTagRepository CollectionTagRepository { get; }
public IChapterRepository ChapterRepository { get; }
public IReadingListRepository ReadingListRepository { get; }
public ISeriesMetadataRepository SeriesMetadataRepository { get; }
public IPersonRepository PersonRepository { get; }
public IGenreRepository GenreRepository { get; }
public ITagRepository TagRepository { get; }
public ISiteThemeRepository SiteThemeRepository { get; }
public IMangaFileRepository MangaFileRepository { get; }
public IDeviceRepository DeviceRepository { get; }
public IMediaErrorRepository MediaErrorRepository { get; }
public IScrobbleRepository ScrobbleRepository { get; }
public IUserTableOfContentRepository UserTableOfContentRepository { get; }
public IAppUserSmartFilterRepository AppUserSmartFilterRepository { get; }
public IAppUserExternalSourceRepository AppUserExternalSourceRepository { get; }
public IExternalSeriesMetadataRepository ExternalSeriesMetadataRepository { get; }
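For context on the change above: an expression-bodied property (=>) re-evaluates its body on every access, so each repository access previously constructed a fresh instance, while a get-only property initialized in the constructor holds a single instance for the lifetime of the UnitOfWork. A standalone sketch (names hypothetical):
public class Demo
{
    // A new object is constructed on every read.
    public object Fresh => new object();
    // One object, created with the Demo instance, returned on every read.
    public object Cached { get; } = new object();
}
// ReferenceEquals(d.Fresh, d.Fresh) is false; ReferenceEquals(d.Cached, d.Cached) is true.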
/// <summary>
/// Commits changes to the DB. Completes the open transaction.

View File

@ -153,7 +153,7 @@ public class Chapter : IEntityDate, IHasReadTimeEstimate, IHasCoverImage
/// <summary>
/// All people attached at a Chapter level. Usually Comics will have different people per issue.
/// </summary>
public ICollection<Person> People { get; set; } = new List<Person>();
public ICollection<ChapterPeople> People { get; set; } = new List<ChapterPeople>();
/// <summary>
/// Genres for the Chapter
/// </summary>

View File

@ -14,16 +14,6 @@ public class SeriesMetadata : IHasConcurrencyToken
public string Summary { get; set; } = string.Empty;
[Obsolete("Use AppUserCollection instead")]
public ICollection<CollectionTag> CollectionTags { get; set; } = new List<CollectionTag>();
public ICollection<Genre> Genres { get; set; } = new List<Genre>();
public ICollection<Tag> Tags { get; set; } = new List<Tag>();
/// <summary>
/// All people attached at a Series level.
/// </summary>
public ICollection<Person> People { get; set; } = new List<Person>();
/// <summary>
/// Highest Age Rating from all Chapters
/// </summary>
@ -51,7 +41,8 @@ public class SeriesMetadata : IHasConcurrencyToken
/// <remarks>This is not populated from Chapters of the Series</remarks>
public string WebLinks { get; set; } = string.Empty;
// Locks
#region Locks
public bool LanguageLocked { get; set; }
public bool SummaryLocked { get; set; }
/// <summary>
@ -79,9 +70,26 @@ public class SeriesMetadata : IHasConcurrencyToken
public bool CoverArtistLocked { get; set; }
public bool ReleaseYearLocked { get; set; }
// Relationship
public Series Series { get; set; } = null!;
#endregion
#region Relationships
[Obsolete("Use AppUserCollection instead")]
public ICollection<CollectionTag> CollectionTags { get; set; } = new List<CollectionTag>();
public ICollection<Genre> Genres { get; set; } = new List<Genre>();
public ICollection<Tag> Tags { get; set; } = new List<Tag>();
/// <summary>
/// All people attached at a Series level.
/// </summary>
public ICollection<SeriesMetadataPeople> People { get; set; } = new List<SeriesMetadataPeople>();
public int SeriesId { get; set; }
public Series Series { get; set; } = null!;
#endregion
/// <inheritdoc />
[ConcurrencyCheck]

View File

@ -1,17 +0,0 @@
using System.Collections.Generic;
using API.Entities.Enums;
using API.Entities.Metadata;
namespace API.Entities;
public class Person
{
public int Id { get; set; }
public required string Name { get; set; }
public required string NormalizedName { get; set; }
public required PersonRole Role { get; set; }
// Relationships
public ICollection<SeriesMetadata> SeriesMetadatas { get; set; } = null!;
public ICollection<Chapter> ChapterMetadatas { get; set; } = null!;
}

View File

@ -0,0 +1,14 @@
using API.Entities.Enums;
namespace API.Entities;
public class ChapterPeople
{
public int ChapterId { get; set; }
public virtual Chapter Chapter { get; set; }
public int PersonId { get; set; }
public virtual Person Person { get; set; }
public required PersonRole Role { get; set; }
}

View File

@ -0,0 +1,60 @@
using System.Collections.Generic;
using API.Entities.Enums;
using API.Entities.Interfaces;
using API.Entities.Metadata;
namespace API.Entities;
public class Person : IHasCoverImage
{
public int Id { get; set; }
public required string Name { get; set; }
public required string NormalizedName { get; set; }
//public ICollection<PersonAlias> Aliases { get; set; } = default!;
public string? CoverImage { get; set; }
public bool CoverImageLocked { get; set; }
public string PrimaryColor { get; set; }
public string SecondaryColor { get; set; }
public string Description { get; set; }
/// <summary>
/// ASIN for person
/// </summary>
/// <remarks>Can be used for Amazon author lookup</remarks>
public string? Asin { get; set; }
/// <summary>
/// https://anilist.co/staff/{AniListId}/
/// </summary>
/// <remarks>Kavita+ Only</remarks>
public int AniListId { get; set; } = 0;
/// <summary>
/// https://myanimelist.net/people/{MalId}/
/// https://myanimelist.net/character/{MalId}/CharacterName
/// </summary>
/// <remarks>Kavita+ Only</remarks>
public long MalId { get; set; } = 0;
/// <summary>
/// https://hardcover.app/authors/{HardcoverId}
/// </summary>
/// <remarks>Kavita+ Only</remarks>
public string? HardcoverId { get; set; }
/// <summary>
/// https://metron.cloud/creator/{slug}/
/// </summary>
/// <remarks>Kavita+ Only</remarks>
//public long MetronId { get; set; } = 0;
// Relationships
public ICollection<ChapterPeople> ChapterPeople { get; set; } = new List<ChapterPeople>();
public ICollection<SeriesMetadataPeople> SeriesMetadataPeople { get; set; } = new List<SeriesMetadataPeople>();
public void ResetColorScape()
{
PrimaryColor = string.Empty;
SecondaryColor = string.Empty;
}
}

View File

@ -0,0 +1,15 @@
using API.Entities.Enums;
using API.Entities.Metadata;
namespace API.Entities;
public class SeriesMetadataPeople
{
public int SeriesMetadataId { get; set; }
public virtual SeriesMetadata SeriesMetadata { get; set; }
public int PersonId { get; set; }
public virtual Person Person { get; set; }
public required PersonRole Role { get; set; }
}
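Join entities carrying a Role payload, like ChapterPeople and SeriesMetadataPeople above, typically need an explicit composite key so the same person can appear under multiple roles. The PR's actual EF Core mapping is not shown in these hunks, so the following OnModelCreating sketch is an assumption:
// Hypothetical configuration; the real one may differ.
modelBuilder.Entity<SeriesMetadataPeople>()
    .HasKey(smp => new { smp.SeriesMetadataId, smp.PersonId, smp.Role });
modelBuilder.Entity<SeriesMetadataPeople>()
    .HasOne(smp => smp.SeriesMetadata)
    .WithMany(sm => sm.People)
    .HasForeignKey(smp => smp.SeriesMetadataId);
modelBuilder.Entity<SeriesMetadataPeople>()
    .HasOne(smp => smp.Person)
    .WithMany(p => p.SeriesMetadataPeople)
    .HasForeignKey(smp => smp.PersonId);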

View File

@ -10,4 +10,5 @@ public enum SideNavStreamType
ExternalSource = 6,
AllSeries = 7,
WantToRead = 8,
BrowseAuthors = 9
}

View File

@ -24,6 +24,8 @@ public static class ApplicationServiceExtensions
{
services.AddAutoMapper(typeof(AutoMapperProfiles).Assembly);
//services.AddScoped<DataContext>();
services.AddScoped<IUnitOfWork, UnitOfWork>();
services.AddScoped<ITokenService, TokenService>();
services.AddScoped<IFileService, FileService>();
@ -45,7 +47,6 @@ public static class ApplicationServiceExtensions
services.AddScoped<IBookmarkService, BookmarkService>();
services.AddScoped<IThemeService, ThemeService>();
services.AddScoped<ISeriesService, SeriesService>();
services.AddScoped<IProcessSeries, ProcessSeries>();
services.AddScoped<IReadingListService, ReadingListService>();
services.AddScoped<IDeviceService, DeviceService>();
services.AddScoped<IStatisticService, StatisticService>();
@ -55,12 +56,12 @@ public static class ApplicationServiceExtensions
services.AddScoped<IStreamService, StreamService>();
services.AddScoped<IScannerService, ScannerService>();
services.AddScoped<IProcessSeries, ProcessSeries>();
services.AddScoped<IMetadataService, MetadataService>();
services.AddScoped<IWordCountAnalyzerService, WordCountAnalyzerService>();
services.AddScoped<ILibraryWatcher, LibraryWatcher>();
services.AddScoped<ITachiyomiService, TachiyomiService>();
services.AddScoped<ICollectionTagService, CollectionTagService>();
services.AddScoped<ITagManagerService, TagManagerService>();
services.AddScoped<IFileSystem, FileSystem>();
services.AddScoped<IDirectoryService, DirectoryService>();

View File

@ -4,6 +4,7 @@ using API.Data.Misc;
using API.Data.Repositories;
using API.Entities;
using API.Entities.Metadata;
using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore;
namespace API.Extensions.QueryExtensions.Filtering;
@ -45,10 +46,25 @@ public static class SearchQueryableExtensions
public static IQueryable<Person> SearchPeople(this IQueryable<SeriesMetadata> queryable,
string searchQuery, IEnumerable<int> seriesIds)
{
return queryable
// Get people from SeriesMetadata
var peopleFromSeriesMetadata = queryable
.Where(sm => seriesIds.Contains(sm.SeriesId))
.SelectMany(sm => sm.People.Where(t => t.Name != null && EF.Functions.Like(t.Name, $"%{searchQuery}%")))
.AsSplitQuery()
.SelectMany(sm => sm.People)
.Where(p => p.Person.Name != null && EF.Functions.Like(p.Person.Name, $"%{searchQuery}%"))
.Select(p => p.Person);
// Get people from ChapterPeople by navigating through Volume -> Series
var peopleFromChapterPeople = queryable
.Where(sm => seriesIds.Contains(sm.SeriesId))
.SelectMany(sm => sm.Series.Volumes)
.SelectMany(v => v.Chapters)
.SelectMany(ch => ch.People)
.Where(cp => cp.Person.Name != null && EF.Functions.Like(cp.Person.Name, $"%{searchQuery}%"))
.Select(cp => cp.Person);
// Combine both queries and ensure distinct results
return peopleFromSeriesMetadata
.Union(peopleFromChapterPeople)
.Distinct()
.OrderBy(p => p.NormalizedName);
}
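Note that Union already applies set semantics when translated to SQL, so the trailing Distinct is a harmless safeguard. A hypothetical call site (the context variable and id source are assumptions):
var matches = await context.SeriesMetadata
    .SearchPeople(searchQuery, accessibleSeriesIds)
    .Take(20)
    .ToListAsync();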

View File

@ -471,22 +471,22 @@ public static class SeriesFilter
{
case FilterComparison.Equal:
case FilterComparison.Contains:
return queryable.Where(s => s.Metadata.People.Any(p => people.Contains(p.Id)));
return queryable.Where(s => s.Metadata.People.Any(p => people.Contains(p.PersonId)));
case FilterComparison.NotEqual:
case FilterComparison.NotContains:
return queryable.Where(s => s.Metadata.People.All(t => !people.Contains(t.Id)));
return queryable.Where(s => s.Metadata.People.All(t => !people.Contains(t.PersonId)));
case FilterComparison.MustContains:
// Deconstruct into individual Where queries and intersect them; an All-style "must contain" doesn't translate to SQL directly
var queries = new List<IQueryable<Series>>()
{
queryable
};
queries.AddRange(people.Select(gId => queryable.Where(s => s.Metadata.People.Any(p => p.Id == gId))));
queries.AddRange(people.Select(gId => queryable.Where(s => s.Metadata.People.Any(p => p.PersonId == gId))));
return queries.Aggregate((q1, q2) => q1.Intersect(q2));
case FilterComparison.IsEmpty:
// Check that no person holds the given role on the series
return queryable.Where(s => !s.Metadata.People.Any(p => p.Role == role));
return queryable.Where(s => s.Metadata.People.All(p => p.Role != role));
case FilterComparison.GreaterThan:
case FilterComparison.GreaterThanEqual:
case FilterComparison.LessThan:
@ -513,17 +513,17 @@ public static class SeriesFilter
{
case FilterComparison.Equal:
case FilterComparison.Contains:
return queryable.Where(s => s.Metadata.People.Any(p => people.Contains(p.Id)));
return queryable.Where(s => s.Metadata.People.Any(p => people.Contains(p.PersonId)));
case FilterComparison.NotEqual:
case FilterComparison.NotContains:
return queryable.Where(s => s.Metadata.People.All(t => !people.Contains(t.Id)));
return queryable.Where(s => s.Metadata.People.All(t => !people.Contains(t.PersonId)));
case FilterComparison.MustContains:
// Deconstruct into individual Where queries and intersect them; an All-style "must contain" doesn't translate to SQL directly
var queries = new List<IQueryable<Series>>()
{
queryable
};
queries.AddRange(people.Select(gId => queryable.Where(s => s.Metadata.People.Any(p => p.Id == gId))));
queries.AddRange(people.Select(gId => queryable.Where(s => s.Metadata.People.Any(p => p.PersonId == gId))));
return queries.Aggregate((q1, q2) => q1.Intersect(q2));
case FilterComparison.IsEmpty:

View File

@ -56,7 +56,8 @@ public static class IncludesExtensions
if (includes.HasFlag(ChapterIncludes.People))
{
queryable = queryable
.Include(c => c.People);
.Include(c => c.People)
.ThenInclude(cp => cp.Person);
}
if (includes.HasFlag(ChapterIncludes.Genres))
@ -161,17 +162,16 @@ public static class IncludesExtensions
if (includeFlags.HasFlag(SeriesIncludes.Metadata))
{
query = query.Include(s => s.Metadata)
.ThenInclude(m => m.CollectionTags.OrderBy(g => g.NormalizedTitle))
query = query
.Include(s => s.Metadata)
.ThenInclude(m => m.Genres.OrderBy(g => g.NormalizedTitle))
.Include(s => s.Metadata)
.ThenInclude(m => m.People)
.ThenInclude(smp => smp.Person)
.Include(s => s.Metadata)
.ThenInclude(m => m.Tags.OrderBy(g => g.NormalizedTitle));
}
return query.AsSplitQuery();
}

View File

@ -25,6 +25,19 @@ public static class RestrictByAgeExtensions
return q;
}
public static IQueryable<Chapter> RestrictAgainstAgeRestriction(this IQueryable<Chapter> queryable, AgeRestriction restriction)
{
if (restriction.AgeRating == AgeRating.NotApplicable) return queryable;
var q = queryable.Where(chapter => chapter.Volume.Series.Metadata.AgeRating <= restriction.AgeRating);
if (!restriction.IncludeUnknowns)
{
return q.Where(s => s.Volume.Series.Metadata.AgeRating != AgeRating.Unknown);
}
return q;
}
[Obsolete]
public static IQueryable<CollectionTag> RestrictAgainstAgeRestriction(this IQueryable<CollectionTag> queryable, AgeRestriction restriction)
{
@ -88,12 +101,12 @@ public static class RestrictByAgeExtensions
if (restriction.IncludeUnknowns)
{
return queryable.Where(c => c.SeriesMetadatas.All(sm =>
sm.AgeRating <= restriction.AgeRating));
return queryable.Where(c => c.SeriesMetadataPeople.All(sm =>
sm.SeriesMetadata.AgeRating <= restriction.AgeRating));
}
return queryable.Where(c => c.SeriesMetadatas.All(sm =>
sm.AgeRating <= restriction.AgeRating && sm.AgeRating > AgeRating.Unknown));
return queryable.Where(c => c.SeriesMetadataPeople.All(sm =>
sm.SeriesMetadata.AgeRating <= restriction.AgeRating && sm.SeriesMetadata.AgeRating > AgeRating.Unknown));
}
public static IQueryable<ReadingList> RestrictAgainstAgeRestriction(this IQueryable<ReadingList> queryable, AgeRestriction restriction)

View File

@ -28,6 +28,12 @@ public static class SeriesExtensions
firstVolume = volumes[1];
}
// If the first volume is 0, then use Volume 1
if (firstVolume.MinNumber.Is(0f) && volumes.Count > 1)
{
firstVolume = volumes[1];
}
var chapters = firstVolume.Chapters
.OrderBy(c => c.SortOrder)
.ToList();

View File

@ -95,59 +95,73 @@ public class AutoMapperProfiles : Profile
opt =>
opt.MapFrom(
src => src.PagesRead));
CreateMap<SeriesMetadata, SeriesMetadataDto>()
.ForMember(dest => dest.Writers,
opt =>
opt.MapFrom(
src => src.People.Where(p => p.Role == PersonRole.Writer).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.CoverArtists,
opt =>
opt.MapFrom(src =>
src.People.Where(p => p.Role == PersonRole.CoverArtist).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Characters,
opt =>
opt.MapFrom(src =>
src.People.Where(p => p.Role == PersonRole.Character).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Publishers,
opt =>
opt.MapFrom(src =>
src.People.Where(p => p.Role == PersonRole.Publisher).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Colorists,
opt =>
opt.MapFrom(src =>
src.People.Where(p => p.Role == PersonRole.Colorist).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Inkers,
opt =>
opt.MapFrom(src =>
src.People.Where(p => p.Role == PersonRole.Inker).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Imprints,
opt =>
opt.MapFrom(src =>
src.People.Where(p => p.Role == PersonRole.Imprint).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Letterers,
opt =>
opt.MapFrom(src =>
src.People.Where(p => p.Role == PersonRole.Letterer).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Pencillers,
opt =>
opt.MapFrom(src =>
src.People.Where(p => p.Role == PersonRole.Penciller).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Translators,
opt =>
opt.MapFrom(src =>
src.People.Where(p => p.Role == PersonRole.Translator).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Editors,
opt =>
opt.MapFrom(
src => src.People.Where(p => p.Role == PersonRole.Editor).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Teams,
opt =>
opt.MapFrom(
src => src.People.Where(p => p.Role == PersonRole.Team).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Locations,
opt =>
opt.MapFrom(
src => src.People.Where(p => p.Role == PersonRole.Location).OrderBy(p => p.NormalizedName)))
// Map Writers
.ForMember(dest => dest.Writers, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Writer)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map CoverArtists
.ForMember(dest => dest.CoverArtists, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.CoverArtist)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Publishers
.ForMember(dest => dest.Publishers, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Publisher)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Characters
.ForMember(dest => dest.Characters, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Character)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Pencillers
.ForMember(dest => dest.Pencillers, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Penciller)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Inkers
.ForMember(dest => dest.Inkers, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Inker)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Imprints
.ForMember(dest => dest.Imprints, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Imprint)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Colorists
.ForMember(dest => dest.Colorists, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Colorist)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Letterers
.ForMember(dest => dest.Letterers, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Letterer)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Editors
.ForMember(dest => dest.Editors, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Editor)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Translators
.ForMember(dest => dest.Translators, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Translator)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Teams
.ForMember(dest => dest.Teams, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Team)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Locations
.ForMember(dest => dest.Locations, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Location)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Genres,
opt =>
opt.MapFrom(
@ -157,89 +171,73 @@ public class AutoMapperProfiles : Profile
opt.MapFrom(
src => src.Tags.OrderBy(p => p.NormalizedTitle)));
CreateMap<Chapter, ChapterMetadataDto>()
.ForMember(dest => dest.Writers,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Writer).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.CoverArtists,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.CoverArtist).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Colorists,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Colorist).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Inkers,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Inker).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Imprints,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Imprint).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Letterers,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Letterer).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Pencillers,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Penciller).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Publishers,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Publisher).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Translators,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Translator).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Characters,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Character).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Editors,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Editor).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Teams,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Team).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Locations,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Location).OrderBy(p => p.NormalizedName)))
;
CreateMap<Chapter, ChapterDto>()
.ForMember(dest => dest.Writers,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Writer).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.CoverArtists,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.CoverArtist).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Colorists,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Colorist).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Inkers,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Inker).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Imprints,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Imprint).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Letterers,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Letterer).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Pencillers,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Penciller).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Publishers,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Publisher).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Translators,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Translator).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Characters,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Character).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Editors,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Editor).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Teams,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Team).OrderBy(p => p.NormalizedName)))
.ForMember(dest => dest.Locations,
opt =>
opt.MapFrom(src => src.People.Where(p => p.Role == PersonRole.Location).OrderBy(p => p.NormalizedName)))
;
// Map Writers
.ForMember(dest => dest.Writers, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Writer)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map CoverArtists
.ForMember(dest => dest.CoverArtists, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.CoverArtist)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Publishers
.ForMember(dest => dest.Publishers, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Publisher)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Characters
.ForMember(dest => dest.Characters, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Character)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Pencillers
.ForMember(dest => dest.Pencillers, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Penciller)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Inkers
.ForMember(dest => dest.Inkers, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Inker)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Imprints
.ForMember(dest => dest.Imprints, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Imprint)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Colorists
.ForMember(dest => dest.Colorists, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Colorist)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Letterers
.ForMember(dest => dest.Letterers, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Letterer)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Editors
.ForMember(dest => dest.Editors, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Editor)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Translators
.ForMember(dest => dest.Translators, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Translator)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Teams
.ForMember(dest => dest.Teams, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Team)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)))
// Map Locations
.ForMember(dest => dest.Locations, opt => opt.MapFrom(src => src.People
.Where(cp => cp.Role == PersonRole.Location)
.Select(cp => cp.Person)
.OrderBy(p => p.NormalizedName)));
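The per-role mappings above are spelled out inline because AutoMapper's ProjectTo builds expression trees, and a projection that calls through a shared helper method generally cannot be translated. For purely in-memory mapping, a helper like this could collapse the repetition (hypothetical, not part of this PR):
private static IEnumerable<Person> ByRole(IEnumerable<ChapterPeople> people, PersonRole role) =>
    people.Where(cp => cp.Role == role)
        .Select(cp => cp.Person)
        .OrderBy(p => p.NormalizedName);
// Usage: .ForMember(d => d.Writers, o => o.MapFrom(src => ByRole(src.People, PersonRole.Writer)))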
CreateMap<AppUser, UserDto>()
.ForMember(dest => dest.AgeRestriction,
@ -337,5 +335,11 @@ public class AutoMapperProfiles : Profile
CreateMap<MangaFile, FileExtensionExportDto>();
CreateMap<Chapter, StandaloneChapterDto>()
.ForMember(dest => dest.SeriesId, opt => opt.MapFrom(src => src.Volume.SeriesId))
.ForMember(dest => dest.VolumeTitle, opt => opt.MapFrom(src => src.Volume.Name))
.ForMember(dest => dest.LibraryId, opt => opt.MapFrom(src => src.Volume.Series.LibraryId))
.ForMember(dest => dest.LibraryType, opt => opt.MapFrom(src => src.Volume.Series.Library.Type));
}
}

View File

@ -142,4 +142,17 @@ public class ChapterBuilder : IEntityBuilder<Chapter>
_chapter.CreatedUtc = created.ToUniversalTime();
return this;
}
public ChapterBuilder WithPerson(Person person, PersonRole role)
{
_chapter.People ??= new List<ChapterPeople>();
_chapter.People.Add(new ChapterPeople()
{
Person = person,
Role = role,
Chapter = _chapter,
});
return this;
}
}

View File

@ -11,15 +11,14 @@ public class PersonBuilder : IEntityBuilder<Person>
private readonly Person _person;
public Person Build() => _person;
public PersonBuilder(string name, PersonRole role)
public PersonBuilder(string name)
{
_person = new Person()
{
Name = name.Trim(),
NormalizedName = name.ToNormalized(),
Role = role,
ChapterMetadatas = new List<Chapter>(),
SeriesMetadatas = new List<SeriesMetadata>()
SeriesMetadataPeople = new List<SeriesMetadataPeople>(),
ChapterPeople = new List<ChapterPeople>()
};
}
@ -34,10 +33,10 @@ public class PersonBuilder : IEntityBuilder<Person>
return this;
}
public PersonBuilder WithSeriesMetadata(SeriesMetadata metadata)
public PersonBuilder WithSeriesMetadata(SeriesMetadataPeople seriesMetadataPeople)
{
_person.SeriesMetadatas ??= new List<SeriesMetadata>();
_person.SeriesMetadatas.Add(metadata);
_person.SeriesMetadataPeople.Add(seriesMetadataPeople);
return this;
}
}

View File

@ -17,7 +17,7 @@ public class SeriesMetadataBuilder : IEntityBuilder<SeriesMetadata>
CollectionTags = new List<CollectionTag>(),
Genres = new List<Genre>(),
Tags = new List<Tag>(),
People = new List<Person>()
People = new List<SeriesMetadataPeople>()
};
}
@ -45,4 +45,17 @@ public class SeriesMetadataBuilder : IEntityBuilder<SeriesMetadata>
_seriesMetadata.AgeRating = rating;
return this;
}
public SeriesMetadataBuilder WithPerson(Person person, PersonRole role)
{
_seriesMetadata.People ??= new List<SeriesMetadataPeople>();
_seriesMetadata.People.Add(new SeriesMetadataPeople()
{
Role = role,
Person = person,
SeriesMetadata = _seriesMetadata,
});
return this;
}
}

View File

@ -14,8 +14,8 @@ public interface ICacheHelper
bool CoverImageExists(string path);
bool IsFileUnmodifiedSinceCreationOrLastScan(IEntityDate chapter, bool forceUpdate, MangaFile firstFile);
bool HasFileChangedSinceLastScan(DateTime lastScan, bool forceUpdate, MangaFile firstFile);
bool IsFileUnmodifiedSinceCreationOrLastScan(IEntityDate chapter, bool forceUpdate, MangaFile? firstFile);
bool HasFileChangedSinceLastScan(DateTime lastScan, bool forceUpdate, MangaFile? firstFile);
}
@ -56,7 +56,7 @@ public class CacheHelper : ICacheHelper
/// <param name="forceUpdate"></param>
/// <param name="firstFile"></param>
/// <returns></returns>
public bool IsFileUnmodifiedSinceCreationOrLastScan(IEntityDate chapter, bool forceUpdate, MangaFile firstFile)
public bool IsFileUnmodifiedSinceCreationOrLastScan(IEntityDate chapter, bool forceUpdate, MangaFile? firstFile)
{
return firstFile != null &&
(!forceUpdate &&
@ -71,7 +71,7 @@ public class CacheHelper : ICacheHelper
/// <param name="forceUpdate">Should we ignore any logic and force this to return true</param>
/// <param name="firstFile">The file in question</param>
/// <returns></returns>
public bool HasFileChangedSinceLastScan(DateTime lastScan, bool forceUpdate, MangaFile firstFile)
public bool HasFileChangedSinceLastScan(DateTime lastScan, bool forceUpdate, MangaFile? firstFile)
{
if (firstFile == null) return false;
if (forceUpdate) return true;

View File

@ -1,153 +1,120 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.DTOs.Metadata;
using API.Entities;
using API.Extensions;
using API.Helpers.Builders;
using Microsoft.EntityFrameworkCore;
namespace API.Helpers;
#nullable enable
public static class GenreHelper
{
public static void UpdateGenre(Dictionary<string, Genre> allGenres,
IEnumerable<string> names, Action<Genre, bool> action)
public static async Task UpdateChapterGenres(Chapter chapter, IEnumerable<string> genreNames, IUnitOfWork unitOfWork)
{
foreach (var name in names)
{
var normalizedName = name.ToNormalized();
if (string.IsNullOrEmpty(normalizedName)) continue;
// Normalize genre names once and store them in a hash set for quick lookups
var normalizedGenresToAdd = new HashSet<string>(genreNames.Select(g => g.ToNormalized()));
if (allGenres.TryGetValue(normalizedName, out var genre))
// Remove genres that are no longer in the new list
var genresToRemove = chapter.Genres
.Where(g => !normalizedGenresToAdd.Contains(g.NormalizedTitle))
.ToList();
if (genresToRemove.Count > 0)
{
foreach (var genreToRemove in genresToRemove)
{
action(genre, false);
chapter.Genres.Remove(genreToRemove);
}
else
}
// Get all normalized titles to query the database for existing genres
var existingGenreTitles = await unitOfWork.DataContext.Genre
.Where(g => normalizedGenresToAdd.Contains(g.NormalizedTitle))
.ToDictionaryAsync(g => g.NormalizedTitle);
// Find missing genres that are not in the database
var missingGenres = normalizedGenresToAdd
.Where(nt => !existingGenreTitles.ContainsKey(nt))
.Select(title => new GenreBuilder(title).Build())
.ToList();
// Add missing genres to the database
if (missingGenres.Count > 0)
{
unitOfWork.DataContext.Genre.AddRange(missingGenres);
await unitOfWork.CommitAsync();
// Add newly inserted genres to existing genres dictionary for easier lookup
foreach (var genre in missingGenres)
{
genre = new GenreBuilder(name).Build();
allGenres.Add(normalizedName, genre);
action(genre, true);
existingGenreTitles[genre.NormalizedTitle] = genre;
}
}
// Add genres that are either existing or newly added to the chapter
foreach (var normalizedTitle in normalizedGenresToAdd)
{
var genre = existingGenreTitles[normalizedTitle];
if (!chapter.Genres.Contains(genre))
{
chapter.Genres.Add(genre);
}
}
}
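A hedged example of invoking the rewritten helper from scan processing; the comicInfo field and call site are assumptions, while GetTagValues is the splitter defined further below:
var genreNames = TagHelper.GetTagValues(comicInfo.Genre);
await GenreHelper.UpdateChapterGenres(chapter, genreNames, unitOfWork);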
public static void KeepOnlySameGenreBetweenLists(ICollection<Genre> existingGenres, ICollection<Genre> removeAllExcept, Action<Genre>? action = null)
{
var existing = existingGenres.ToList();
foreach (var genre in existing)
{
var existingPerson = removeAllExcept.FirstOrDefault(g => genre.NormalizedTitle != null && genre.NormalizedTitle.Equals(g.NormalizedTitle));
if (existingPerson != null) continue;
existingGenres.Remove(genre);
action?.Invoke(genre);
}
}
/// <summary>
/// Adds the genre to the list if it's not already in there.
/// </summary>
/// <param name="metadataGenres"></param>
/// <param name="genre"></param>
public static void AddGenreIfNotExists(ICollection<Genre> metadataGenres, Genre genre)
{
var existingGenre = metadataGenres.FirstOrDefault(p =>
p.NormalizedTitle.Equals(genre.Title?.ToNormalized()));
if (existingGenre == null)
{
metadataGenres.Add(genre);
}
}
public static void UpdateGenreList(ICollection<GenreTagDto>? tags, Series series,
IReadOnlyCollection<Genre> allTags, Action<Genre> handleAdd, Action onModified)
public static void UpdateGenreList(ICollection<GenreTagDto>? existingGenres, Series series,
IReadOnlyCollection<Genre> newGenres, Action<Genre> handleAdd, Action onModified)
{
// TODO: Write some unit tests
if (tags == null) return;
if (existingGenres == null) return;
var isModified = false;
// I want a union of these 2 lists. Return only elements that are in both lists, but the list types are different
var existingTags = series.Metadata.Genres.ToList();
// Convert tags and existing genres to hash sets for quick lookups by normalized title
var tagSet = new HashSet<string>(existingGenres.Select(t => t.Title.ToNormalized()));
var genreSet = new HashSet<string>(series.Metadata.Genres.Select(g => g.NormalizedTitle));
// Remove tags that are no longer present in the input tags
var existingTags = series.Metadata.Genres.ToList(); // Copy to avoid modifying collection while iterating
foreach (var existing in existingTags)
{
if (tags.SingleOrDefault(t => t.Title.ToNormalized().Equals(existing.NormalizedTitle)) == null)
if (!tagSet.Contains(existing.NormalizedTitle)) // Genre is no longer present in the incoming list
{
// Remove tag
series.Metadata.Genres.Remove(existing);
isModified = true;
}
}
// At this point, all tags that aren't in dto have been removed.
foreach (var tagTitle in tags.Select(t => t.Title))
// Prepare a dictionary for quick lookup of genres from the `newGenres` collection by normalized title
var allTagsDict = newGenres.ToDictionary(t => t.NormalizedTitle);
// Add new tags from the input list
foreach (var tagDto in existingGenres)
{
var normalizedTitle = tagTitle.ToNormalized();
var existingTag = allTags.SingleOrDefault(t => t.NormalizedTitle.Equals(normalizedTitle));
if (existingTag != null)
var normalizedTitle = tagDto.Title.ToNormalized();
if (!genreSet.Contains(normalizedTitle)) // Skip genres already attached to the series
{
if (series.Metadata.Genres.All(t => !t.NormalizedTitle.Equals(normalizedTitle)))
if (allTagsDict.TryGetValue(normalizedTitle, out var existingTag))
{
handleAdd(existingTag);
isModified = true;
handleAdd(existingTag); // Add existing tag from allTagsDict
}
}
else
{
// Add new tag
handleAdd(new GenreBuilder(tagTitle).Build());
isModified = true;
}
}
if (isModified)
{
onModified();
}
}
public static void UpdateGenreList(ICollection<GenreTagDto>? tags, Chapter chapter,
IReadOnlyCollection<Genre> allTags, Action<Genre> handleAdd, Action onModified)
{
// TODO: Write some unit tests
if (tags == null) return;
var isModified = false;
// I want a union of these 2 lists. Return only elements that are in both lists, but the list types are different
var existingTags = chapter.Genres.ToList();
foreach (var existing in existingTags)
{
if (tags.SingleOrDefault(t => t.Title.ToNormalized().Equals(existing.NormalizedTitle)) == null)
{
// Remove tag
chapter.Genres.Remove(existing);
isModified = true;
}
}
// At this point, all tags that aren't in dto have been removed.
foreach (var tagTitle in tags.Select(t => t.Title))
{
var normalizedTitle = tagTitle.ToNormalized();
var existingTag = allTags.SingleOrDefault(t => t.NormalizedTitle.Equals(normalizedTitle));
if (existingTag != null)
{
if (chapter.Genres.All(t => !t.NormalizedTitle.Equals(normalizedTitle)))
else
{
handleAdd(existingTag);
isModified = true;
handleAdd(new GenreBuilder(tagDto.Title).Build()); // Add new genre if not found
}
}
else
{
// Add new tag
handleAdd(new GenreBuilder(tagTitle).Build());
isModified = true;
}
}
// Call onModified if any changes were made
if (isModified)
{
onModified();

View File

@ -1,210 +1,190 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.DTOs;
using API.Entities;
using API.Entities.Enums;
using API.Entities.Metadata;
using API.Extensions;
using API.Helpers.Builders;
namespace API.Helpers;
#nullable enable
// Much of this helper is no longer needed in the new person architecture
public static class PersonHelper
{
/// <summary>
/// Given a list of all existing people, this will check the new names and roles and if it doesn't exist in allPeople, will create and
/// add an entry. For each person in name, the callback will be executed.
/// </summary>
/// <remarks>This does not remove people if an empty list is passed into names</remarks>
/// <remarks>This is used to add new people to a list without worrying about duplicating rows in the DB</remarks>
/// <param name="allPeople"></param>
/// <param name="names"></param>
/// <param name="role"></param>
/// <param name="action"></param>
public static void UpdatePeople(ICollection<Person> allPeople, IEnumerable<string> names, PersonRole role, Action<Person> action)
public static async Task UpdateSeriesMetadataPeopleAsync(SeriesMetadata metadata, ICollection<SeriesMetadataPeople> metadataPeople,
IEnumerable<ChapterPeople> chapterPeople, PersonRole role, IUnitOfWork unitOfWork)
{
var allPeopleTypeRole = allPeople.Where(p => p.Role == role).ToList();
var modification = false;
foreach (var name in names)
// Get all normalized names of people with the specified role from chapterPeople
var peopleToAdd = chapterPeople
.Where(cp => cp.Role == role)
.Select(cp => cp.Person.NormalizedName)
.ToList();
// Prepare a HashSet for quick lookup of people to add
var peopleToAddSet = new HashSet<string>(peopleToAdd);
// Get all existing people from metadataPeople with the specified role
var existingMetadataPeople = metadataPeople
.Where(mp => mp.Role == role)
.ToList();
// Identify people to remove from metadataPeople
var peopleToRemove = existingMetadataPeople
.Where(person => !peopleToAddSet.Contains(person.Person.NormalizedName))
.ToList();
// Remove identified people from metadataPeople
foreach (var personToRemove in peopleToRemove)
{
var normalizedName = name.ToNormalized();
// BUG: Doesn't this create a duplicate entry because allPeopleTypeRoles is a different instance?
var person = allPeopleTypeRole.Find(p =>
p.NormalizedName != null && p.NormalizedName.Equals(normalizedName));
if (person == null)
{
person = new PersonBuilder(name, role).Build();
allPeople.Add(person);
}
metadataPeople.Remove(personToRemove);
modification = true;
}
action(person);
// Bulk fetch existing people from the repository
var existingPeopleInDb = await unitOfWork.PersonRepository
.GetPeopleByNames(peopleToAdd);
// Prepare a dictionary for quick lookup of existing people by normalized name
var existingPeopleDict = new Dictionary<string, Person>();
foreach (var person in existingPeopleInDb)
{
existingPeopleDict.TryAdd(person.NormalizedName, person);
}
// Track the people to attach (newly created people)
var peopleToAttach = new List<Person>();
// Identify new people (not already in metadataPeople) to add
foreach (var personName in peopleToAdd)
{
// Check if the person already exists in metadataPeople with the specific role
var personAlreadyInMetadata = metadataPeople
.Any(mp => mp.Person.NormalizedName == personName && mp.Role == role);
if (!personAlreadyInMetadata)
{
// Check if the person exists in the database
if (!existingPeopleDict.TryGetValue(personName, out var dbPerson))
{
// If not, create a new Person entity
dbPerson = new PersonBuilder(personName).Build();
peopleToAttach.Add(dbPerson); // Add new person to the list to be attached
modification = true;
}
// Add the person to the SeriesMetadataPeople collection
metadataPeople.Add(new SeriesMetadataPeople
{
PersonId = dbPerson.Id, // EF Core will automatically update this after attach
Person = dbPerson,
SeriesMetadataId = metadata.Id,
SeriesMetadata = metadata,
Role = role
});
modification = true;
}
}
// Attach all new people in one go (EF Core will assign IDs after commit)
if (peopleToAttach.Count != 0)
{
await unitOfWork.DataContext.Person.AddRangeAsync(peopleToAttach);
}
// Commit the changes if any modifications were made
if (modification)
{
await unitOfWork.CommitAsync();
}
}
/// <summary>
/// Remove people on a list for a given role
/// </summary>
/// <remarks>Used to remove before we update/add new people</remarks>
/// <param name="existingPeople">Existing people on Entity</param>
/// <param name="people">People from metadata</param>
/// <param name="role">Role to filter on</param>
/// <param name="action">Callback which will be executed for each person removed</param>
public static void RemovePeople(ICollection<Person> existingPeople, IEnumerable<string> people, PersonRole role, Action<Person>? action = null)
public static async Task UpdateChapterPeopleAsync(Chapter chapter, IList<string> people, PersonRole role, IUnitOfWork unitOfWork)
{
var normalizedPeople = people.Select(Services.Tasks.Scanner.Parser.Parser.Normalize).ToList();
if (normalizedPeople.Count == 0)
{
var peopleToRemove = existingPeople.Where(p => p.Role == role).ToList();
foreach (var existingRoleToRemove in peopleToRemove)
{
existingPeople.Remove(existingRoleToRemove);
action?.Invoke(existingRoleToRemove);
}
return;
}
var modification = false;
foreach (var person in normalizedPeople)
{
var existingPerson = existingPeople.FirstOrDefault(p => p.Role == role && person.Equals(p.NormalizedName));
if (existingPerson == null) continue;
// Normalize the input names for comparison
var normalizedPeople = people.Select(p => p.ToNormalized()).Distinct().ToList(); // Ensure distinct people
existingPeople.Remove(existingPerson);
action?.Invoke(existingPerson);
}
// Get all existing ChapterPeople for the role
var existingChapterPeople = chapter.People
.Where(cp => cp.Role == role)
.ToList();
}
// Prepare a hash set for quick lookup of existing people by name
var existingPeopleNames = new HashSet<string>(existingChapterPeople.Select(cp => cp.Person.NormalizedName));
/// <summary>
/// Removes all people that are not present in the removeAllExcept list.
/// </summary>
/// <param name="existingPeople"></param>
/// <param name="removeAllExcept"></param>
/// <param name="action">Callback for all entities that should be removed</param>
public static void KeepOnlySamePeopleBetweenLists(IEnumerable<Person> existingPeople, ICollection<Person> removeAllExcept, Action<Person>? action = null)
{
// Bulk select all people from the repository whose names are in the provided list
var existingPeople = await unitOfWork.PersonRepository.GetPeopleByNames(normalizedPeople);
// Prepare a dictionary for quick lookup by normalized name
var existingPeopleDict = new Dictionary<string, Person>();
foreach (var person in existingPeople)
{
var existingPerson = removeAllExcept
.FirstOrDefault(p => p.Role == person.Role && person.NormalizedName.Equals(p.NormalizedName));
if (existingPerson == null)
{
action?.Invoke(person);
}
}
}
/// <summary>
/// Adds the person to the list if it's not already in there
/// </summary>
/// <param name="metadataPeople"></param>
/// <param name="person"></param>
public static void AddPersonIfNotExists(ICollection<Person> metadataPeople, Person person)
{
if (string.IsNullOrEmpty(person.Name)) return;
var existingPerson = metadataPeople.FirstOrDefault(p =>
p.NormalizedName == person.Name.ToNormalized() && p.Role == person.Role);
if (existingPerson == null)
{
metadataPeople.Add(person);
}
}
/// <summary>
/// For a given role and people dtos, update a series
/// </summary>
/// <param name="role"></param>
/// <param name="people"></param>
/// <param name="series"></param>
/// <param name="allPeople"></param>
/// <param name="handleAdd">This will call with an existing or new tag, but the method does not update the series Metadata</param>
/// <param name="onModified"></param>
public static void UpdatePeopleList(PersonRole role, ICollection<PersonDto>? people, Series series, IReadOnlyCollection<Person> allPeople,
Action<Person> handleAdd, Action onModified)
{
if (people == null) return;
var isModified = false;
// I want a union of these 2 lists. Return only elements that are in both lists, but the list types are different
var existingTags = series.Metadata.People.Where(p => p.Role == role).ToList();
foreach (var existing in existingTags)
{
if (people.SingleOrDefault(t => t.Id == existing.Id) == null) // This needs to check against role
{
// Remove tag
series.Metadata.People.Remove(existing);
isModified = true;
}
existingPeopleDict.TryAdd(person.NormalizedName, person);
}
// At this point, all tags that aren't in dto have been removed.
foreach (var tag in people)
// Identify people to remove (those present in ChapterPeople but not in the new list)
foreach (var existingChapterPerson in existingChapterPeople
.Where(existingChapterPerson => !normalizedPeople.Contains(existingChapterPerson.Person.NormalizedName)))
{
var existingTag = allPeople.FirstOrDefault(t => t.Name == tag.Name && t.Role == tag.Role);
if (existingTag != null)
{
if (series.Metadata.People.Where(t => t.Role == tag.Role).All(t => t.Name != null && !t.Name.Equals(tag.Name)))
{
handleAdd(existingTag);
isModified = true;
}
}
else
{
// Add new tag
handleAdd(new PersonBuilder(tag.Name, role).Build());
isModified = true;
}
chapter.People.Remove(existingChapterPerson);
unitOfWork.PersonRepository.Remove(existingChapterPerson);
modification = true;
}
if (isModified)
{
onModified();
}
}
// Identify new people to add
var newPeopleNames = normalizedPeople
.Where(p => !existingPeopleNames.Contains(p))
.ToList();
public static void UpdatePeopleList(PersonRole role, ICollection<PersonDto>? people, Chapter chapter, IReadOnlyCollection<Person> allPeople,
Action<Person> handleAdd, Action onModified)
{
if (people == null) return;
var isModified = false;
// I want a union of these 2 lists. Return only elements that are in both lists, but the list types are different
var existingTags = chapter.People.Where(p => p.Role == role).ToList();
foreach (var existing in existingTags)
if (newPeopleNames.Count > 0)
{
if (people.SingleOrDefault(t => t.Id == existing.Id) == null) // This needs to check against role
// Bulk insert new people (if they don't already exist in the database)
var newPeople = newPeopleNames
.Where(name => !existingPeopleDict.ContainsKey(name)) // Avoid adding duplicates
.Select(name => new PersonBuilder(name).Build())
.ToList();
foreach (var newPerson in newPeople)
{
// Remove tag
chapter.People.Remove(existing);
isModified = true;
unitOfWork.DataContext.Person.Attach(newPerson);
existingPeopleDict[newPerson.NormalizedName] = newPerson;
}
await unitOfWork.CommitAsync();
modification = true;
}
// At this point, all tags that aren't in dto have been removed.
foreach (var tag in people)
// Add all people (both existing and newly created) to the ChapterPeople
foreach (var personName in normalizedPeople)
{
var existingTag = allPeople.FirstOrDefault(t => t.Name == tag.Name && t.Role == tag.Role);
if (existingTag != null)
var person = existingPeopleDict[personName];
// Check if the person with the specific role is already added to the chapter's People collection
if (chapter.People.Any(cp => cp.PersonId == person.Id && cp.Role == role)) continue;
chapter.People.Add(new ChapterPeople
{
if (chapter.People.Where(t => t.Role == tag.Role).All(t => t.Name != null && !t.Name.Equals(tag.Name)))
{
handleAdd(existingTag);
isModified = true;
}
}
else
{
// Add new tag
handleAdd(new PersonBuilder(tag.Name, role).Build());
isModified = true;
}
PersonId = person.Id,
ChapterId = chapter.Id,
Role = role
});
modification = true;
}
if (isModified)
// Commit the changes to remove and add people
if (modification)
{
onModified();
await unitOfWork.CommitAsync();
}
}
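A matching hypothetical call for people, assuming the Writer field follows the ComicInfo schema:
var writers = TagHelper.GetTagValues(comicInfo.Writer);
await PersonHelper.UpdateChapterPeopleAsync(chapter, writers, PersonRole.Writer, unitOfWork);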
@ -220,7 +200,9 @@ public static class PersonHelper
dto.Colorists.Count != 0 ||
dto.Letterers.Count != 0 ||
dto.Editors.Count != 0 ||
dto.Translators.Count != 0;
dto.Translators.Count != 0 ||
dto.Teams.Count != 0 ||
dto.Locations.Count != 0;
}
public static bool HasAnyPeople(UpdateChapterDto? dto)
@ -235,6 +217,8 @@ public static class PersonHelper
dto.Colorists.Count != 0 ||
dto.Letterers.Count != 0 ||
dto.Editors.Count != 0 ||
dto.Translators.Count != 0;
dto.Translators.Count != 0 ||
dto.Teams.Count != 0 ||
dto.Locations.Count != 0;
}
}

View File

@ -1,198 +1,147 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.DTOs.Metadata;
using API.Entities;
using API.Extensions;
using API.Helpers.Builders;
using API.Services.Tasks.Scanner.Parser;
using Microsoft.EntityFrameworkCore;
namespace API.Helpers;
#nullable enable
public static class TagHelper
{
public static async Task UpdateChapterTags(Chapter chapter, IEnumerable<string> tagNames, IUnitOfWork unitOfWork)
{
    // Normalize tag names once and store them in a hash set for quick lookups
    var normalizedTagsToAdd = new HashSet<string>(tagNames.Select(t => t.ToNormalized()));
    var existingTagsSet = new HashSet<string>(chapter.Tags.Select(t => t.NormalizedTitle));

    var isModified = false;

    // Remove tags that are no longer present in the new list
    var tagsToRemove = chapter.Tags
        .Where(t => !normalizedTagsToAdd.Contains(t.NormalizedTitle))
        .ToList();

    if (tagsToRemove.Any())
    {
        foreach (var tagToRemove in tagsToRemove)
        {
            chapter.Tags.Remove(tagToRemove);
        }
        isModified = true;
    }
    // Get all normalized titles for bulk lookup from the database
    var existingTagTitles = await unitOfWork.DataContext.Tag
        .Where(t => normalizedTagsToAdd.Contains(t.NormalizedTitle))
        .ToDictionaryAsync(t => t.NormalizedTitle);

    // Find missing tags that are not already in the database
    var missingTags = normalizedTagsToAdd
        .Where(nt => !existingTagTitles.ContainsKey(nt))
        .Select(title => new TagBuilder(title).Build())
        .ToList();

    // Add missing tags to the database if any
    if (missingTags.Any())
    {
        unitOfWork.DataContext.Tag.AddRange(missingTags);
        await unitOfWork.CommitAsync(); // Commit once after adding missing tags to avoid multiple DB calls
        isModified = true;

        // Update the dictionary with newly inserted tags for easier lookup
        foreach (var tag in missingTags)
        {
            existingTagTitles[tag.NormalizedTitle] = tag;
        }
    }

    // Add the new or existing tags to the chapter
    foreach (var normalizedTitle in normalizedTagsToAdd)
    {
        var tag = existingTagTitles[normalizedTitle];

        if (!existingTagsSet.Contains(normalizedTitle))
        {
            chapter.Tags.Add(tag);
            isModified = true;
        }
    }

    // Commit changes if modifications were made to the chapter's tags
    if (isModified)
    {
        await unitOfWork.CommitAsync();
    }
}
/// <summary>
/// Returns a list of strings separated by ',', distinct by normalized names, already trimmed and empty entries removed.
/// </summary>
/// <param name="comicInfoTagSeparatedByComma"></param>
/// <returns></returns>
public static IList<string> GetTagValues(string comicInfoTagSeparatedByComma)
{
// TODO: Unit tests needed
// TODO: Refactor this into an Extension
if (string.IsNullOrEmpty(comicInfoTagSeparatedByComma))
{
return ImmutableList<string>.Empty;
}
return comicInfoTagSeparatedByComma.Split(",")
.Select(s => s.Trim())
return comicInfoTagSeparatedByComma.Split(',', StringSplitOptions.TrimEntries | StringSplitOptions.RemoveEmptyEntries)
.DistinctBy(Parser.Normalize)
.ToList();
}
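A quick usage sketch of the tightened Split call (the input is illustrative, and the dedupe assumes Parser.Normalize case-folds and strips punctuation):

var tags = TagHelper.GetTagValues(" Action, ,Sci-Fi,action ");
// tags -> ["Action", "Sci-Fi"]
// Entries are trimmed, the empty entry is dropped by RemoveEmptyEntries,
// and "action" is discarded as a normalized duplicate of "Action".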
public static void UpdateTagList(ICollection<TagDto>? tags, Series series, IReadOnlyCollection<Tag> allTags, Action<Tag> handleAdd, Action onModified)
{
    if (tags == null) return;

    var isModified = false;
    var existingTags = series.Metadata.Tags;

    // Create a HashSet for quick lookup of tag IDs
    var tagIds = new HashSet<int>(tags.Select(t => t.Id));

    // Remove tags that no longer exist in the provided tag list
    var tagsToRemove = existingTags.Where(existing => !tagIds.Contains(existing.Id)).ToList();
    if (tagsToRemove.Count > 0)
    {
        foreach (var tagToRemove in tagsToRemove)
        {
            existingTags.Remove(tagToRemove);
        }
        isModified = true;
    }

    // Create a HashSet of normalized titles for quick lookups
    var normalizedTitlesToAdd = new HashSet<string>(tags.Select(t => t.Title.ToNormalized()));
    var existingNormalizedTitles = new HashSet<string>(existingTags.Select(t => t.NormalizedTitle));

    // Add missing tags based on normalized title comparison
    foreach (var normalizedTitle in normalizedTitlesToAdd)
    {
        if (existingNormalizedTitles.Contains(normalizedTitle)) continue;

        var existingTag = allTags.FirstOrDefault(t => t.NormalizedTitle == normalizedTitle);
        handleAdd(existingTag ?? new TagBuilder(normalizedTitle).Build());
        isModified = true;
    }

    // Call the modification handler if any changes were made
    if (isModified)
    {
        onModified();
    }
}
}

View File

@ -52,6 +52,8 @@
"collection-already-exists":"Collection already exists",
"error-import-stack": "There was an issue importing MAL stack",
"person-doesnt-exist": "Person does not exist",
"device-doesnt-exist": "Device does not exist",
"generic-device-create": "There was an error when creating the device",
"generic-device-update": "There was an error when updating the device",

View File

@ -55,7 +55,7 @@ public interface IDirectoryService
bool CopyDirectoryToDirectory(string? sourceDirName, string destDirName, string searchPattern = "");
Dictionary<string, string> FindHighestDirectoriesFromFiles(IEnumerable<string> libraryFolders,
IList<string> filePaths);
string? FindLowestDirectoriesFromFiles(IList<string> libraryFolders,
IList<string> filePaths);
IEnumerable<string> GetFoldersTillRoot(string rootPath, string fullPath);
IEnumerable<string> GetFiles(string path, string fileNameRegex = "", SearchOption searchOption = SearchOption.TopDirectoryOnly);
@ -69,14 +69,13 @@ public interface IDirectoryService
SearchOption searchOption = SearchOption.TopDirectoryOnly);
IEnumerable<string> GetDirectories(string folderPath);
IEnumerable<string> GetDirectories(string folderPath, GlobMatcher? matcher);
IEnumerable<string> GetAllDirectories(string folderPath, GlobMatcher? matcher = null);
string GetParentDirectoryName(string fileOrFolder);
IList<string> ScanFiles(string folderPath, string fileTypes, GlobMatcher? matcher = null, SearchOption searchOption = SearchOption.AllDirectories);
DateTime GetLastWriteTime(string folderPath);
}
public class DirectoryService : IDirectoryService
{
public IFileSystem FileSystem { get; }
public string CacheDirectory { get; }
public string CoverImageDirectory { get; }
@ -95,11 +94,9 @@ public class DirectoryService : IDirectoryService
private static readonly Regex ExcludeDirectories = new Regex(
@"@eaDir|\.DS_Store|\.qpkg|__MACOSX|@Recently-Snapshot|@recycle|\.@__thumb|\.caltrash|#recycle|\.yacreaderlibrary",
MatchOptions, Parser.RegexTimeout);
private static readonly Regex FileCopyAppend = new Regex(@"\(\d+\)",
MatchOptions, Parser.RegexTimeout);
public static readonly string BackupDirectory = Path.Join(Directory.GetCurrentDirectory(), "config", "backups");
public DirectoryService(ILogger<DirectoryService> logger, IFileSystem fileSystem)
@ -136,22 +133,38 @@ public class DirectoryService : IDirectoryService
/// </summary>
/// <remarks>This will always exclude <see cref="Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith"/> patterns</remarks>
/// <param name="path">Directory to search</param>
/// <param name="searchPatternExpression">Regex version of search pattern (ie \.mp3|\.mp4). Defaults to * meaning all files.</param>
/// <param name="searchPatternExpression">Regex version of search pattern (e.g., \.mp3|\.mp4). Defaults to * meaning all files.</param>
/// <param name="searchOption">SearchOption to use, defaults to TopDirectoryOnly</param>
/// <returns>List of file paths</returns>
public IEnumerable<string> GetFilesWithCertainExtensions(string path,
string searchPatternExpression = "",
SearchOption searchOption = SearchOption.TopDirectoryOnly)
{
if (!FileSystem.Directory.Exists(path)) return ImmutableList<string>.Empty;
var reSearchPattern = new Regex(searchPatternExpression, RegexOptions.IgnoreCase, Tasks.Scanner.Parser.Parser.RegexTimeout);
// If directory doesn't exist, exit the iterator with no results
if (!FileSystem.Directory.Exists(path))
yield break;
return FileSystem.Directory.EnumerateFiles(path, "*", searchOption)
.Where(file =>
reSearchPattern.IsMatch(FileSystem.Path.GetExtension(file)) && !FileSystem.Path.GetFileName(file).StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith));
// Compile the regex pattern for faster repeated matching
var reSearchPattern = new Regex(searchPatternExpression,
RegexOptions.IgnoreCase | RegexOptions.Compiled,
Parser.RegexTimeout);
// Enumerate files in the directory and apply filters
foreach (var file in FileSystem.Directory.EnumerateFiles(path, "*", searchOption))
{
var fileName = FileSystem.Path.GetFileName(file);
var fileExtension = FileSystem.Path.GetExtension(file);
// Check if the file matches the pattern and exclude macOS metadata files
if (reSearchPattern.IsMatch(fileExtension) && !fileName.StartsWith(Parser.MacOsMetadataFileStartsWith))
{
yield return file;
}
}
}
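Because the method is now an iterator, callers that only need a handful of matches no longer pay for a fully materialized list. A sketch, assuming a DirectoryService instance and an illustrative path:

var firstFive = directoryService
    .GetFilesWithCertainExtensions(@"C:\Manga", @"\.cbz|\.cbr", SearchOption.AllDirectories)
    .Take(5)      // enumeration stops after five matches
    .ToList();    // previously the whole tree was walked into an ImmutableList first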
/// <summary>
/// Returns a list of folders from end of fullPath to rootPath. If a file is passed at the end of the fullPath, it will be ignored.
///
@ -173,8 +186,6 @@ public class DirectoryService : IDirectoryService
rootPath = rootPath.Replace(FileSystem.Path.DirectorySeparatorChar, FileSystem.Path.AltDirectorySeparatorChar);
}
var path = fullPath.EndsWith(separator) ? fullPath.Substring(0, fullPath.Length - 1) : fullPath;
var root = rootPath.EndsWith(separator) ? rootPath.Substring(0, rootPath.Length - 1) : rootPath;
var paths = new List<string>();
@ -215,25 +226,34 @@ public class DirectoryService : IDirectoryService
/// <returns></returns>
public IEnumerable<string> GetFiles(string path, string fileNameRegex = "", SearchOption searchOption = SearchOption.TopDirectoryOnly)
{
    if (!FileSystem.Directory.Exists(path))
        yield break; // Use yield break to exit the iterator early

    Regex? reSearchPattern = null;
    if (!string.IsNullOrEmpty(fileNameRegex))
    {
        // Compile the regex for better performance when used frequently
        reSearchPattern = new Regex(fileNameRegex, RegexOptions.IgnoreCase | RegexOptions.Compiled, Tasks.Scanner.Parser.Parser.RegexTimeout);
    }

    // Enumerate files lazily
    foreach (var file in FileSystem.Directory.EnumerateFiles(path, "*", searchOption))
    {
        var fileName = FileSystem.Path.GetFileName(file);

        // Exclude macOS metadata files
        if (fileName.StartsWith(Tasks.Scanner.Parser.Parser.MacOsMetadataFileStartsWith))
            continue;

        // If a regex is provided, match the file name against it
        if (reSearchPattern != null && !reSearchPattern.IsMatch(fileName))
            continue;

        yield return file; // Yield each matching file as it's found
    }
}
/// <summary>
/// Copies a file into a directory. Does not maintain parent folder of file.
/// Will create target directory if doesn't exist. Automatically overwrites what is there.
@ -329,7 +349,7 @@ public class DirectoryService : IDirectoryService
return GetFilesWithCertainExtensions(path, searchPatternExpression).ToArray();
}
return !FileSystem.Directory.Exists(path) ? [] : FileSystem.Directory.GetFiles(path);
}
/// <summary>
@ -391,10 +411,12 @@ public class DirectoryService : IDirectoryService
{
foreach (var file in di.EnumerateFiles())
{
if (!file.Exists) continue;
file.Delete();
}
foreach (var dir in di.EnumerateDirectories())
{
if (!dir.Exists) continue;
dir.Delete(true);
}
}
@ -594,46 +616,60 @@ public class DirectoryService : IDirectoryService
/// <summary>
/// Finds the lowest directory from a set of file paths. Does not return the root path, will always select the lowest non-root path.
/// </summary>
/// <remarks>If the file paths do not contain anything from libraryFolders, this returns null.</remarks>
/// <param name="libraryFolders">List of top level folders which files belong to</param>
/// <param name="filePaths">List of file paths that belong to libraryFolders</param>
/// <returns>Lowest non-root path, or null if not found</returns>
public string? FindLowestDirectoriesFromFiles(IList<string> libraryFolders, IList<string> filePaths)
{
    // Normalize the file paths only once
    var normalizedFilePaths = filePaths.Select(Parser.NormalizePath).ToList();

    // Use a list to store all directories for comparison
    var dirs = new List<string>();

    // Iterate through each library folder and collect matching directories
    foreach (var normalizedFolder in libraryFolders.Select(Parser.NormalizePath))
    {
        foreach (var file in normalizedFilePaths)
        {
            // If the file path contains the folder path, get its directory
            if (!file.Contains(normalizedFolder)) continue;

            var lowestPath = Path.GetDirectoryName(file);
            if (!string.IsNullOrEmpty(lowestPath))
            {
                dirs.Add(Parser.NormalizePath(lowestPath)); // Add to list
            }
        }
    }

    if (dirs.Count == 0)
    {
        return null; // No directories found
    }

    // Now find the deepest common directory among all paths
    var commonPath = dirs.Aggregate(GetDeepestCommonPath);

    // Return the common path if it exists and is not one of the root directories
    return libraryFolders.Any(folder => commonPath == Parser.NormalizePath(folder)) ? null : commonPath;
}

public static string GetDeepestCommonPath(string path1, string path2)
{
    var parts1 = path1.Split(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
    var parts2 = path2.Split(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);

    // Get the longest matching parts, ensuring that deeper parts in hierarchy are considered
    var commonParts = parts1.Zip(parts2, (p1, p2) => p1 == p2 ? p1 : null)
        .TakeWhile(part => part != null)
        .ToArray();

    return Parser.NormalizePath(string.Join(Path.DirectorySeparatorChar.ToString(), commonParts));
}
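For intuition, the Aggregate fold reduces any number of directories to their deepest shared ancestor. Paths here are illustrative:

var common = DirectoryService.GetDeepestCommonPath(
    "C:/Library/Manga/Series A/Volume 1",
    "C:/Library/Manga/Series A/Specials");
// common -> "C:/Library/Manga/Series A"
// FindLowestDirectoriesFromFiles then returns null instead of this value
// when it turns out to be one of the library roots.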
/// <summary>
/// Gets a set of directories from the folder path. Automatically excludes directories that shouldn't be in scope.
/// </summary>
@ -665,8 +701,9 @@ public class DirectoryService : IDirectoryService
/// Returns all directories, including subdirectories. Automatically excludes directories that shouldn't be in scope.
/// </summary>
/// <param name="folderPath"></param>
/// <param name="matcher"></param>
/// <returns></returns>
public IEnumerable<string> GetAllDirectories(string folderPath, GlobMatcher? matcher = null)
{
if (!FileSystem.Directory.Exists(folderPath)) return ImmutableArray<string>.Empty;
var directories = new List<string>();
@ -675,7 +712,7 @@ public class DirectoryService : IDirectoryService
foreach (var foundDir in foundDirs)
{
directories.Add(foundDir);
directories.AddRange(GetAllDirectories(foundDir, matcher));
}
return directories;
@ -699,93 +736,82 @@ public class DirectoryService : IDirectoryService
}
/// <summary>
/// Scans a directory by utilizing a recursive folder search.
/// </summary>
/// <param name="folderPath"></param>
/// <param name="fileTypes"></param>
/// <param name="matcher"></param>
/// <param name="searchOption">Pass TopDirectoryOnly to restrict the scan to the folder itself</param>
/// <returns></returns>
public IList<string> ScanFiles(string folderPath, string fileTypes, GlobMatcher? matcher = null,
    SearchOption searchOption = SearchOption.AllDirectories)
{
    _logger.LogTrace("[ScanFiles] called on {Path}", folderPath);
    var files = new List<string>();
    if (!Exists(folderPath)) return files;

    if (searchOption == SearchOption.AllDirectories)
    {
        // Stack to hold directories to process
        var directoriesToProcess = new Stack<string>();
        directoriesToProcess.Push(folderPath);

        while (directoriesToProcess.Count > 0)
        {
            var currentDirectory = directoriesToProcess.Pop();

            // Get files from the current directory
            var filesInCurrentDirectory = GetFilesWithCertainExtensions(currentDirectory, fileTypes);
            files.AddRange(filesInCurrentDirectory);

            // Get subdirectories and add them to the stack
            var subdirectories = GetDirectories(currentDirectory, matcher);
            foreach (var subdirectory in subdirectories)
            {
                directoriesToProcess.Push(subdirectory);
            }
        }
    }
    else
    {
        // If TopDirectoryOnly is specified, only get files in the specified folder
        var filesInCurrentDirectory = GetFilesWithCertainExtensions(folderPath, fileTypes);
        files.AddRange(filesInCurrentDirectory);
    }

    // Filter out unwanted files based on matcher if provided
    if (matcher != null)
    {
        files = files.Where(file => !matcher.ExcludeMatches(FileSystem.FileInfo.New(file).Name)).ToList();
    }

    return files;
}
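A usage sketch of the new searchOption parameter (paths illustrative, directoryService and matcher assumed in scope):

// Recursive scan (the default), honoring glob excludes via matcher
var all = directoryService.ScanFiles("/library/manga", @"\.cbz|\.epub", matcher);

// Only loose files at the top level; the scanner uses this to pick up
// surface files in folders whose children were already processed
var surface = directoryService.ScanFiles("/library/manga", @"\.cbz|\.epub", matcher,
    SearchOption.TopDirectoryOnly);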
/// <summary>
/// Recursively scans a folder and returns the max last write time on any folders and files
/// </summary>
/// <remarks>If the folder is empty or non-existent, this will return MaxValue for a DateTime</remarks>
/// <param name="folderPath"></param>
/// <returns>Max Last Write Time</returns>
public DateTime GetLastWriteTime(string folderPath)
{
    if (!FileSystem.Directory.Exists(folderPath)) return DateTime.MaxValue;
    var fileEntries = FileSystem.Directory.GetFileSystemEntries(folderPath, "*.*", SearchOption.AllDirectories);
    if (fileEntries.Length == 0) return DateTime.MaxValue;

    // Find the max last write time of the files
    var maxFiles = fileEntries.Max(path => FileSystem.File.GetLastWriteTime(path));

    // Get the last write time of the directory itself
    var directoryLastWriteTime = FileSystem.Directory.GetLastWriteTime(folderPath);

    // Use comparison to get the max DateTime value
    return directoryLastWriteTime > maxFiles ? directoryLastWriteTime : maxFiles;
}
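Folding in the directory's own timestamp matters because a deletion bumps the folder's write time without touching any surviving file. A sketch of the comparison (times illustrative):

// files' max write time:  2024-10-01 12:00  (no file modified since)
// directory write time:   2024-10-05 09:30  (a file was deleted on the 5th)
// returned value:         2024-10-05 09:30, so the scanner still sees a change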

View File

@ -888,6 +888,16 @@ public class ImageService : IImageService
return $"thumbnail{chapterId}";
}
/// <summary>
/// Returns the name format for a person cover
/// </summary>
/// <param name="personId"></param>
/// <returns></returns>
public static string GetPersonFormat(int personId)
{
return $"person{personId}";
}
public static string GetWebLinkFormat(string url, EncodeFormat encodeFormat)
{
return $"{new Uri(url).Host.Replace("www.", string.Empty)}{encodeFormat.GetExtension()}";

View File

@ -352,7 +352,7 @@ public class MetadataService : IMetadataService
/// <param name="libraryId"></param>
/// <param name="seriesId"></param>
/// <param name="forceUpdate">Overrides any cache logic and forces execution</param>
/// <param name="forceColorscape">Will ensure that the colorscape is regenned</param>
/// <param name="forceColorScape">Will ensure that the colorscape is regenerated</param>
public async Task GenerateCoversForSeries(int libraryId, int seriesId, bool forceUpdate = true, bool forceColorScape = true)
{
var series = await _unitOfWork.SeriesRepository.GetFullSeriesForSeriesIdAsync(seriesId);

View File

@ -426,6 +426,7 @@ public class ReadingListService : IReadingListService
var series = await _unitOfWork.SeriesRepository.GetFullSeriesForSeriesIdAsync(seriesId);
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId);
if (series == null || library == null) return;
await CreateReadingListsFromSeries(series, library);
}

View File

@ -111,7 +111,7 @@ public class SeriesService : ISeriesService
try
{
var seriesId = updateSeriesMetadataDto.SeriesMetadata.SeriesId;
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId, SeriesIncludes.Metadata);
if (series == null) return false;
series.Metadata ??= new SeriesMetadataBuilder()
@ -201,76 +201,80 @@ public class SeriesService : ISeriesService
{
if (PersonHelper.HasAnyPeople(updateSeriesMetadataDto.SeriesMetadata))
{
series.Metadata.People ??= new List<SeriesMetadataPeople>();

// Writers
if (!series.Metadata.WriterLocked)
{
    await HandlePeopleUpdateAsync(series.Metadata, updateSeriesMetadataDto.SeriesMetadata.Writers, PersonRole.Writer);
}

// Cover Artists
if (!series.Metadata.CoverArtistLocked)
{
    await HandlePeopleUpdateAsync(series.Metadata, updateSeriesMetadataDto.SeriesMetadata.CoverArtists, PersonRole.CoverArtist);
}

// Colorists
if (!series.Metadata.ColoristLocked)
{
    await HandlePeopleUpdateAsync(series.Metadata, updateSeriesMetadataDto.SeriesMetadata.Colorists, PersonRole.Colorist);
}

// Editors
if (!series.Metadata.EditorLocked)
{
    await HandlePeopleUpdateAsync(series.Metadata, updateSeriesMetadataDto.SeriesMetadata.Editors, PersonRole.Editor);
}

// Inkers
if (!series.Metadata.InkerLocked)
{
    await HandlePeopleUpdateAsync(series.Metadata, updateSeriesMetadataDto.SeriesMetadata.Inkers, PersonRole.Inker);
}

// Letterers
if (!series.Metadata.LettererLocked)
{
    await HandlePeopleUpdateAsync(series.Metadata, updateSeriesMetadataDto.SeriesMetadata.Letterers, PersonRole.Letterer);
}

// Pencillers
if (!series.Metadata.PencillerLocked)
{
    await HandlePeopleUpdateAsync(series.Metadata, updateSeriesMetadataDto.SeriesMetadata.Pencillers, PersonRole.Penciller);
}

// Publishers
if (!series.Metadata.PublisherLocked)
{
    await HandlePeopleUpdateAsync(series.Metadata, updateSeriesMetadataDto.SeriesMetadata.Publishers, PersonRole.Publisher);
}

// Imprints
if (!series.Metadata.ImprintLocked)
{
    await HandlePeopleUpdateAsync(series.Metadata, updateSeriesMetadataDto.SeriesMetadata.Imprints, PersonRole.Imprint);
}

// Teams
if (!series.Metadata.TeamLocked)
{
    await HandlePeopleUpdateAsync(series.Metadata, updateSeriesMetadataDto.SeriesMetadata.Teams, PersonRole.Team);
}

// Locations
if (!series.Metadata.LocationLocked)
{
    await HandlePeopleUpdateAsync(series.Metadata, updateSeriesMetadataDto.SeriesMetadata.Locations, PersonRole.Location);
}

// Translators
if (!series.Metadata.TranslatorLocked)
{
    await HandlePeopleUpdateAsync(series.Metadata, updateSeriesMetadataDto.SeriesMetadata.Translators, PersonRole.Translator);
}
}
series.Metadata.AgeRatingLocked = updateSeriesMetadataDto.SeriesMetadata.AgeRatingLocked;
@ -321,6 +325,90 @@ public class SeriesService : ISeriesService
return false;
}
/// <summary>
/// Exclusively for Series Update API
/// </summary>
/// <param name="metadata"></param>
/// <param name="peopleDtos"></param>
/// <param name="role"></param>
private async Task HandlePeopleUpdateAsync(SeriesMetadata metadata, ICollection<PersonDto> peopleDtos, PersonRole role)
{
// Normalize all names from the DTOs
var normalizedNames = peopleDtos.Select(p => Parser.Normalize(p.Name)).ToList();
// Bulk select people who already exist in the database
var existingPeople = await _unitOfWork.PersonRepository.GetPeopleByNames(normalizedNames);
// Use a dictionary for quick lookups
var existingPeopleDictionary = existingPeople.ToDictionary(p => p.NormalizedName, p => p);
// List to track people that will be added to the metadata
var peopleToAdd = new List<Person>();
foreach (var personDto in peopleDtos)
{
var normalizedPersonName = Parser.Normalize(personDto.Name);
// Check if the person exists in the dictionary
if (existingPeopleDictionary.TryGetValue(normalizedPersonName, out _)) continue;
// Person doesn't exist, so create a new one
var newPerson = new Person
{
Name = personDto.Name,
NormalizedName = normalizedPersonName
};
peopleToAdd.Add(newPerson);
existingPeopleDictionary[normalizedPersonName] = newPerson;
}
// Add any new people to the database in bulk
if (peopleToAdd.Count != 0)
{
_unitOfWork.PersonRepository.Attach(peopleToAdd);
}
// Now that we have all the people (new and existing), update the SeriesMetadataPeople
UpdateSeriesMetadataPeople(metadata, metadata.People, existingPeopleDictionary.Values, role);
}
private static void UpdateSeriesMetadataPeople(SeriesMetadata metadata, ICollection<SeriesMetadataPeople> metadataPeople, IEnumerable<Person> people, PersonRole role)
{
var peopleToAdd = people.ToList();
// Remove any people in the existing metadataPeople for this role that are no longer present in the input list
var peopleToRemove = metadataPeople
.Where(mp => mp.Role == role && peopleToAdd.TrueForAll(p => p.NormalizedName != mp.Person.NormalizedName))
.ToList();
foreach (var personToRemove in peopleToRemove)
{
metadataPeople.Remove(personToRemove);
}
// Add new people for this role if they don't already exist
foreach (var person in peopleToAdd)
{
var existingPersonEntry = metadataPeople
.FirstOrDefault(mp => mp.Person.NormalizedName == person.NormalizedName && mp.Role == role);
if (existingPersonEntry == null)
{
metadataPeople.Add(new SeriesMetadataPeople
{
PersonId = person.Id,
Person = person,
SeriesMetadataId = metadata.Id,
SeriesMetadata = metadata,
Role = role
});
}
}
}
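Note that both the removal pass and the add pass filter on Role, so a person holding several roles is never clobbered by an update to one of them. A sketch of the resulting behavior (names hypothetical):

// Before: People = [ (Jane Doe, Writer), (Jane Doe, Penciller) ]
// UpdateSeriesMetadataPeople(metadata, metadata.People, [janeDoe], PersonRole.Writer)
// After:  People = [ (Jane Doe, Writer), (Jane Doe, Penciller) ]
// Only rows with Role == Writer are compared against the incoming list;
// removing Jane from Writers would still leave the Penciller row intact.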
/// <summary>
///
/// </summary>
@ -384,6 +472,7 @@ public class SeriesService : ISeriesService
allChapterIds.AddRange(mapping.Value);
}
// NOTE: This isn't getting all the people and whatnot currently
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdsAsync(seriesIds);
_unitOfWork.SeriesRepository.Remove(series);

View File

@ -282,10 +282,11 @@ public class TaskScheduler : ITaskScheduler
{
var normalizedFolder = Tasks.Scanner.Parser.Parser.NormalizePath(folderPath);
var normalizedOriginal = Tasks.Scanner.Parser.Parser.NormalizePath(originalPath);
if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", [normalizedFolder, normalizedOriginal]) ||
HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", [normalizedFolder, string.Empty]))
{
_logger.LogInformation("Skipped scheduling ScanFolder for {Folder} as a job already queued",
_logger.LogDebug("Skipped scheduling ScanFolder for {Folder} as a job already queued",
normalizedFolder);
return;
}
@ -293,9 +294,6 @@ public class TaskScheduler : ITaskScheduler
_logger.LogInformation("Scheduling ScanFolder for {Folder}", normalizedFolder);
BackgroundJob.Schedule(() => _scannerService.ScanFolder(normalizedFolder, normalizedOriginal), delay);
}
@ -305,7 +303,7 @@ public class TaskScheduler : ITaskScheduler
var normalizedFolder = Tasks.Scanner.Parser.Parser.NormalizePath(folderPath);
if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", [normalizedFolder, string.Empty]))
{
_logger.LogInformation("Skipped scheduling ScanFolder for {Folder} as a job already queued",
_logger.LogDebug("Skipped scheduling ScanFolder for {Folder} as a job already queued",
normalizedFolder);
return;
}

View File

@ -278,7 +278,7 @@ public class LibraryWatcher : ILibraryWatcher
_logger.LogTrace("Folder path: {FolderPath}", fullPath);
if (string.IsNullOrEmpty(fullPath))
{
_logger.LogTrace("[LibraryWatcher] Change from {FilePath} could not find root level folder, ignoring change", filePath);
_logger.LogInformation("[LibraryWatcher] Change from {FilePath} could not find root level folder, ignoring change", filePath);
return;
}

View File

@ -1,6 +1,7 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
@ -121,7 +122,7 @@ public class ParseScannedFiles
/// <param name="seriesPaths">A dictionary mapping a normalized path to a list of <see cref="SeriesModified"/> to help scanner skip I/O</param>
/// <param name="folderPath">A library folder or series folder</param>
/// <param name="forceCheck">If we should bypass any folder last write time checks on the scan and force I/O</param>
public async Task<IList<ScanResult>> ScanFiles(string folderPath, bool scanDirectoryByDirectory,
IDictionary<string, IList<SeriesModified>> seriesPaths, Library library, bool forceCheck = false)
{
var fileExtensions = string.Join("|", library.LibraryFileTypes.Select(l => l.FileTypeGroup.GetRegex()));
@ -138,69 +139,128 @@ public class ParseScannedFiles
return await ScanSingleDirectory(folderPath, seriesPaths, library, forceCheck, result, fileExtensions, matcher);
}
private async Task<IList<ScanResult>> ScanDirectories(string folderPath, IDictionary<string, IList<SeriesModified>> seriesPaths,
    Library library, bool forceCheck, GlobMatcher matcher, List<ScanResult> result, string fileExtensions)
{
    var allDirectories = _directoryService.GetAllDirectories(folderPath, matcher)
        .Select(Parser.Parser.NormalizePath)
        .OrderByDescending(d => d.Length)
        .ToList();

    var processedDirs = new HashSet<string>();

    _logger.LogDebug("[ScannerService] Step 1.C Found {DirectoryCount} directories to process for {FolderPath}", allDirectories.Count, folderPath);
    foreach (var directory in allDirectories)
    {
        // Don't process any folders where we've already scanned everything below
        if (processedDirs.Any(d => d.StartsWith(directory + Path.AltDirectorySeparatorChar) || d.Equals(directory)))
        {
            // Skip this directory as we've already processed a parent unless there are loose files at that directory
            CheckSurfaceFiles(result, directory, folderPath, fileExtensions, matcher);
            continue;
        }

        // Skip directories ending with "Specials", let the parent handle it
        if (directory.EndsWith("Specials", StringComparison.OrdinalIgnoreCase))
        {
            _logger.LogDebug("Skipping {Directory} as it ends with 'Specials'", directory);
            continue;
        }

        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
            MessageFactory.FileScanProgressEvent(directory, library.Name, ProgressEventType.Updated));

        if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, directory, forceCheck))
        {
            HandleUnchangedFolder(result, folderPath, directory);
        }
        else
        {
            PerformFullScan(result, directory, folderPath, fileExtensions, matcher);
        }

        processedDirs.Add(directory);
    }

    return result;
}
/// <summary>
/// Checks against all folder paths on file if the last scanned is >= the directory's last write time, down to the second
/// </summary>
/// <param name="seriesPaths"></param>
/// <param name="directory">This should be normalized</param>
/// <param name="forceCheck"></param>
/// <returns></returns>
private bool HasSeriesFolderNotChangedSinceLastScan(IDictionary<string, IList<SeriesModified>> seriesPaths, string directory, bool forceCheck)
{
// With the bottom-up approach, this can report a false positive where a nested folder will get scanned even though a parent is the series
// This can't really be avoided. This is more likely to happen on Image chapter folder library layouts.
if (forceCheck || !seriesPaths.TryGetValue(directory, out var seriesList))
{
return false;
}
foreach (var series in seriesList)
{
var lastWriteTime = _directoryService.GetLastWriteTime(series.LowestFolderPath!).Truncate(TimeSpan.TicksPerSecond);
var seriesLastScanned = series.LastScanned.Truncate(TimeSpan.TicksPerSecond);
if (seriesLastScanned < lastWriteTime)
{
return false;
}
}
return true;
}
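The Truncate calls (Kavita's DateTime extension) drop sub-second ticks on both sides, so filesystems with coarse timestamp resolution do not report perpetual changes. A sketch:

var lastScanned = new DateTime(2024, 10, 23, 15, 11, 18, 400).Truncate(TimeSpan.TicksPerSecond);
var lastWrite   = new DateTime(2024, 10, 23, 15, 11, 18, 900).Truncate(TimeSpan.TicksPerSecond);
// Both truncate to 15:11:18, so lastScanned < lastWrite is false and the
// folder is treated as unchanged despite the 500 ms difference.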
/// <summary>
/// Handles directories that haven't changed since the last scan.
/// </summary>
private void HandleUnchangedFolder(List<ScanResult> result, string folderPath, string directory)
{
if (result.Exists(r => r.Folder == directory))
{
_logger.LogDebug("[ProcessFiles] Skipping adding {Directory} as it's already added, this indicates a bad layout issue", directory);
}
else
{
_logger.LogDebug("[ProcessFiles] Skipping {Directory} as it hasn't changed since last scan", directory);
result.Add(CreateScanResult(directory, folderPath, false, ArraySegment<string>.Empty));
}
}
/// <summary>
/// Performs a full scan of the directory and adds it to the result.
/// </summary>
private void PerformFullScan(List<ScanResult> result, string directory, string folderPath, string fileExtensions, GlobMatcher matcher)
{
_logger.LogDebug("[ProcessFiles] Performing full scan on {Directory}", directory);
var files = _directoryService.ScanFiles(directory, fileExtensions, matcher);
if (files.Count == 0)
{
_logger.LogDebug("[ProcessFiles] Empty directory: {Directory}. Keeping empty will cause Kavita to scan this each time", directory);
}
result.Add(CreateScanResult(directory, folderPath, true, files));
}
/// <summary>
/// Checks the top level of the directory for loose files and adds them to the result.
/// </summary>
private void CheckSurfaceFiles(List<ScanResult> result, string directory, string folderPath, string fileExtensions, GlobMatcher matcher)
{
var files = _directoryService.ScanFiles(directory, fileExtensions, matcher, SearchOption.TopDirectoryOnly);
if (files.Count == 0)
{
return;
}
result.Add(CreateScanResult(directory, folderPath, true, files));
}
/// <summary>
/// Scans a single directory and processes the scan result.
/// </summary>
private async Task<IList<ScanResult>> ScanSingleDirectory(string folderPath, IDictionary<string, IList<SeriesModified>> seriesPaths, Library library, bool forceCheck, List<ScanResult> result,
string fileExtensions, GlobMatcher matcher)
{
@ -249,6 +309,33 @@ public class ParseScannedFiles
};
}
/// <summary>
/// Processes scanResults to track all series across the combined results.
/// Ensures series are correctly grouped even if they span multiple folders.
/// </summary>
/// <param name="scanResults">A collection of scan results</param>
/// <param name="scannedSeries">A concurrent dictionary to store the tracked series</param>
private void TrackSeriesAcrossScanResults(IList<ScanResult> scanResults, ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries)
{
// Flatten all ParserInfos from scanResults
var allInfos = scanResults.SelectMany(sr => sr.ParserInfos).ToList();
// Iterate through each ParserInfo and track the series
foreach (var info in allInfos)
{
if (info == null) continue;
try
{
TrackSeries(scannedSeries, info);
}
catch (Exception ex)
{
_logger.LogError(ex, "[ScannerService] Exception occurred during tracking {FilePath}. Skipping this file", info?.FullFilePath);
}
}
}
/// <summary>
/// Attempts to either add a new instance of a series mapping to the _scannedSeries bag or add to an existing one.
@ -263,6 +350,8 @@ public class ParseScannedFiles
// Check if normalized info.Series already exists and if so, update info to use that name instead
info.Series = MergeName(scannedSeries, info);
// BUG: This will fail for Solo Leveling & Solo Leveling (Manga)
var normalizedSeries = info.Series.ToNormalized();
var normalizedSortSeries = info.SeriesSort.ToNormalized();
var normalizedLocalizedSeries = info.LocalizedSeries.ToNormalized();
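For context on the BUG note: the keys compared below are normalized forms, so nearby titles can collide through the localized/sort fallbacks. A sketch, assuming ToNormalized lower-cases and strips punctuation:

// "Solo Leveling".ToNormalized()          -> "sololeveling"
// "Solo Leveling (Manga)".ToNormalized()  -> "sololevelingmanga"
// The names differ after normalization, but if one file carries
// LocalizedSeries "Solo Leveling", the equality checks against
// normalizedLocalizedSeries can fold the two series into one key.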
@ -293,13 +382,13 @@ public class ParseScannedFiles
}
catch (Exception ex)
{
_logger.LogCritical(ex, "[ScannerService] {SeriesName} matches against multiple series in the parsed series. This indicates a critical kavita issue. Key will be skipped", info.Series);
_logger.LogCritical("[ScannerService] {SeriesName} matches against multiple series in the parsed series. This indicates a critical kavita issue. Key will be skipped", info.Series);
foreach (var seriesKey in scannedSeries.Keys.Where(ps =>
ps.Format == info.Format && (ps.NormalizedName.Equals(normalizedSeries)
|| ps.NormalizedName.Equals(normalizedLocalizedSeries)
|| ps.NormalizedName.Equals(normalizedSortSeries))))
{
_logger.LogCritical("[ScannerService] Matches: {SeriesName} matches on {SeriesKey}", info.Series, seriesKey.Name);
_logger.LogCritical("[ScannerService] Matches: '{SeriesName}' matches on '{SeriesKey}'", info.Series, seriesKey.Name);
}
}
}
@ -338,11 +427,12 @@ public class ParseScannedFiles
}
catch (Exception ex)
{
_logger.LogCritical(ex, "[ScannerService] Multiple series detected for {SeriesName} ({File})! This is critical to fix! There should only be 1", info.Series, info.FullFilePath);
_logger.LogCritical("[ScannerService] Multiple series detected for {SeriesName} ({File})! This is critical to fix! There should only be 1", info.Series, info.FullFilePath);
var values = scannedSeries.Where(p =>
(p.Key.NormalizedName.ToNormalized() == normalizedSeries ||
p.Key.NormalizedName.ToNormalized() == normalizedLocalSeries) &&
p.Key.Format == info.Format);
foreach (var pair in values)
{
_logger.LogCritical("[ScannerService] Duplicate Series in DB matches with {SeriesName}: {DuplicateName}", info.Series, pair.Key.Name);
@ -353,7 +443,6 @@ public class ParseScannedFiles
return info.Series;
}
/// <summary>
/// This will process series by folder groups. This is used solely by ScanSeries
/// </summary>
@ -364,151 +453,306 @@ public class ParseScannedFiles
/// <param name="forceCheck">Defaults to false</param>
/// <returns></returns>
public async Task<IList<ScannedSeriesResult>> ScanLibrariesForSeries(Library library,
    IList<string> folders, bool isLibraryScan,
    IDictionary<string, IList<SeriesModified>> seriesPaths, bool forceCheck = false)
{
    await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
        MessageFactory.FileScanProgressEvent("File Scan Starting", library.Name, ProgressEventType.Started));

    _logger.LogDebug("[ScannerService] Library {LibraryName} Step 1.A: Process {FolderCount} folders", library.Name, folders.Count);

    var processedScannedSeries = new ConcurrentBag<ScannedSeriesResult>();

    foreach (var folder in folders)
    {
        try
        {
            await ScanAndParseFolder(folder, library, isLibraryScan, seriesPaths, processedScannedSeries, forceCheck);
        }
        catch (ArgumentException ex)
        {
            _logger.LogError(ex, "[ScannerService] The directory '{FolderPath}' does not exist", folder);
        }
    }

    await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
        MessageFactory.FileScanProgressEvent("File Scan Done", library.Name, ProgressEventType.Ended));
return processedScannedSeries.ToList();
}
/// <summary>
/// Helper method to scan and parse a folder
/// </summary>
/// <param name="folderPath"></param>
/// <param name="library"></param>
/// <param name="isLibraryScan"></param>
/// <param name="seriesPaths"></param>
/// <param name="processedScannedSeries"></param>
/// <param name="forceCheck"></param>
private async Task ScanAndParseFolder(string folderPath, Library library,
    bool isLibraryScan, IDictionary<string, IList<SeriesModified>> seriesPaths,
    ConcurrentBag<ScannedSeriesResult> processedScannedSeries, bool forceCheck)
{
    _logger.LogDebug("\t[ScannerService] Library {LibraryName} Step 1.B: Scan files in {Folder}", library.Name, folderPath);
    var scanResults = await ScanFiles(folderPath, isLibraryScan, seriesPaths, library, forceCheck);

    // Aggregate the scanned series across all scanResults
    var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();

    _logger.LogDebug("\t[ScannerService] Library {LibraryName} Step 1.C: Process files in {Folder}", library.Name, folderPath);
    foreach (var scanResult in scanResults)
    {
        await ParseFiles(scanResult, seriesPaths, library);
    }

    _logger.LogDebug("\t[ScannerService] Library {LibraryName} Step 1.D: Merge any localized series with series {Folder}", library.Name, folderPath);
    scanResults = MergeLocalizedSeriesAcrossScanResults(scanResults);

    _logger.LogDebug("\t[ScannerService] Library {LibraryName} Step 1.E: Group all parsed data into logical Series", library.Name);
    TrackSeriesAcrossScanResults(scanResults, scannedSeries);

    // Now transform and add to processedScannedSeries AFTER everything is processed
    _logger.LogDebug("\t[ScannerService] Library {LibraryName} Step 1.F: Generate Sort Order for Series and Finalize", library.Name);
    GenerateProcessedScannedSeries(scannedSeries, scanResults, processedScannedSeries);
}
/// <summary>
/// Processes and generates the final results for processedScannedSeries after updating sort order.
/// </summary>
/// <param name="scannedSeries">A concurrent dictionary of tracked series and their parsed infos</param>
/// <param name="scanResults">List of all scan results, used to determine if any series has changed</param>
/// <param name="processedScannedSeries">A thread-safe concurrent bag of processed series results</param>
private void GenerateProcessedScannedSeries(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries, IList<ScanResult> scanResults, ConcurrentBag<ScannedSeriesResult> processedScannedSeries)
{
// First, update the sort order for all series
UpdateSeriesSortOrder(scannedSeries);
// Now, generate the final processed scanned series results
CreateFinalSeriesResults(scannedSeries, scanResults, processedScannedSeries);
}
/// <summary>
/// Updates the sort order for all series in the scannedSeries dictionary.
/// </summary>
/// <param name="scannedSeries">A concurrent dictionary of tracked series and their parsed infos</param>
private void UpdateSeriesSortOrder(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries)
{
foreach (var series in scannedSeries.Keys)
{
if (scannedSeries[series].Count <= 0) continue;
try
{
                UpdateSortOrder(scannedSeries, series);
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "[ScannerService] Issue occurred while setting IssueOrder for series {SeriesName}", series.Name);
}
}
}
/// <summary>
/// Generates the final processed scanned series results after processing the sort order.
/// </summary>
/// <param name="scannedSeries">A concurrent dictionary of tracked series and their parsed infos</param>
/// <param name="scanResults">List of all scan results, used to determine if any series has changed</param>
/// <param name="processedScannedSeries">The list where processed results will be added</param>
private static void CreateFinalSeriesResults(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries,
IList<ScanResult> scanResults, ConcurrentBag<ScannedSeriesResult> processedScannedSeries)
{
foreach (var series in scannedSeries.Keys)
{
if (scannedSeries[series].Count <= 0) continue;
            processedScannedSeries.Add(new ScannedSeriesResult
            {
                HasChanged = scanResults.Any(sr => sr.HasChanged), // Combine HasChanged flag across all scanResults
ParsedSeries = series,
ParsedInfos = scannedSeries[series]
});
}
}
/// <summary>
/// Merges localized series with the series field across all scan results.
/// Combines ParserInfos from all scanResults and processes them collectively
/// to ensure consistent series names.
/// </summary>
/// <example>
/// Accel World v01.cbz has Series "Accel World" and Localized Series "World of Acceleration"
/// World of Acceleration v02.cbz has Series "World of Acceleration"
/// After running this code, we'd have:
/// World of Acceleration v02.cbz having Series "Accel World" and Localized Series of "World of Acceleration"
/// </example>
/// <param name="scanResults">A collection of scan results</param>
/// <returns>A new list of scan results with merged series</returns>
private IList<ScanResult> MergeLocalizedSeriesAcrossScanResults(IList<ScanResult> scanResults)
{
// Flatten all ParserInfos across scanResults
var allInfos = scanResults.SelectMany(sr => sr.ParserInfos).ToList();
// Filter relevant infos (non-special and with localized series)
var relevantInfos = GetRelevantInfos(allInfos);
if (relevantInfos.Count == 0) return scanResults;
// Get distinct localized series and process each one
var distinctLocalizedSeries = relevantInfos
.Select(i => i.LocalizedSeries)
.Distinct()
.ToList();
foreach (var localizedSeries in distinctLocalizedSeries)
{
if (string.IsNullOrEmpty(localizedSeries)) continue;
// Process the localized series for merging
ProcessLocalizedSeries(scanResults, allInfos, relevantInfos, localizedSeries);
}
// Remove or clear any scan results that now have no ParserInfos after merging
return scanResults.Where(sr => sr.ParserInfos.Any()).ToList();
}
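To make the intended effect concrete, here is a minimal sketch of the before/after state, using the Accel World example from the summary above (ParserInfo initializer syntax as used elsewhere in this diff; the merge runs through private helpers, so this is illustrative only, not the committed API):

// Hedged sketch: two infos as they might arrive from separate scan results.
var canonical = new ParserInfo { Series = "Accel World", LocalizedSeries = "World of Acceleration" };
var localized = new ParserInfo { Series = "World of Acceleration", LocalizedSeries = string.Empty };

// After MergeLocalizedSeriesAcrossScanResults runs over both scan results,
// the second info is remapped so both files stack under one series:
//   localized.Series == "Accel World"
// and the remapped info is moved into the scan result holding "Accel World".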
private static List<ParserInfo> GetRelevantInfos(List<ParserInfo> allInfos)
{
return allInfos
.Where(i => !i.IsSpecial && !string.IsNullOrEmpty(i.LocalizedSeries))
.GroupBy(i => i.Format)
.SelectMany(g => g.ToList())
.ToList();
}
private void ProcessLocalizedSeries(IList<ScanResult> scanResults, List<ParserInfo> allInfos, List<ParserInfo> relevantInfos, string localizedSeries)
{
var seriesForLocalized = GetSeriesForLocalized(relevantInfos, localizedSeries);
if (seriesForLocalized.Count == 0) return;
var nonLocalizedSeries = GetNonLocalizedSeries(seriesForLocalized, localizedSeries);
if (nonLocalizedSeries == null) return;
// Remap and update relevant ParserInfos
RemapSeries(scanResults, allInfos, localizedSeries, nonLocalizedSeries);
}
private static List<string> GetSeriesForLocalized(List<ParserInfo> relevantInfos, string localizedSeries)
{
return relevantInfos
.Where(i => i.LocalizedSeries == localizedSeries)
.DistinctBy(r => r.Series)
.Select(r => r.Series)
.ToList();
}
private string? GetNonLocalizedSeries(List<string> seriesForLocalized, string localizedSeries)
{
switch (seriesForLocalized.Count)
{
case 1:
return seriesForLocalized[0];
case <= 2:
return seriesForLocalized.FirstOrDefault(s => !s.Equals(Parser.Parser.Normalize(localizedSeries)));
default:
_logger.LogError(
"[ScannerService] Multiple series detected across scan results that contain localized series. " +
"This will cause them to group incorrectly. Please separate series into their own dedicated folder: {LocalizedSeries}",
string.Join(", ", seriesForLocalized)
);
return null;
}
}
private void RemapSeries(IList<ScanResult> scanResults, List<ParserInfo> allInfos, string localizedSeries, string nonLocalizedSeries)
{
// Find all infos that need to be remapped from the localized series to the non-localized series
var seriesToBeRemapped = allInfos.Where(i => i.Series.Equals(localizedSeries)).ToList();
foreach (var infoNeedingMapping in seriesToBeRemapped)
{
infoNeedingMapping.Series = nonLocalizedSeries;
// Find the scan result containing the localized info
var localizedScanResult = scanResults.FirstOrDefault(sr => sr.ParserInfos.Contains(infoNeedingMapping));
if (localizedScanResult == null) continue;
// Remove the localized series from this scan result
localizedScanResult.ParserInfos.Remove(infoNeedingMapping);
// Find the scan result that should be merged with
var nonLocalizedScanResult = scanResults.FirstOrDefault(sr => sr.ParserInfos.Any(pi => pi.Series == nonLocalizedSeries));
if (nonLocalizedScanResult == null) continue;
// Add the remapped info to the non-localized scan result
nonLocalizedScanResult.ParserInfos.Add(infoNeedingMapping);
// Assign the higher folder path (i.e., the one closer to the root)
//nonLocalizedScanResult.Folder = DirectoryService.GetDeepestCommonPath(localizedScanResult.Folder, nonLocalizedScanResult.Folder);
}
}
/// <summary>
/// For a given ScanResult, sets the ParserInfos on the result
/// </summary>
/// <param name="result"></param>
/// <param name="seriesPaths"></param>
/// <param name="library"></param>
    private async Task ParseFiles(ScanResult result, IDictionary<string, IList<SeriesModified>> seriesPaths, Library library)
{
        // TODO: This should return the result as we are modifying it as a side effect

        // If the folder hasn't changed, generate fake ParserInfos for the Series that were in that folder
        var normalizedFolder = Parser.Parser.NormalizePath(result.Folder);
        if (!result.HasChanged)
        {
            result.ParserInfos = seriesPaths[normalizedFolder]
                .Select(fp => new ParserInfo { Series = fp.SeriesName, Format = fp.Format })
                .ToList();

            _logger.LogDebug("[ScannerService] Skipped File Scan for {Folder} as it hasn't changed", normalizedFolder);
            await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
                MessageFactory.FileScanProgressEvent($"Skipped {normalizedFolder}", library.Name, ProgressEventType.Updated));
return;
}
var files = result.Files;
var fileCount = files.Count;
        // When we do process files for a folder, we need to parse the information and combine parser infos
        // NOTE: We might want to move the merge step later in the process, like return and combine
        if (fileCount == 0)
        {
            _logger.LogInformation("[ScannerService] {Folder} is empty or has no matching file types", normalizedFolder);
result.ParserInfos = ArraySegment<ParserInfo>.Empty;
return;
}
_logger.LogDebug("[ScannerService] Found {Count} files for {Folder}", files.Count, normalizedFolder);
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.FileScanProgressEvent($"{files.Count} files in {normalizedFolder}", library.Name, ProgressEventType.Updated));
MessageFactory.FileScanProgressEvent($"{fileCount} files in {normalizedFolder}", library.Name, ProgressEventType.Updated));
        // Parse files into ParserInfos
        if (fileCount < 100)
        {
            // Process files sequentially
            result.ParserInfos = files
                .Select(file => _readingItemService.ParseFile(file, normalizedFolder, result.LibraryRoot, library.Type))
                .Where(info => info != null)
                .ToList()!;
        }
        else
        {
            // Process files in parallel
            var tasks = files.Select(file => Task.Run(() =>
                _readingItemService.ParseFile(file, normalizedFolder, result.LibraryRoot, library.Type)));

            var infos = await Task.WhenAll(tasks);
            result.ParserInfos = infos.Where(info => info != null).ToList()!;
        }
}
    private static void UpdateSortOrder(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries, ParsedSeries series)
{
// Set the Sort order per Volume
var volumes = scannedSeries[series].GroupBy(info => info.Volumes);
@ -586,96 +830,4 @@ public class ParseScannedFiles
}
}
}
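The hunk above elides most of the UpdateSortOrder body. Conceptually, a hedged sketch of the per-volume pass (IssueOrder and OrderByNatural are assumptions inferred from the grouping and log messages in this diff, not the committed implementation):

// Assumed sketch: within each volume grouping, walk infos in natural
// chapter order and assign a monotonically increasing sort order.
foreach (var volume in volumes)
{
    var sortOrder = 0f;
    foreach (var info in volume.OrderByNatural(i => i.Chapters)) // OrderByNatural: assumed helper
    {
        info.IssueOrder = sortOrder; // IssueOrder: hypothetical field name, per the LogError above
        sortOrder++;
    }
}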
private bool HasAllSeriesFolderNotChangedSinceLastScan(IList<SeriesModified> seriesFolders,
string normalizedFolder)
{
return seriesFolders.All(f => HasSeriesFolderNotChangedSinceLastScan(f, normalizedFolder));
}
/// <summary>
/// Checks against all folder paths on file if the last scanned is >= the directory's last write down to the second
/// </summary>
/// <param name="seriesPaths"></param>
/// <param name="normalizedFolder"></param>
/// <param name="forceCheck"></param>
/// <returns></returns>
private bool HasSeriesFolderNotChangedSinceLastScan(IDictionary<string, IList<SeriesModified>> seriesPaths, string normalizedFolder, bool forceCheck = false)
{
if (forceCheck) return false;
if (seriesPaths.TryGetValue(normalizedFolder, out var v))
{
return HasAllSeriesFolderNotChangedSinceLastScan(v, normalizedFolder);
}
return false;
}
private bool HasSeriesFolderNotChangedSinceLastScan(SeriesModified seriesModified, string normalizedFolder)
{
return seriesModified.LastScanned.Truncate(TimeSpan.TicksPerSecond) >=
_directoryService.GetLastWriteTime(normalizedFolder)
.Truncate(TimeSpan.TicksPerSecond);
}
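As a quick illustration of why both sides are truncated (reusing the same Truncate extension the method calls), a sub-second write after the last scan does not count as a change:

// Scan at 12:00:00.500, write at 12:00:00.900: both truncate to 12:00:00,
// so the folder is still considered unchanged and the file scan is skipped.
var lastScanned = new DateTime(2024, 10, 23, 12, 0, 0).AddMilliseconds(500);
var lastWrite = new DateTime(2024, 10, 23, 12, 0, 0).AddMilliseconds(900);
var unchanged = lastScanned.Truncate(TimeSpan.TicksPerSecond) >=
                lastWrite.Truncate(TimeSpan.TicksPerSecond); // true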
/// <summary>
/// Checks if there are any ParserInfos that have a Series that matches the LocalizedSeries field in any other info. If so,
/// rewrites the infos with series name instead of the localized name, so they stack.
/// </summary>
/// <example>
/// Accel World v01.cbz has Series "Accel World" and Localized Series "World of Acceleration"
/// World of Acceleration v02.cbz has Series "World of Acceleration"
/// After running this code, we'd have:
/// World of Acceleration v02.cbz having Series "Accel World" and Localized Series of "World of Acceleration"
/// </example>
/// <param name="infos">A collection of ParserInfos</param>
private void MergeLocalizedSeriesWithSeries(IList<ParserInfo> infos)
{
var hasLocalizedSeries = infos.Any(i => !string.IsNullOrEmpty(i.LocalizedSeries));
if (!hasLocalizedSeries) return;
var localizedSeries = infos
.Where(i => !i.IsSpecial)
.Select(i => i.LocalizedSeries)
.Distinct()
.FirstOrDefault(i => !string.IsNullOrEmpty(i));
if (string.IsNullOrEmpty(localizedSeries)) return;
// NOTE: If we have multiple series in a folder with a localized title, then this will fail. It will group into one series. User needs to fix this themselves.
string? nonLocalizedSeries;
// Normalize this as many of the cases is a capitalization difference
var nonLocalizedSeriesFound = infos
.Where(i => !i.IsSpecial)
.Select(i => i.Series)
.DistinctBy(Parser.Parser.Normalize)
.ToList();
if (nonLocalizedSeriesFound.Count == 1)
{
nonLocalizedSeries = nonLocalizedSeriesFound[0];
}
else
{
// There can be a case where there are multiple series in a folder that causes merging.
if (nonLocalizedSeriesFound.Count > 2)
{
_logger.LogError("[ScannerService] There are multiple series within one folder that contain localized series. This will cause them to group incorrectly. Please separate series into their own dedicated folder or ensure there is only 2 potential series (localized and series): {LocalizedSeries}", string.Join(", ", nonLocalizedSeriesFound));
}
nonLocalizedSeries = nonLocalizedSeriesFound.Find(s => !s.Equals(localizedSeries));
}
if (nonLocalizedSeries == null) return;
var normalizedNonLocalizedSeries = nonLocalizedSeries.ToNormalized();
foreach (var infoNeedingMapping in infos.Where(i =>
!i.Series.ToNormalized().Equals(normalizedNonLocalizedSeries)))
{
infoNeedingMapping.Series = nonLocalizedSeries;
infoNeedingMapping.LocalizedSeries = localizedSeries;
}
}
}

View File

@ -1,4 +1,5 @@
using System.IO;
using System;
using System.IO;
using API.Data.Metadata;
using API.Entities.Enums;
@ -79,7 +80,25 @@ public class BasicParser(IDirectoryService directoryService, IDefaultParser imag
// NOTE: This uses rootPath. LibraryRoot works better for manga, but it's not always that way.
// It might be worth writing some logic if the file is a special, to take the folder above the Specials/
// if present
var tempRootPath = rootPath;
if (rootPath.EndsWith("Specials") || rootPath.EndsWith("Specials/"))
{
tempRootPath = rootPath.Replace("Specials", string.Empty).TrimEnd('/');
}
// Check if the folder the file exists in is Specials/ and if so, take the parent directory as series (cleaned)
var fileDirectory = Path.GetDirectoryName(filePath);
if (!string.IsNullOrEmpty(fileDirectory) &&
(fileDirectory.EndsWith("Specials", StringComparison.OrdinalIgnoreCase) ||
fileDirectory.EndsWith("Specials/", StringComparison.OrdinalIgnoreCase)))
{
ret.Series = Parser.CleanTitle(Directory.GetParent(fileDirectory)?.Name ?? string.Empty);
}
else
{
ParseFromFallbackFolders(filePath, tempRootPath, type, ref ret);
}
}
if (string.IsNullOrEmpty(ret.Series))

View File

@ -714,8 +714,9 @@ public static class Parser
/// </summary>
/// <param name="filePath"></param>
/// <returns></returns>
public static bool HasSpecialMarker(string? filePath)
{
if (string.IsNullOrEmpty(filePath)) return false;
return SpecialMarkerRegex.IsMatch(filePath);
}
@ -728,30 +729,19 @@ public static class Parser
public static bool IsSpecial(string? filePath, LibraryType type)
{
return HasSpecialMarker(filePath);
}
private static bool IsMangaSpecial(string? filePath)
{
return HasSpecialMarker(filePath);
}
private static bool IsComicSpecial(string? filePath)
{
return HasSpecialMarker(filePath);
}
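A rough usage sketch of the simplified special detection (the SpecialMarkerRegex pattern itself is not shown in this diff; the "SP01" filename below assumes Kavita's special-marker convention and is illustrative):

// All library types now share one code path: a file is special
// if and only if its path carries the special marker.
Parser.HasSpecialMarker("Accel World SP01.cbz"); // assumed true: SP marker present
Parser.HasSpecialMarker("Accel World v01.cbz");  // false: no marker
Parser.HasSpecialMarker(null);                   // false, thanks to the new null guard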

View File

@ -59,7 +59,13 @@ public class PdfParser(IDirectoryService directoryService) : DefaultParser(direc
ret.Chapters = Parser.DefaultChapter;
ret.Volumes = Parser.SpecialVolume;
var tempRootPath = rootPath;
if (rootPath.EndsWith("Specials") || rootPath.EndsWith("Specials/"))
{
tempRootPath = rootPath.Replace("Specials", string.Empty).TrimEnd('/');
}
ParseFromFallbackFolders(filePath, tempRootPath, type, ref ret);
}
if (ret.Chapters == Parser.DefaultChapter && ret.Volumes == Parser.LooseLeafVolume && type == LibraryType.Book)

File diff suppressed because it is too large

View File

@ -1,268 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using API.Data;
using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers.Builders;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Services.Tasks.Scanner;
#nullable enable
public interface ITagManagerService
{
/// <summary>
/// Should be called once before any usage
/// </summary>
/// <returns></returns>
Task Prime();
/// <summary>
/// Should be called after all work is done, will free up memory
/// </summary>
/// <returns></returns>
void Reset();
Task<Genre?> GetGenre(string genre);
Task<Tag?> GetTag(string tag);
Task<Person?> GetPerson(string name, PersonRole role);
Task<Tuple<AppUserCollection?, bool>> GetCollectionTag(string? tag, AppUser userWithCollections);
}
/// <summary>
/// This is responsible for handling existing and new tags during the scan. When a new tag doesn't exist, it will create it.
/// This is Thread Safe.
/// </summary>
public class TagManagerService : ITagManagerService
{
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<TagManagerService> _logger;
private Dictionary<string, Genre> _genres;
private Dictionary<string, Tag> _tags;
private Dictionary<string, Person> _people;
private Dictionary<string, AppUserCollection> _collectionTags;
private readonly SemaphoreSlim _genreSemaphore = new SemaphoreSlim(1, 1);
private readonly SemaphoreSlim _tagSemaphore = new SemaphoreSlim(1, 1);
private readonly SemaphoreSlim _personSemaphore = new SemaphoreSlim(1, 1);
private readonly SemaphoreSlim _collectionTagSemaphore = new SemaphoreSlim(1, 1);
public TagManagerService(IUnitOfWork unitOfWork, ILogger<TagManagerService> logger)
{
_unitOfWork = unitOfWork;
_logger = logger;
Reset();
}
public void Reset()
{
_genres = [];
_tags = [];
_people = [];
_collectionTags = [];
}
public async Task Prime()
{
_genres = (await _unitOfWork.GenreRepository.GetAllGenresAsync()).ToDictionary(t => t.NormalizedTitle);
_tags = (await _unitOfWork.TagRepository.GetAllTagsAsync()).ToDictionary(t => t.NormalizedTitle);
_people = (await _unitOfWork.PersonRepository.GetAllPeople())
.GroupBy(GetPersonKey)
.Select(g => g.First())
.ToDictionary(GetPersonKey);
var defaultAdmin = await _unitOfWork.UserRepository.GetDefaultAdminUser()!;
_collectionTags = (await _unitOfWork.CollectionTagRepository.GetCollectionsForUserAsync(defaultAdmin.Id, CollectionIncludes.Series))
.ToDictionary(t => t.NormalizedTitle);
}
/// <summary>
/// Gets the Genre entity for the given string. If one doesn't exist, one will be created and committed.
/// </summary>
/// <param name="genre"></param>
/// <returns></returns>
public async Task<Genre?> GetGenre(string genre)
{
if (string.IsNullOrEmpty(genre)) return null;
await _genreSemaphore.WaitAsync();
try
{
if (_genres.TryGetValue(genre.ToNormalized(), out var result))
{
return result;
}
// We need to create a new Genre
result = new GenreBuilder(genre).Build();
_unitOfWork.GenreRepository.Attach(result);
await _unitOfWork.CommitAsync();
_genres.Add(result.NormalizedTitle, result);
return result;
}
finally
{
_genreSemaphore.Release();
}
}
/// <summary>
/// Gets the Tag entity for the given string. If one doesn't exist, one will be created and committed.
/// </summary>
/// <param name="tag"></param>
/// <returns></returns>
public async Task<Tag?> GetTag(string tag)
{
if (string.IsNullOrEmpty(tag)) return null;
await _tagSemaphore.WaitAsync();
try
{
if (_tags.TryGetValue(tag.ToNormalized(), out var result))
{
return result;
}
// We need to create a new Genre
result = new TagBuilder(tag).Build();
_unitOfWork.TagRepository.Attach(result);
await _unitOfWork.CommitAsync();
_tags.Add(result.NormalizedTitle, result);
return result;
}
catch (Exception ex)
{
_logger.LogCritical(ex, "There was an exception when creating a new Tag. Scan again to get this included: {Tag}", tag);
return null;
}
finally
{
_tagSemaphore.Release();
}
}
/// <summary>
/// Gets the Person entity for the given string and role. If one doesn't exist, one will be created and committed.
/// </summary>
/// <param name="name">Person Name</param>
/// <param name="role"></param>
/// <returns></returns>
public async Task<Person?> GetPerson(string name, PersonRole role)
{
if (string.IsNullOrEmpty(name)) return null;
await _personSemaphore.WaitAsync();
try
{
var key = GetPersonKey(name.ToNormalized(), role);
if (_people.TryGetValue(key, out var result))
{
return result;
}
// We need to create a new Genre
result = new PersonBuilder(name, role).Build();
_unitOfWork.PersonRepository.Attach(result);
await _unitOfWork.CommitAsync();
_people.Add(key, result);
return result;
}
catch (DbUpdateConcurrencyException ex)
{
foreach (var entry in ex.Entries)
{
if (entry.Entity is Person)
{
var proposedValues = entry.CurrentValues;
var databaseValues = await entry.GetDatabaseValuesAsync();
foreach (var property in proposedValues.Properties)
{
var proposedValue = proposedValues[property];
var databaseValue = databaseValues[property];
// TODO: decide which value should be written to database
_logger.LogDebug(ex, "There was an exception when creating a new Person: {PersonName} ({Role})", name, role);
_logger.LogDebug("Property conflict, proposed: {Proposed} vs db: {Database}", proposedValue, databaseValue);
// proposedValues[property] = <value to be saved>;
}
// Refresh original values to bypass next concurrency check
entry.OriginalValues.SetValues(databaseValues);
//return (Person) entry.Entity;
return null;
}
// else
// {
// throw new NotSupportedException(
// "Don't know how to handle concurrency conflicts for "
// + entry.Metadata.Name);
// }
}
return null;
}
catch (Exception ex)
{
_logger.LogCritical(ex, "There was an exception when creating a new Person. Scan again to get this included: {PersonName} ({Role})", name, role);
return null;
}
finally
{
_personSemaphore.Release();
}
}
private static string GetPersonKey(string normalizedName, PersonRole role)
{
return normalizedName + "_" + role;
}
private static string GetPersonKey(Person p)
{
return GetPersonKey(p.NormalizedName, p.Role);
}
/// <summary>
/// Gets the CollectionTag entity for the given string. If one doesn't exist, one will be created and committed.
/// </summary>
/// <param name="tag"></param>
/// <returns></returns>
public async Task<Tuple<AppUserCollection?, bool>> GetCollectionTag(string? tag, AppUser userWithCollections)
{
if (string.IsNullOrEmpty(tag)) return Tuple.Create<AppUserCollection?, bool>(null, false);
await _collectionTagSemaphore.WaitAsync();
AppUserCollection? result;
try
{
if (_collectionTags.TryGetValue(tag.ToNormalized(), out result))
{
return Tuple.Create<AppUserCollection?, bool>(result, false);
}
// We need to create a new Genre
result = new AppUserCollectionBuilder(tag).Build();
userWithCollections.Collections.Add(result);
_unitOfWork.UserRepository.Update(userWithCollections);
await _unitOfWork.CommitAsync();
_collectionTags.Add(result.NormalizedTitle, result);
}
catch (Exception ex)
{
_logger.LogCritical(ex, "There was an exception when creating a new Collection. Scan again to get this included: {Tag}", tag);
return Tuple.Create<AppUserCollection?, bool>(null, false);
}
finally
{
_collectionTagSemaphore.Release();
}
return Tuple.Create<AppUserCollection?, bool>(result, true);
}
}

View File

@ -12,6 +12,7 @@ using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers;
using API.Helpers.Builders;
using API.Services.Tasks.Metadata;
using API.Services.Tasks.Scanner;
using API.Services.Tasks.Scanner.Parser;
@ -156,14 +157,14 @@ public class ScannerService : IScannerService
}
}
// TODO: Figure out why we have the library type restriction here
        if (series != null)
{
if (TaskScheduler.HasScanTaskRunningForSeries(series.Id))
{
_logger.LogDebug("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this series. Dropping request", folder);
return;
}
_logger.LogInformation("[ScannerService] Scan folder invoked for {Folder}, Series matched to folder and ScanSeries enqueued for 1 minute", folder);
BackgroundJob.Schedule(() => ScanSeries(series.Id, true), TimeSpan.FromMinutes(1));
return;
@ -226,12 +227,14 @@ public class ScannerService : IScannerService
return;
}
// TODO: We need to refactor this to handle the path changes better
var folderPath = series.LowestFolderPath ?? series.FolderPath;
if (string.IsNullOrEmpty(folderPath) || !_directoryService.Exists(folderPath))
{
// We don't care if it's multiple due to new scan loop enforcing all in one root directory
var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId);
            var seriesDirs = _directoryService.FindHighestDirectoriesFromFiles(libraryPaths,
                files.Select(f => f.FilePath).ToList());
if (seriesDirs.Keys.Count == 0)
{
_logger.LogCritical("Scan Series has files spread outside a main series folder. Defaulting to library folder (this is expensive)");
@ -257,23 +260,15 @@ public class ScannerService : IScannerService
return;
}
// If the series path doesn't exist anymore, it was either moved or renamed. We need to essentially delete it
        await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
            MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Started, series.Name, 1));

        _logger.LogInformation("Beginning file scan on {SeriesName}", series.Name);
        var (scanElapsedTime, parsedSeries) = await ScanFiles(library, [folderPath],
            false, true);
_logger.LogInformation("ScanFiles for {Series} took {Time} milliseconds", series.Name, scanElapsedTime);
// We now technically have all scannedSeries, we could invoke each Series to be scanned
// Remove any parsedSeries keys that don't belong to our series. This can occur when users store 2 series in the same folder
RemoveParsedInfosNotForSeries(parsedSeries, series);
@ -309,32 +304,23 @@ public class ScannerService : IScannerService
}
}
        // At this point, parsedSeries will have at least one key and we can perform the update. If it still doesn't, just return and don't do anything
        if (parsedSeries.Count == 0) return;
// Don't allow any processing on files that aren't part of this series
var toProcess = parsedSeries.Keys.Where(key =>
key.NormalizedName.Equals(series.NormalizedName) ||
key.NormalizedName.Equals(series.OriginalName?.ToNormalized()))
.ToList();
if (toProcess.Count > 0)
{
await _processSeries.Prime();
}
var seriesLeftToProcess = toProcess.Count;
foreach (var pSeries in toProcess)
{
// Process Series
var seriesProcessStopWatch = Stopwatch.StartNew();
await _processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, seriesLeftToProcess, bypassFolderOptimizationChecks);
_logger.LogDebug("[TIME] Kavita took {Time} ms to process {SeriesName}", seriesProcessStopWatch.ElapsedMilliseconds, parsedSeries[pSeries][0].Series);
seriesLeftToProcess--;
}
_processSeries.Reset();
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, series.Name, 0));
// Tell UI that this series is done
@ -347,13 +333,16 @@ public class ScannerService : IScannerService
BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.CacheDirectory));
}
    private static Dictionary<ParsedSeries, IList<ParserInfo>> TrackFoundSeriesAndFiles(IList<ScannedSeriesResult> seenSeries)
    {
        var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();

        foreach (var series in seenSeries.Where(s => s.ParsedInfos.Count > 0 && s.HasChanged))
{
var parsedFiles = series.ParsedInfos;
parsedSeries.Add(series.ParsedSeries, parsedFiles);
}
return parsedSeries;
}
private async Task<ScanCancelReason> ShouldScanSeries(int seriesId, Library library, IList<string> libraryPaths, Series series, bool bypassFolderChecks = false)
@ -493,7 +482,7 @@ public class ScannerService : IScannerService
await ScanLibrary(lib.Id, forceUpdate, true);
}
_processSeries.Reset();
_logger.LogInformation("[ScannerService] Scan of All Libraries Finished");
}
@ -530,30 +519,20 @@ public class ScannerService : IScannerService
}
_logger.LogDebug("[ScannerService] Library {LibraryName} Step 1: Scan Files", library.Name);
var (scanElapsedTime, processedSeries) = await ScanFiles(library, libraryFolderPaths,
_logger.LogDebug("[ScannerService] Library {LibraryName} Step 1: Scan & Parse Files", library.Name);
var (scanElapsedTime, parsedSeries) = await ScanFiles(library, libraryFolderPaths,
shouldUseLibraryScan, forceUpdate);
_logger.LogDebug("[ScannerService] Library {LibraryName} Step 2: Track Found Series", library.Name);
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
TrackFoundSeriesAndFiles(parsedSeries, processedSeries);
// We need to remove any keys where there is no actual parser info
_logger.LogDebug("[ScannerService] Library {LibraryName} Step 3: Process Parsed Series", library.Name);
_logger.LogDebug("[ScannerService] Library {LibraryName} Step 2: Process and Update Database", library.Name);
var totalFiles = await ProcessParsedSeries(forceUpdate, parsedSeries, library, scanElapsedTime);
UpdateLastScanned(library);
_unitOfWork.LibraryRepository.Update(library);
_logger.LogDebug("[ScannerService] Library {LibraryName} Step 4: Save Library", library.Name);
_logger.LogDebug("[ScannerService] Library {LibraryName} Step 3: Save Library", library.Name);
if (await _unitOfWork.CommitAsync())
{
if (isSingleScan)
{
_processSeries.Reset();
}
if (totalFiles == 0)
{
_logger.LogInformation(
@ -587,54 +566,88 @@ public class ScannerService : IScannerService
{
try
{
            _logger.LogDebug("[ScannerService] Removing series that were not found during the scan");
            var removedSeries = await _unitOfWork.SeriesRepository.RemoveSeriesNotInList(parsedSeries.Keys.ToList(), library.Id);

            _logger.LogDebug("[ScannerService] Found {Count} series to remove: {SeriesList}",
                removedSeries.Count, string.Join(", ", removedSeries.Select(s => s.Name)));
// Commit the changes
await _unitOfWork.CommitAsync();
            // Notify for each removed series
            foreach (var series in removedSeries)
            {
                await _eventHub.SendMessageAsync(
                    MessageFactory.SeriesRemoved,
                    MessageFactory.SeriesRemovedEvent(series.Id, series.Name, series.LibraryId),
                    false
                );
}
_logger.LogDebug("[ScannerService] Series removal process completed");
}
catch (Exception ex)
{
_logger.LogCritical(ex, "[ScannerService] There was an issue deleting series for cleanup. Please check logs and rescan");
_logger.LogCritical(ex, "[ScannerService] Error during series cleanup. Please check logs and rescan");
}
}
private async Task<int> ProcessParsedSeries(bool forceUpdate, Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, Library library, long scanElapsedTime)
{
        // Iterate over the dictionary and remove only the ParserInfos that don't need processing
        var toProcess = new Dictionary<ParsedSeries, IList<ParserInfo>>();
var scanSw = Stopwatch.StartNew();
foreach (var series in parsedSeries)
{
// Filter out ParserInfos where FullFilePath is empty (i.e., folder not modified)
var validInfos = series.Value.Where(info => !string.IsNullOrEmpty(info.Filename)).ToList();
if (validInfos.Count != 0)
{
toProcess[series.Key] = validInfos;
}
}
if (toProcess.Count > 0)
{
// This grabs all the shared entities, like tags, genre, people. To be solved later in this refactor on how to not have blocking access.
await _processSeries.Prime();
// For all Genres in the ParserInfos, do a bulk check against the DB on what is not in the DB and create them
// This will ensure all Genres are pre-created and allow our Genre lookup (and Priming) to be much simpler. It will be slower, but more consistent.
var allGenres = toProcess
.SelectMany(s => s.Value
.SelectMany(p => p.ComicInfo?.Genre?
.Split(",", StringSplitOptions.RemoveEmptyEntries) // Split on comma and remove empty entries
.Select(g => g.Trim()) // Trim each genre
.Where(g => !string.IsNullOrWhiteSpace(g)) // Ensure no null/empty genres
?? [])); // Handle null Genre or ComicInfo safely
await CreateAllGenresAsync(allGenres.Distinct().ToList());
var allTags = toProcess
.SelectMany(s => s.Value
.SelectMany(p => p.ComicInfo?.Tags?
.Split(",", StringSplitOptions.RemoveEmptyEntries) // Split on comma and remove empty entries
                    .Select(t => t.Trim()) // Trim each tag
                    .Where(t => !string.IsNullOrWhiteSpace(t)) // Ensure no null/empty tags
?? [])); // Handle null Tag or ComicInfo safely
await CreateAllTagsAsync(allTags.Distinct().ToList());
}
        var totalFiles = 0;
        var seriesLeftToProcess = toProcess.Count;
        _logger.LogInformation("[ScannerService] Found {SeriesCount} Series that need processing in {Time} ms", toProcess.Count, scanSw.ElapsedMilliseconds + scanElapsedTime);

        foreach (var pSeries in toProcess)
        {
            totalFiles += pSeries.Value.Count;
            var seriesProcessStopWatch = Stopwatch.StartNew();
            await _processSeries.ProcessSeriesAsync(pSeries.Value, library, seriesLeftToProcess, forceUpdate);
            _logger.LogDebug("[TIME] Kavita took {Time} ms to process {SeriesName}", seriesProcessStopWatch.ElapsedMilliseconds, pSeries.Value[0].Series);
            seriesLeftToProcess--;
        }
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
MessageFactory.FileScanProgressEvent(string.Empty, library.Name, ProgressEventType.Ended));
@ -644,6 +657,7 @@ public class ScannerService : IScannerService
return totalFiles;
}
private static void UpdateLastScanned(Library library)
{
var time = DateTime.Now;
@ -655,7 +669,7 @@ public class ScannerService : IScannerService
library.UpdateLastScanned(time);
}
    private async Task<Tuple<long, Dictionary<ParsedSeries, IList<ParserInfo>>>> ScanFiles(Library library, IList<string> dirs,
bool isLibraryScan, bool forceChecks = false)
{
var scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService, _eventHub);
@ -666,12 +680,74 @@ public class ScannerService : IScannerService
var scanElapsedTime = scanWatch.ElapsedMilliseconds;
        var parsedSeries = TrackFoundSeriesAndFiles(processedSeries);
        return Tuple.Create(scanElapsedTime, parsedSeries);
}
    public static IEnumerable<Series> FindSeriesNotOnDisk(IEnumerable<Series> existingSeries, Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries)
    {
        return existingSeries.Where(es => !ParserInfoHelpers.SeriesHasMatchingParserInfoFormat(es, parsedSeries));
    }

    /// <summary>
    /// Given a list of all Genres, generates new Genre entries for any that do not exist.
    /// Does not delete anything, that will be handled by nightly task
    /// </summary>
    /// <param name="genres"></param>
    private async Task CreateAllGenresAsync(ICollection<string> genres)
    {
        _logger.LogInformation("[ScannerService] Attempting to pre-save all Genres");
try
{
// Pass the non-normalized genres directly to the repository
var nonExistingGenres = await _unitOfWork.GenreRepository.GetAllGenresNotInListAsync(genres);
// Create and attach new genres using the non-normalized names
foreach (var genre in nonExistingGenres)
{
var newGenre = new GenreBuilder(genre).Build();
_unitOfWork.GenreRepository.Attach(newGenre);
}
// Commit changes
if (nonExistingGenres.Count > 0)
{
await _unitOfWork.CommitAsync();
}
}
catch (Exception ex)
{
_logger.LogError(ex, "[ScannerService] There was an unknown issue when pre-saving all Genres");
}
}
/// <summary>
/// Given a list of all Tags, generates new Tag entries for any that do not exist.
/// Does not delete anything, that will be handled by nightly task
/// </summary>
/// <param name="tags"></param>
private async Task CreateAllTagsAsync(ICollection<string> tags)
{
_logger.LogInformation("[ScannerService] Attempting to pre-save all Tags");
try
{
// Pass the non-normalized tags directly to the repository
var nonExistingTags = await _unitOfWork.TagRepository.GetAllTagsNotInListAsync(tags);
        // Create and attach new tags using the non-normalized names
foreach (var tag in nonExistingTags)
{
var newTag = new TagBuilder(tag).Build();
_unitOfWork.TagRepository.Attach(newTag);
}
// Commit changes
if (nonExistingTags.Count > 0)
{
await _unitOfWork.CommitAsync();
}
}
catch (Exception ex)
{
_logger.LogError(ex, "[ScannerService] There was an unknown issue when pre-saving all Tags");
}
}
}

View File

@ -13,6 +13,7 @@ public static class MessageFactoryEntityTypes
public const string Chapter = "chapter";
public const string CollectionTag = "collection";
public const string ReadingList = "readingList";
public const string Person = "person";
}
public static class MessageFactory
{

View File

@ -271,6 +271,9 @@ public class Startup
await MigrateInitialInstallData.Migrate(dataContext, logger, directoryService);
await MigrateSeriesLowestFolderPath.Migrate(dataContext, logger, directoryService);
// v0.8.4
await MigrateLowestSeriesFolderPath2.Migrate(dataContext, unitOfWork, logger);
// Update the version in the DB after all migrations are run
var installVersion = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallVersion);
installVersion.Value = BuildInfo.Version.ToString();

View File

@ -65,4 +65,7 @@ If you just want to play with Swagger, you can just
- dotnet run -c Debug
- Go to http://localhost:5000/swagger/index.html
If you have a build issue around swagger run:
`swagger tofile --output ../openapi.json API/bin/Debug/net8.0/API.dll v1` to see the error and correct it
If you have any questions about any of this, please let us know.

View File

@ -3,7 +3,7 @@
<TargetFramework>net8.0</TargetFramework>
<Company>kavitareader.com</Company>
<Product>Kavita</Product>
<AssemblyVersion>0.8.3.4</AssemblyVersion>
<AssemblyVersion>0.8.3.13</AssemblyVersion>
<NeutralLanguage>en</NeutralLanguage>
<TieredPGO>true</TieredPGO>
</PropertyGroup>

View File

@ -504,7 +504,6 @@
"version": "17.3.4",
"resolved": "https://registry.npmjs.org/@angular/compiler-cli/-/compiler-cli-17.3.4.tgz",
"integrity": "sha512-TVWjpZSI/GIXTYsmVgEKYjBckcW8Aj62DcxLNehRFR+c7UB95OY3ZFjU8U4jL0XvWPgTkkVWQVq+P6N4KCBsyw==",
"dev": true,
"dependencies": {
"@babel/core": "7.23.9",
"@jridgewell/sourcemap-codec": "^1.4.14",
@ -532,7 +531,6 @@
"version": "7.23.9",
"resolved": "https://registry.npmjs.org/@babel/core/-/core-7.23.9.tgz",
"integrity": "sha512-5q0175NOjddqpvvzU+kDiSOAk4PfdO6FvwCWoQ6RO7rTzEe8vlo+4HVfcnAREhD4npMs0e9uZypjTwzZPCf/cw==",
"dev": true,
"dependencies": {
"@ampproject/remapping": "^2.2.0",
"@babel/code-frame": "^7.23.5",
@ -561,14 +559,12 @@
"node_modules/@angular/compiler-cli/node_modules/@babel/core/node_modules/convert-source-map": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
"integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
"dev": true
"integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="
},
"node_modules/@angular/compiler-cli/node_modules/@babel/core/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"bin": {
"semver": "bin/semver.js"
}
@ -749,7 +745,6 @@
"version": "7.24.0",
"resolved": "https://registry.npmjs.org/@babel/core/-/core-7.24.0.tgz",
"integrity": "sha512-fQfkg0Gjkza3nf0c7/w6Xf34BW4YvzNfACRLmmb7XRLa6XHdR+K9AlJlxneFfWYf6uhOzuzZVTjF/8KfndZANw==",
"dev": true,
"dependencies": {
"@ampproject/remapping": "^2.2.0",
"@babel/code-frame": "^7.23.5",
@ -778,14 +773,12 @@
"node_modules/@babel/core/node_modules/convert-source-map": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
"integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
"dev": true
"integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="
},
"node_modules/@babel/core/node_modules/semver": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
"dev": true,
"bin": {
"semver": "bin/semver.js"
}
@ -5629,7 +5622,6 @@
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
"integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
"dev": true,
"dependencies": {
"normalize-path": "^3.0.0",
"picomatch": "^2.0.4"
@ -5642,7 +5634,6 @@
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
"dev": true,
"engines": {
"node": ">=8.6"
},
@ -5914,7 +5905,6 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
"integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==",
"dev": true,
"engines": {
"node": ">=8"
},
@ -6226,7 +6216,6 @@
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
"integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==",
"dev": true,
"dependencies": {
"anymatch": "~3.1.2",
"braces": "~3.0.2",
@ -6518,8 +6507,7 @@
"node_modules/convert-source-map": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz",
"integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==",
"dev": true
"integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A=="
},
"node_modules/cookie": {
"version": "0.6.0",
@ -7421,7 +7409,6 @@
"version": "0.1.13",
"resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz",
"integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==",
"dev": true,
"optional": true,
"dependencies": {
"iconv-lite": "^0.6.2"
@ -7431,7 +7418,6 @@
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
"integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
"dev": true,
"optional": true,
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
@ -8540,7 +8526,6 @@
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
"dev": true,
"hasInstallScript": true,
"optional": true,
"os": [
@ -9222,7 +9207,6 @@
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
"integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
"dev": true,
"dependencies": {
"binary-extensions": "^2.0.0"
},
@ -11063,7 +11047,6 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
"integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
"dev": true,
"engines": {
"node": ">=0.10.0"
}
@ -12453,7 +12436,6 @@
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
"integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
"dev": true,
"dependencies": {
"picomatch": "^2.2.1"
},
@ -12465,7 +12447,6 @@
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
"dev": true,
"engines": {
"node": ">=8.6"
},
@ -12476,8 +12457,7 @@
"node_modules/reflect-metadata": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.2.2.tgz",
"integrity": "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q==",
"dev": true
"integrity": "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q=="
},
"node_modules/regenerate": {
"version": "1.4.2",
@ -12945,7 +12925,7 @@
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
"dev": true
"devOptional": true
},
"node_modules/sass": {
"version": "1.71.1",
@ -13064,7 +13044,6 @@
"version": "7.6.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz",
"integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==",
"dev": true,
"dependencies": {
"lru-cache": "^6.0.0"
},
@ -13079,7 +13058,6 @@
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dev": true,
"dependencies": {
"yallist": "^4.0.0"
},
@ -13090,8 +13068,7 @@
"node_modules/semver/node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"dev": true
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/send": {
"version": "0.18.0",
@ -14222,7 +14199,6 @@
"version": "5.4.5",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz",
"integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==",
"dev": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"

View File

@ -125,7 +125,6 @@ $image-width: 160px;
.overlay-information--centered {
position: absolute;
border-radius: 15px;
background-color: rgba(0, 0, 0, 0.7);
border-radius: 50px;
top: 50%;
@ -169,7 +168,7 @@ $image-width: 160px;
margin: 0;
text-align: center;
max-width: 98px;
a {
overflow: hidden;
text-overflow: ellipsis;

View File

@ -17,7 +17,15 @@ export enum PersonRole {
}
export interface Person {
id: number;
name: string;
description: string;
coverImage?: string;
coverImageLocked: boolean;
malId?: number;
aniListId?: number;
hardcoverId?: string;
asin?: string;
primaryColor?: string;
secondaryColor?: string;
}

View File

@ -1,3 +1,5 @@
import {PersonRole} from "../person";
export enum FilterField
{
None = -1,
@ -47,3 +49,36 @@ enumArray.sort((a, b) => a.value.localeCompare(b.value));
export const allFields = enumArray
.map(key => parseInt(key.key, 10))as FilterField[];
export const allPeople = [
FilterField.Characters,
FilterField.Colorist,
FilterField.CoverArtist,
FilterField.Editor,
FilterField.Inker,
FilterField.Letterer,
FilterField.Penciller,
FilterField.Publisher,
FilterField.Translators,
FilterField.Writers,
];
export const personRoleForFilterField = (role: PersonRole) => {
switch (role) {
case PersonRole.Artist: return FilterField.CoverArtist;
case PersonRole.Character: return FilterField.Characters;
case PersonRole.Colorist: return FilterField.Colorist;
case PersonRole.CoverArtist: return FilterField.CoverArtist;
case PersonRole.Editor: return FilterField.Editor;
case PersonRole.Inker: return FilterField.Inker;
case PersonRole.Letterer: return FilterField.Letterer;
case PersonRole.Penciller: return FilterField.Penciller;
case PersonRole.Publisher: return FilterField.Publisher;
case PersonRole.Translator: return FilterField.Translators;
case PersonRole.Writer: return FilterField.Writers;
case PersonRole.Imprint: return FilterField.Imprint;
case PersonRole.Location: return FilterField.Location;
case PersonRole.Team: return FilterField.Team;
case PersonRole.Other: return FilterField.None;
}
};

View File

@ -0,0 +1,6 @@
import {Person} from "../metadata/person";
export interface BrowsePerson extends Person {
seriesCount: number;
issueCount: number;
}

View File

@ -4,14 +4,18 @@ import { MangaFile } from "../manga-file";
import { SearchResult } from "./search-result";
import { Tag } from "../tag";
import {BookmarkSearchResult} from "./bookmark-search-result";
import {Genre} from "../metadata/genre";
import {ReadingList} from "../reading-list";
import {UserCollection} from "../collection-tag";
import {Person} from "../metadata/person";
export class SearchResultGroup {
libraries: Array<Library> = [];
series: Array<SearchResult> = [];
collections: Array<UserCollection> = [];
readingLists: Array<ReadingList> = [];
persons: Array<Person> = [];
genres: Array<Genre> = [];
tags: Array<Tag> = [];
files: Array<MangaFile> = [];
chapters: Array<Chapter> = [];

View File

@ -7,4 +7,5 @@ export enum SideNavStreamType {
ExternalSource = 6,
AllSeries = 7,
WantToRead = 8,
BrowseAuthors = 9
}

View File

@ -1,5 +1,5 @@
import {SideNavStreamType} from "./sidenav-stream-type.enum";
import {Library, LibraryType} from "../library/library";
import {Library} from "../library/library";
import {CommonStream} from "../common-stream";
import {ExternalSource} from "./external-source";

View File

@ -0,0 +1,9 @@
import {Chapter} from "./chapter";
import {LibraryType} from "./library/library";
export interface StandaloneChapter extends Chapter {
seriesId: number;
libraryId: number;
libraryType: LibraryType;
volumeTitle?: string;
}

View File

@ -18,7 +18,7 @@ export class PersonRolePipe implements PipeTransform {
case PersonRole.Colorist:
return this.translocoService.translate('person-role-pipe.colorist');
case PersonRole.CoverArtist:
return this.translocoService.translate('person-role-pipe.artist');
case PersonRole.Editor:
return this.translocoService.translate('person-role-pipe.editor');
case PersonRole.Inker:

Some files were not shown because too many files have changed in this diff