Sort Order Fix on Volume Detail Page (#3216)

This commit is contained in:
Joe Milazzo 2024-09-23 10:52:34 -05:00 committed by GitHub
parent ee253820f6
commit 0a13cc8454
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
12 changed files with 131 additions and 103 deletions

View File

@ -112,8 +112,18 @@ public class ScannerServiceTests : AbstractDbTest
Assert.NotNull(postLib); Assert.NotNull(postLib);
Assert.Equal(4, postLib.Series.Count); Assert.Equal(4, postLib.Series.Count);
}
Assert.True(true); [Fact]
public async Task ScanLibrary_ShouldCombineNestedFolder()
{
var testcase = "Series and Series-Series Combined - Manga.json";
var postLib = await GenerateScannerData(testcase);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
Assert.Single(postLib.Series);
Assert.Equal(2, postLib.Series.First().Volumes.Count);
} }
private async Task<Library> GenerateScannerData(string testcase) private async Task<Library> GenerateScannerData(string testcase)

View File

@ -0,0 +1,4 @@
[
"Dress Up Darling/Dress Up Darling Ch 01.cbz",
"Dress Up Darling/Dress Up Darling/Dress Up Darling Vol 01.cbz"
]

View File

@ -127,9 +127,18 @@ public class VolumeRepository : IVolumeRepository
if (includeChapters) if (includeChapters)
{ {
query = query.Include(v => v.Chapters).AsSplitQuery(); query = query
.Includes(VolumeIncludes.Chapters)
.AsSplitQuery();
} }
return await query.ToListAsync(); var volumes = await query.ToListAsync();
foreach (var volume in volumes)
{
volume.Chapters = volume.Chapters.OrderBy(c => c.SortOrder).ToList();
}
return volumes;
} }
/// <summary> /// <summary>
@ -142,12 +151,11 @@ public class VolumeRepository : IVolumeRepository
{ {
var volume = await _context.Volume var volume = await _context.Volume
.Where(vol => vol.Id == volumeId) .Where(vol => vol.Id == volumeId)
.Include(vol => vol.Chapters) .Includes(VolumeIncludes.Chapters | VolumeIncludes.Files)
.ThenInclude(c => c.Files)
.AsSplitQuery() .AsSplitQuery()
.OrderBy(v => v.MinNumber) .OrderBy(v => v.MinNumber)
.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider) .ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
.SingleOrDefaultAsync(vol => vol.Id == volumeId); .FirstOrDefaultAsync(vol => vol.Id == volumeId);
if (volume == null) return null; if (volume == null) return null;
@ -166,8 +174,7 @@ public class VolumeRepository : IVolumeRepository
{ {
return await _context.Volume return await _context.Volume
.Where(vol => vol.SeriesId == seriesId) .Where(vol => vol.SeriesId == seriesId)
.Include(vol => vol.Chapters) .Includes(VolumeIncludes.Chapters | VolumeIncludes.Files)
.ThenInclude(c => c.Files)
.AsSplitQuery() .AsSplitQuery()
.OrderBy(vol => vol.MinNumber) .OrderBy(vol => vol.MinNumber)
.ToListAsync(); .ToListAsync();
@ -205,24 +212,19 @@ public class VolumeRepository : IVolumeRepository
await AddVolumeModifiers(userId, volumes); await AddVolumeModifiers(userId, volumes);
foreach (var volume in volumes)
{
volume.Chapters = volume.Chapters.OrderBy(c => c.SortOrder).ToList();
}
return volumes; return volumes;
} }
public async Task<Volume?> GetVolumeByIdAsync(int volumeId) public async Task<Volume?> GetVolumeByIdAsync(int volumeId)
{ {
return await _context.Volume.SingleOrDefaultAsync(x => x.Id == volumeId); return await _context.Volume.FirstOrDefaultAsync(x => x.Id == volumeId);
} }
public async Task<IList<Volume>> GetAllWithCoversInDifferentEncoding(EncodeFormat encodeFormat) public async Task<IList<Volume>> GetAllWithCoversInDifferentEncoding(EncodeFormat encodeFormat)
{ {
var extension = encodeFormat.GetExtension(); var extension = encodeFormat.GetExtension();
return await _context.Volume return await _context.Volume
.Include(v => v.Chapters) .Includes(VolumeIncludes.Chapters)
.Where(c => !string.IsNullOrEmpty(c.CoverImage) && !c.CoverImage.EndsWith(extension)) .Where(c => !string.IsNullOrEmpty(c.CoverImage) && !c.CoverImage.EndsWith(extension))
.AsSplitQuery() .AsSplitQuery()
.ToListAsync(); .ToListAsync();

View File

@ -80,25 +80,25 @@ public static class IncludesExtensions
if (includes.HasFlag(VolumeIncludes.Files)) if (includes.HasFlag(VolumeIncludes.Files))
{ {
queryable = queryable queryable = queryable
.Include(vol => vol.Chapters.OrderBy(c => c.SortOrder)) .Include(vol => vol.Chapters)
.ThenInclude(c => c.Files); .ThenInclude(c => c.Files);
} else if (includes.HasFlag(VolumeIncludes.Chapters)) } else if (includes.HasFlag(VolumeIncludes.Chapters))
{ {
queryable = queryable queryable = queryable
.Include(vol => vol.Chapters.OrderBy(c => c.SortOrder)); .Include(vol => vol.Chapters);
} }
if (includes.HasFlag(VolumeIncludes.People)) if (includes.HasFlag(VolumeIncludes.People))
{ {
queryable = queryable queryable = queryable
.Include(vol => vol.Chapters.OrderBy(c => c.SortOrder)) .Include(vol => vol.Chapters)
.ThenInclude(c => c.People); .ThenInclude(c => c.People);
} }
if (includes.HasFlag(VolumeIncludes.Tags)) if (includes.HasFlag(VolumeIncludes.Tags))
{ {
queryable = queryable queryable = queryable
.Include(vol => vol.Chapters.OrderBy(c => c.SortOrder)) .Include(vol => vol.Chapters)
.ThenInclude(c => c.Tags); .ThenInclude(c => c.Tags);
} }

View File

@ -51,7 +51,10 @@ public class AutoMapperProfiles : Profile
.ForMember(dest => dest.Series, opt => opt.MapFrom(src => src.Series)); .ForMember(dest => dest.Series, opt => opt.MapFrom(src => src.Series));
CreateMap<LibraryDto, Library>(); CreateMap<LibraryDto, Library>();
CreateMap<Volume, VolumeDto>() CreateMap<Volume, VolumeDto>()
.ForMember(dest => dest.Number, opt => opt.MapFrom(src => (int) src.MinNumber)); .ForMember(dest => dest.Number,
opt => opt.MapFrom(src => (int) src.MinNumber))
.ForMember(dest => dest.Chapters,
opt => opt.MapFrom(src => src.Chapters.OrderBy(c => c.SortOrder)));
CreateMap<MangaFile, MangaFileDto>(); CreateMap<MangaFile, MangaFileDto>();
CreateMap<Series, SeriesDto>(); CreateMap<Series, SeriesDto>();
CreateMap<CollectionTag, CollectionTagDto>(); CreateMap<CollectionTag, CollectionTagDto>();

View File

@ -7,6 +7,7 @@ using API.Data;
using API.DTOs.Reader; using API.DTOs.Reader;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
using API.Extensions;
using Hangfire; using Hangfire;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
@ -90,6 +91,13 @@ public class BookmarkService : IBookmarkService
var bookmark = await _unitOfWork.UserRepository.GetBookmarkAsync(bookmarkId); var bookmark = await _unitOfWork.UserRepository.GetBookmarkAsync(bookmarkId);
if (bookmark == null) return; if (bookmark == null) return;
// Validate the bookmark isn't already in target format
if (bookmark.FileName.EndsWith(encodeFormat.GetExtension()))
{
// Nothing to do
return;
}
bookmark.FileName = await _mediaConversionService.SaveAsEncodingFormat(bookmarkDirectory, bookmark.FileName, bookmark.FileName = await _mediaConversionService.SaveAsEncodingFormat(bookmarkDirectory, bookmark.FileName,
BookmarkStem(bookmark.AppUserId, bookmark.SeriesId, bookmark.ChapterId), encodeFormat); BookmarkStem(bookmark.AppUserId, bookmark.SeriesId, bookmark.ChapterId), encodeFormat);
_unitOfWork.UserRepository.Update(bookmark); _unitOfWork.UserRepository.Update(bookmark);
@ -137,7 +145,7 @@ public class BookmarkService : IBookmarkService
_unitOfWork.UserRepository.Add(bookmark); _unitOfWork.UserRepository.Add(bookmark);
await _unitOfWork.CommitAsync(); await _unitOfWork.CommitAsync();
if (settings.EncodeMediaAs == EncodeFormat.WEBP) if (settings.EncodeMediaAs != EncodeFormat.PNG)
{ {
// Enqueue a task to convert the bookmark to webP // Enqueue a task to convert the bookmark to webP
BackgroundJob.Enqueue(() => ConvertBookmarkToEncoding(bookmark.Id)); BackgroundJob.Enqueue(() => ConvertBookmarkToEncoding(bookmark.Id));

View File

@ -453,10 +453,6 @@ public class SeriesService : ISeriesService
continue; continue;
} }
volume.Chapters = volume.Chapters
.OrderBy(d => d.MinNumber, ChapterSortComparerDefaultLast.Default)
.ToList();
if (RenameVolumeName(volume, libraryType, volumeLabel) || (bookTreatment && !volume.IsSpecial())) if (RenameVolumeName(volume, libraryType, volumeLabel) || (bookTreatment && !volume.IsSpecial()))
{ {
processedVolumes.Add(volume); processedVolumes.Add(volume);

View File

@ -220,6 +220,7 @@ public class TaskScheduler : ITaskScheduler
public void AnalyzeFilesForLibrary(int libraryId, bool forceUpdate = false) public void AnalyzeFilesForLibrary(int libraryId, bool forceUpdate = false)
{ {
_logger.LogInformation("Enqueuing library file analysis for: {LibraryId}", libraryId);
BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanLibrary(libraryId, forceUpdate)); BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanLibrary(libraryId, forceUpdate));
} }

View File

@ -435,7 +435,15 @@ public class ParseScannedFiles
{ {
if (scannedSeries[series].Count <= 0) continue; if (scannedSeries[series].Count <= 0) continue;
try
{
UpdateSortOrder(scannedSeries, series); UpdateSortOrder(scannedSeries, series);
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an issue setting IssueOrder");
}
processedScannedSeries.Add(new ScannedSeriesResult() processedScannedSeries.Add(new ScannedSeriesResult()
{ {
@ -500,9 +508,7 @@ public class ParseScannedFiles
} }
private void UpdateSortOrder(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries, ParsedSeries series) public static void UpdateSortOrder(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries, ParsedSeries series)
{
try
{ {
// Set the Sort order per Volume // Set the Sort order per Volume
var volumes = scannedSeries[series].GroupBy(info => info.Volumes); var volumes = scannedSeries[series].GroupBy(info => info.Volumes);
@ -514,6 +520,7 @@ public class ParseScannedFiles
var hasAnySpMarker = infos.Exists(info => info.SpecialIndex > 0); var hasAnySpMarker = infos.Exists(info => info.SpecialIndex > 0);
var counter = 0f; var counter = 0f;
// Handle specials with SpecialIndex
if (specialTreatment && hasAnySpMarker) if (specialTreatment && hasAnySpMarker)
{ {
chapters = infos chapters = infos
@ -528,8 +535,7 @@ public class ParseScannedFiles
continue; continue;
} }
// Handle specials without SpecialIndex (natural order)
// If everything is a special but we don't have any SpecialIndex, then order naturally and use 0, 1, 2
if (specialTreatment) if (specialTreatment)
{ {
chapters = infos chapters = infos
@ -544,32 +550,34 @@ public class ParseScannedFiles
continue; continue;
} }
// Ensure chapters are sorted numerically when possible, otherwise push unparseable to the end
chapters = infos chapters = infos
.OrderByNatural(info => info.Chapters, StringComparer.InvariantCulture) .OrderBy(info => float.TryParse(info.Chapters, NumberStyles.Any, CultureInfo.InvariantCulture, out var val) ? val : float.MaxValue)
.ToList(); .ToList();
counter = 0f; counter = 0f;
var prevIssue = string.Empty; var prevIssue = string.Empty;
foreach (var chapter in chapters) foreach (var chapter in chapters)
{ {
if (float.TryParse(chapter.Chapters, CultureInfo.InvariantCulture, out var parsedChapter)) if (float.TryParse(chapter.Chapters, NumberStyles.Any, CultureInfo.InvariantCulture, out var parsedChapter))
{ {
// Parsed successfully, use the numeric value
counter = parsedChapter; counter = parsedChapter;
chapter.IssueOrder = counter;
// Increment for next chapter (unless the next has a similar value, then add 0.1)
if (!string.IsNullOrEmpty(prevIssue) && float.TryParse(prevIssue, CultureInfo.InvariantCulture, out var prevIssueFloat) && parsedChapter.Is(prevIssueFloat)) if (!string.IsNullOrEmpty(prevIssue) && float.TryParse(prevIssue, CultureInfo.InvariantCulture, out var prevIssueFloat) && parsedChapter.Is(prevIssueFloat))
{ {
// Bump by 0.1 counter += 0.1f; // bump if same value as the previous issue
counter += 0.1f;
} }
chapter.IssueOrder = counter;
prevIssue = $"{parsedChapter}"; prevIssue = $"{parsedChapter}";
} }
else else
{ {
// I need to bump by 0.1f as if the prevIssue matches counter // Unparsed chapters: use the current counter and bump for the next
if (!string.IsNullOrEmpty(prevIssue) && prevIssue == counter + "") if (!string.IsNullOrEmpty(prevIssue) && prevIssue == counter.ToString(CultureInfo.InvariantCulture))
{ {
// Bump by 0.1 counter += 0.1f; // bump if same value as the previous issue
counter += 0.1f;
} }
chapter.IssueOrder = counter; chapter.IssueOrder = counter;
counter++; counter++;
@ -578,10 +586,12 @@ public class ParseScannedFiles
} }
} }
} }
catch (Exception ex)
private bool HasAllSeriesFolderNotChangedSinceLastScan(IList<SeriesModified> seriesFolders,
string normalizedFolder)
{ {
_logger.LogError(ex, "There was an issue setting IssueOrder"); return seriesFolders.All(f => HasSeriesFolderNotChangedSinceLastScan(f, normalizedFolder));
}
} }
/// <summary> /// <summary>
@ -603,12 +613,6 @@ public class ParseScannedFiles
return false; return false;
} }
private bool HasAllSeriesFolderNotChangedSinceLastScan(IList<SeriesModified> seriesFolders,
string normalizedFolder)
{
return seriesFolders.All(f => HasSeriesFolderNotChangedSinceLastScan(f, normalizedFolder));
}
private bool HasSeriesFolderNotChangedSinceLastScan(SeriesModified seriesModified, string normalizedFolder) private bool HasSeriesFolderNotChangedSinceLastScan(SeriesModified seriesModified, string normalizedFolder)
{ {
return seriesModified.LastScanned.Truncate(TimeSpan.TicksPerSecond) >= return seriesModified.LastScanned.Truncate(TimeSpan.TicksPerSecond) >=

View File

@ -812,7 +812,7 @@ export class MangaReaderComponent implements OnInit, AfterViewInit, OnDestroy {
&& (this.readerService.imageUrlToChapterId(img.src) == chapterId || this.readerService.imageUrlToChapterId(img.src) === -1) && (this.readerService.imageUrlToChapterId(img.src) == chapterId || this.readerService.imageUrlToChapterId(img.src) === -1)
); );
console.log('Requesting page ', pageNum, ' found page: ', img, ' and app is requesting new image? ', forceNew); //console.log('Requesting page ', pageNum, ' found page: ', img, ' and app is requesting new image? ', forceNew);
if (!img || forceNew) { if (!img || forceNew) {
img = new Image(); img = new Image();
img.src = this.getPageUrl(pageNum, chapterId); img.src = this.getPageUrl(pageNum, chapterId);

View File

@ -118,7 +118,8 @@ export class GroupedTypeaheadComponent implements OnInit {
@HostListener('window:click', ['$event']) @HostListener('window:click', ['$event'])
handleDocumentClick(event: any) { handleDocumentClick(event: MouseEvent) {
console.log('click: ', event)
this.close(); this.close();
} }
@ -197,7 +198,7 @@ export class GroupedTypeaheadComponent implements OnInit {
} }
toggleIncludeFiles(val: boolean) { toggleIncludeFiles(val: boolean) {
const firstRun = val === false && val === this.includeChapterAndFiles; const firstRun = !val && val === this.includeChapterAndFiles;
this.includeChapterAndFiles = val; this.includeChapterAndFiles = val;
this.inputChanged.emit({value: this.searchTerm, includeFiles: this.includeChapterAndFiles}); this.inputChanged.emit({value: this.searchTerm, includeFiles: this.includeChapterAndFiles});

View File

@ -666,7 +666,6 @@ export class VolumeDetailComponent implements OnInit {
const chaptersWithProgress = this.volume.chapters.filter(c => c.pagesRead < c.pages); const chaptersWithProgress = this.volume.chapters.filter(c => c.pagesRead < c.pages);
if (chaptersWithProgress.length > 0 && this.volume.chapters.length > 1) { if (chaptersWithProgress.length > 0 && this.volume.chapters.length > 1) {
this.currentlyReadingChapter = chaptersWithProgress[0]; this.currentlyReadingChapter = chaptersWithProgress[0];
console.log('Updating currentlyReading chapter', this.currentlyReadingChapter)
this.cdRef.markForCheck(); this.cdRef.markForCheck();
} else { } else {
this.currentlyReadingChapter = undefined; this.currentlyReadingChapter = undefined;