diff --git a/API.Tests/Services/ScannerServiceTests.cs b/API.Tests/Services/ScannerServiceTests.cs
index 26ba00497..67b93273b 100644
--- a/API.Tests/Services/ScannerServiceTests.cs
+++ b/API.Tests/Services/ScannerServiceTests.cs
@@ -112,8 +112,18 @@ public class ScannerServiceTests : AbstractDbTest
         Assert.NotNull(postLib);
         Assert.Equal(4, postLib.Series.Count);
+    }
 
-        Assert.True(true);
+    [Fact]
+    public async Task ScanLibrary_ShouldCombineNestedFolder()
+    {
+        var testcase = "Series and Series-Series Combined - Manga.json";
+        var postLib = await GenerateScannerData(testcase);
+
+        Assert.NotNull(postLib);
+        Assert.Single(postLib.Series);
+        Assert.Single(postLib.Series);
+        Assert.Equal(2, postLib.Series.First().Volumes.Count);
     }
 
     private async Task GenerateScannerData(string testcase)
diff --git a/API.Tests/Services/Test Data/ScannerService/TestCases/Series and Series-Series Combined - Manga.json b/API.Tests/Services/Test Data/ScannerService/TestCases/Series and Series-Series Combined - Manga.json
new file mode 100644
index 000000000..410994952
--- /dev/null
+++ b/API.Tests/Services/Test Data/ScannerService/TestCases/Series and Series-Series Combined - Manga.json
@@ -0,0 +1,4 @@
+[
+  "Dress Up Darling/Dress Up Darling Ch 01.cbz",
+  "Dress Up Darling/Dress Up Darling/Dress Up Darling Vol 01.cbz"
+]
diff --git a/API/Data/Repositories/VolumeRepository.cs b/API/Data/Repositories/VolumeRepository.cs
index 48159386d..0dfbd6393 100644
--- a/API/Data/Repositories/VolumeRepository.cs
+++ b/API/Data/Repositories/VolumeRepository.cs
@@ -127,9 +127,18 @@ public class VolumeRepository : IVolumeRepository
         if (includeChapters)
         {
-            query = query.Include(v => v.Chapters).AsSplitQuery();
+            query = query
+                .Includes(VolumeIncludes.Chapters)
+                .AsSplitQuery();
         }
 
-        return await query.ToListAsync();
+        var volumes = await query.ToListAsync();
+
+        foreach (var volume in volumes)
+        {
+            volume.Chapters = volume.Chapters.OrderBy(c => c.SortOrder).ToList();
+        }
+
+        return volumes;
     }
 
     ///
@@ -142,12 +151,11 @@ public class VolumeRepository : IVolumeRepository
     {
         var volume = await _context.Volume
            .Where(vol => vol.Id == volumeId)
-            .Include(vol => vol.Chapters)
-            .ThenInclude(c => c.Files)
+            .Includes(VolumeIncludes.Chapters | VolumeIncludes.Files)
            .AsSplitQuery()
            .OrderBy(v => v.MinNumber)
            .ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
-            .SingleOrDefaultAsync(vol => vol.Id == volumeId);
+            .FirstOrDefaultAsync(vol => vol.Id == volumeId);
 
         if (volume == null) return null;
@@ -166,8 +174,7 @@
     {
         return await _context.Volume
            .Where(vol => vol.SeriesId == seriesId)
-            .Include(vol => vol.Chapters)
-            .ThenInclude(c => c.Files)
+            .Includes(VolumeIncludes.Chapters | VolumeIncludes.Files)
            .AsSplitQuery()
            .OrderBy(vol => vol.MinNumber)
            .ToListAsync();
@@ -205,24 +212,19 @@
         await AddVolumeModifiers(userId, volumes);
 
-        foreach (var volume in volumes)
-        {
-            volume.Chapters = volume.Chapters.OrderBy(c => c.SortOrder).ToList();
-        }
-
         return volumes;
     }
 
     public async Task GetVolumeByIdAsync(int volumeId)
     {
-        return await _context.Volume.SingleOrDefaultAsync(x => x.Id == volumeId);
+        return await _context.Volume.FirstOrDefaultAsync(x => x.Id == volumeId);
     }
 
     public async Task<IList<Volume>> GetAllWithCoversInDifferentEncoding(EncodeFormat encodeFormat)
     {
         var extension = encodeFormat.GetExtension();
         return await _context.Volume
-            .Include(v => v.Chapters)
+            .Includes(VolumeIncludes.Chapters)
            .Where(c => !string.IsNullOrEmpty(c.CoverImage) && !c.CoverImage.EndsWith(extension))
            .AsSplitQuery()
            .ToListAsync();
diff --git a/API/Extensions/QueryExtensions/IncludesExtensions.cs b/API/Extensions/QueryExtensions/IncludesExtensions.cs
index bcd242bcc..49f183590 100644
--- a/API/Extensions/QueryExtensions/IncludesExtensions.cs
+++ b/API/Extensions/QueryExtensions/IncludesExtensions.cs
@@ -80,25 +80,25 @@ public static class IncludesExtensions
         if (includes.HasFlag(VolumeIncludes.Files))
         {
             queryable = queryable
-                .Include(vol => vol.Chapters.OrderBy(c => c.SortOrder))
+                .Include(vol => vol.Chapters)
                 .ThenInclude(c => c.Files);
         }
         else if (includes.HasFlag(VolumeIncludes.Chapters))
         {
             queryable = queryable
-                .Include(vol => vol.Chapters.OrderBy(c => c.SortOrder));
+                .Include(vol => vol.Chapters);
         }
 
         if (includes.HasFlag(VolumeIncludes.People))
         {
             queryable = queryable
-                .Include(vol => vol.Chapters.OrderBy(c => c.SortOrder))
+                .Include(vol => vol.Chapters)
                 .ThenInclude(c => c.People);
         }
 
         if (includes.HasFlag(VolumeIncludes.Tags))
         {
             queryable = queryable
-                .Include(vol => vol.Chapters.OrderBy(c => c.SortOrder))
+                .Include(vol => vol.Chapters)
                 .ThenInclude(c => c.Tags);
         }
diff --git a/API/Helpers/AutoMapperProfiles.cs b/API/Helpers/AutoMapperProfiles.cs
index 06a1a4b2e..fb77179b3 100644
--- a/API/Helpers/AutoMapperProfiles.cs
+++ b/API/Helpers/AutoMapperProfiles.cs
@@ -51,7 +51,10 @@ public class AutoMapperProfiles : Profile
             .ForMember(dest => dest.Series, opt => opt.MapFrom(src => src.Series));
         CreateMap();
         CreateMap<Volume, VolumeDto>()
-            .ForMember(dest => dest.Number, opt => opt.MapFrom(src => (int) src.MinNumber));
+            .ForMember(dest => dest.Number,
+                opt => opt.MapFrom(src => (int) src.MinNumber))
+            .ForMember(dest => dest.Chapters,
+                opt => opt.MapFrom(src => src.Chapters.OrderBy(c => c.SortOrder)));
         CreateMap();
         CreateMap();
         CreateMap();
diff --git a/API/Services/BookmarkService.cs b/API/Services/BookmarkService.cs
index f28ef9f74..4cd77ddd9 100644
--- a/API/Services/BookmarkService.cs
+++ b/API/Services/BookmarkService.cs
@@ -7,6 +7,7 @@ using API.Data;
 using API.DTOs.Reader;
 using API.Entities;
 using API.Entities.Enums;
+using API.Extensions;
 using Hangfire;
 using Microsoft.Extensions.Logging;
@@ -90,6 +91,13 @@ public class BookmarkService : IBookmarkService
         var bookmark = await _unitOfWork.UserRepository.GetBookmarkAsync(bookmarkId);
         if (bookmark == null) return;
 
+        // Validate the bookmark isn't already in target format
+        if (bookmark.FileName.EndsWith(encodeFormat.GetExtension()))
+        {
+            // Nothing to do
+            return;
+        }
+
         bookmark.FileName = await _mediaConversionService.SaveAsEncodingFormat(bookmarkDirectory, bookmark.FileName,
             BookmarkStem(bookmark.AppUserId, bookmark.SeriesId, bookmark.ChapterId), encodeFormat);
         _unitOfWork.UserRepository.Update(bookmark);
@@ -137,7 +145,7 @@
         _unitOfWork.UserRepository.Add(bookmark);
         await _unitOfWork.CommitAsync();
 
-        if (settings.EncodeMediaAs == EncodeFormat.WEBP)
+        if (settings.EncodeMediaAs != EncodeFormat.PNG)
         {
             // Enqueue a task to convert the bookmark to webP
             BackgroundJob.Enqueue(() => ConvertBookmarkToEncoding(bookmark.Id));
diff --git a/API/Services/SeriesService.cs b/API/Services/SeriesService.cs
index 473035c20..72eee3672 100644
--- a/API/Services/SeriesService.cs
+++ b/API/Services/SeriesService.cs
@@ -453,10 +453,6 @@ public class SeriesService : ISeriesService
                 continue;
             }
 
-            volume.Chapters = volume.Chapters
-                .OrderBy(d => d.MinNumber, ChapterSortComparerDefaultLast.Default)
-                .ToList();
-
             if (RenameVolumeName(volume, libraryType, volumeLabel) ||
                 (bookTreatment && !volume.IsSpecial()))
             {
                 processedVolumes.Add(volume);
diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs
index 4e183feb1..fda3d923f 100644
--- a/API/Services/TaskScheduler.cs
+++ b/API/Services/TaskScheduler.cs
@@ -220,6 +220,7 @@
     public void AnalyzeFilesForLibrary(int libraryId, bool forceUpdate = false)
     {
+        _logger.LogInformation("Enqueuing library file analysis for: {LibraryId}", libraryId);
         BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanLibrary(libraryId, forceUpdate));
     }
diff --git a/API/Services/Tasks/Scanner/ParseScannedFiles.cs b/API/Services/Tasks/Scanner/ParseScannedFiles.cs
index 5c74272c1..16586bc9d 100644
--- a/API/Services/Tasks/Scanner/ParseScannedFiles.cs
+++ b/API/Services/Tasks/Scanner/ParseScannedFiles.cs
@@ -435,7 +435,15 @@
         {
             if (scannedSeries[series].Count <= 0) continue;
 
-            UpdateSortOrder(scannedSeries, series);
+            try
+            {
+                UpdateSortOrder(scannedSeries, series);
+            }
+            catch (Exception ex)
+            {
+                _logger.LogError(ex, "There was an issue setting IssueOrder");
+            }
+
             processedScannedSeries.Add(new ScannedSeriesResult()
             {
@@ -500,88 +508,90 @@
     }
 
-    private void UpdateSortOrder(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries, ParsedSeries series)
+    public static void UpdateSortOrder(ConcurrentDictionary<ParsedSeries, List<ParserInfo>> scannedSeries, ParsedSeries series)
     {
-        try
+        // Set the Sort order per Volume
+        var volumes = scannedSeries[series].GroupBy(info => info.Volumes);
+        foreach (var volume in volumes)
         {
-            // Set the Sort order per Volume
-            var volumes = scannedSeries[series].GroupBy(info => info.Volumes);
-            foreach (var volume in volumes)
+            var infos = scannedSeries[series].Where(info => info.Volumes == volume.Key).ToList();
+            IList<ParserInfo> chapters;
+            var specialTreatment = infos.TrueForAll(info => info.IsSpecial);
+            var hasAnySpMarker = infos.Exists(info => info.SpecialIndex > 0);
+            var counter = 0f;
+
+            // Handle specials with SpecialIndex
+            if (specialTreatment && hasAnySpMarker)
             {
-                var infos = scannedSeries[series].Where(info => info.Volumes == volume.Key).ToList();
-                IList<ParserInfo> chapters;
-                var specialTreatment = infos.TrueForAll(info => info.IsSpecial);
-                var hasAnySpMarker = infos.Exists(info => info.SpecialIndex > 0);
-                var counter = 0f;
-
-                if (specialTreatment && hasAnySpMarker)
-                {
-                    chapters = infos
-                        .OrderBy(info => info.SpecialIndex)
-                        .ToList();
-
-                    foreach (var chapter in chapters)
-                    {
-                        chapter.IssueOrder = counter;
-                        counter++;
-                    }
-                    continue;
-                }
-
-
-                // If everything is a special but we don't have any SpecialIndex, then order naturally and use 0, 1, 2
-                if (specialTreatment)
-                {
-                    chapters = infos
-                        .OrderByNatural(info => Parser.Parser.RemoveExtensionIfSupported(info.Filename)!)
-                        .ToList();
-
-                    foreach (var chapter in chapters)
-                    {
-                        chapter.IssueOrder = counter;
-                        counter++;
-                    }
-                    continue;
-                }
-
                 chapters = infos
-                    .OrderByNatural(info => info.Chapters, StringComparer.InvariantCulture)
+                    .OrderBy(info => info.SpecialIndex)
                     .ToList();
 
-                counter = 0f;
-                var prevIssue = string.Empty;
                 foreach (var chapter in chapters)
                 {
-                    if (float.TryParse(chapter.Chapters, CultureInfo.InvariantCulture, out var parsedChapter))
+                    chapter.IssueOrder = counter;
+                    counter++;
+                }
+                continue;
+            }
+
+            // Handle specials without SpecialIndex (natural order)
+            if (specialTreatment)
+            {
+                chapters = infos
+                    .OrderByNatural(info => Parser.Parser.RemoveExtensionIfSupported(info.Filename)!)
+                    .ToList();
+
+                foreach (var chapter in chapters)
+                {
+                    chapter.IssueOrder = counter;
+                    counter++;
+                }
+                continue;
+            }
+
+            // Ensure chapters are sorted numerically when possible, otherwise push unparseable to the end
+            chapters = infos
+                .OrderBy(info => float.TryParse(info.Chapters, NumberStyles.Any, CultureInfo.InvariantCulture, out var val) ? val : float.MaxValue)
+                .ToList();
+
+            counter = 0f;
+            var prevIssue = string.Empty;
+            foreach (var chapter in chapters)
+            {
+                if (float.TryParse(chapter.Chapters, NumberStyles.Any, CultureInfo.InvariantCulture, out var parsedChapter))
+                {
+                    // Parsed successfully, use the numeric value
+                    counter = parsedChapter;
+                    chapter.IssueOrder = counter;
+
+                    // Increment for next chapter (unless the next has a similar value, then add 0.1)
+                    if (!string.IsNullOrEmpty(prevIssue) && float.TryParse(prevIssue, CultureInfo.InvariantCulture, out var prevIssueFloat) && parsedChapter.Is(prevIssueFloat))
                     {
-                        counter = parsedChapter;
-                        if (!string.IsNullOrEmpty(prevIssue) && float.TryParse(prevIssue, CultureInfo.InvariantCulture, out var prevIssueFloat) && parsedChapter.Is(prevIssueFloat))
-                        {
-                            // Bump by 0.1
-                            counter += 0.1f;
-                        }
-                        chapter.IssueOrder = counter;
-                        prevIssue = $"{parsedChapter}";
+                        counter += 0.1f; // bump if same value as the previous issue
                     }
-                    else
+                    prevIssue = $"{parsedChapter}";
+                }
+                else
+                {
+                    // Unparsed chapters: use the current counter and bump for the next
+                    if (!string.IsNullOrEmpty(prevIssue) && prevIssue == counter.ToString(CultureInfo.InvariantCulture))
                     {
-                        // I need to bump by 0.1f as if the prevIssue matches counter
-                        if (!string.IsNullOrEmpty(prevIssue) && prevIssue == counter + "")
-                        {
-                            // Bump by 0.1
-                            counter += 0.1f;
-                        }
-                        chapter.IssueOrder = counter;
-                        counter++;
-                        prevIssue = chapter.Chapters;
+                        counter += 0.1f; // bump if same value as the previous issue
                    }
+                    chapter.IssueOrder = counter;
+                    counter++;
+                    prevIssue = chapter.Chapters;
                 }
             }
         }
-        catch (Exception ex)
-        {
-            _logger.LogError(ex, "There was an issue setting IssueOrder");
-        }
+    }
+
+
+    private bool HasAllSeriesFolderNotChangedSinceLastScan(IList<SeriesModified> seriesFolders,
+        string normalizedFolder)
+    {
+        return seriesFolders.All(f => HasSeriesFolderNotChangedSinceLastScan(f, normalizedFolder));
     }
 
     ///
@@ -603,12 +613,6 @@
         return false;
     }
 
-    private bool HasAllSeriesFolderNotChangedSinceLastScan(IList<SeriesModified> seriesFolders,
-        string normalizedFolder)
-    {
-        return seriesFolders.All(f => HasSeriesFolderNotChangedSinceLastScan(f, normalizedFolder));
-    }
-
     private bool HasSeriesFolderNotChangedSinceLastScan(SeriesModified seriesModified, string normalizedFolder)
     {
         return seriesModified.LastScanned.Truncate(TimeSpan.TicksPerSecond) >=
diff --git a/UI/Web/src/app/manga-reader/_components/manga-reader/manga-reader.component.ts b/UI/Web/src/app/manga-reader/_components/manga-reader/manga-reader.component.ts
index b06e6b009..38cef975e 100644
--- a/UI/Web/src/app/manga-reader/_components/manga-reader/manga-reader.component.ts
+++ b/UI/Web/src/app/manga-reader/_components/manga-reader/manga-reader.component.ts
@@ -812,7 +812,7 @@ export class MangaReaderComponent implements OnInit, AfterViewInit, OnDestroy {
      && (this.readerService.imageUrlToChapterId(img.src) == chapterId || this.readerService.imageUrlToChapterId(img.src) === -1)
    );
 
-    console.log('Requesting page ', pageNum, ' found page: ', img, ' and app is requesting new image? ', forceNew);
+    //console.log('Requesting page ', pageNum, ' found page: ', img, ' and app is requesting new image? ', forceNew);
    if (!img || forceNew) {
      img = new Image();
      img.src = this.getPageUrl(pageNum, chapterId);
diff --git a/UI/Web/src/app/nav/_components/grouped-typeahead/grouped-typeahead.component.ts b/UI/Web/src/app/nav/_components/grouped-typeahead/grouped-typeahead.component.ts
index ee3e21810..a01e066a8 100644
--- a/UI/Web/src/app/nav/_components/grouped-typeahead/grouped-typeahead.component.ts
+++ b/UI/Web/src/app/nav/_components/grouped-typeahead/grouped-typeahead.component.ts
@@ -118,7 +118,8 @@
  @HostListener('window:click', ['$event'])
-  handleDocumentClick(event: any) {
+  handleDocumentClick(event: MouseEvent) {
+    console.log('click: ', event)
    this.close();
  }
 
@@ -197,7 +198,7 @@ export class GroupedTypeaheadComponent implements OnInit {
  }
 
  toggleIncludeFiles(val: boolean) {
-    const firstRun = val === false && val === this.includeChapterAndFiles;
+    const firstRun = !val && val === this.includeChapterAndFiles;
    this.includeChapterAndFiles = val;
 
    this.inputChanged.emit({value: this.searchTerm, includeFiles: this.includeChapterAndFiles});
diff --git a/UI/Web/src/app/volume-detail/volume-detail.component.ts b/UI/Web/src/app/volume-detail/volume-detail.component.ts
index 04480b9f3..0738a3d73 100644
--- a/UI/Web/src/app/volume-detail/volume-detail.component.ts
+++ b/UI/Web/src/app/volume-detail/volume-detail.component.ts
@@ -666,7 +666,6 @@ export class VolumeDetailComponent implements OnInit {
    const chaptersWithProgress = this.volume.chapters.filter(c => c.pagesRead < c.pages);
    if (chaptersWithProgress.length > 0 && this.volume.chapters.length > 1) {
      this.currentlyReadingChapter = chaptersWithProgress[0];
-      console.log('Updating currentlyReading chapter', this.currentlyReadingChapter)
      this.cdRef.markForCheck();
    } else {
      this.currentlyReadingChapter = undefined;