diff --git a/API/DTOs/SearchQueryDto.cs b/API/DTOs/SearchQueryDto.cs new file mode 100644 index 000000000..ea31a9266 --- /dev/null +++ b/API/DTOs/SearchQueryDto.cs @@ -0,0 +1,7 @@ +namespace API.DTOs +{ + public class SearchQueryDto + { + + } +} \ No newline at end of file diff --git a/API/DTOs/SearchResultDto.cs b/API/DTOs/SearchResultDto.cs new file mode 100644 index 000000000..89c5bd349 --- /dev/null +++ b/API/DTOs/SearchResultDto.cs @@ -0,0 +1,7 @@ +namespace API.DTOs +{ + public class SearchResultDto + { + + } +} \ No newline at end of file diff --git a/API/Extensions/HttpExtensions.cs b/API/Extensions/HttpExtensions.cs new file mode 100644 index 000000000..b930e2652 --- /dev/null +++ b/API/Extensions/HttpExtensions.cs @@ -0,0 +1,7 @@ +namespace API.Extensions +{ + public class HttpExtensions + { + + } +} \ No newline at end of file diff --git a/API/Helpers/PagedList.cs b/API/Helpers/PagedList.cs new file mode 100644 index 000000000..b5a8fae03 --- /dev/null +++ b/API/Helpers/PagedList.cs @@ -0,0 +1,7 @@ +namespace API.Helpers +{ + public class PagedList + { + + } +} \ No newline at end of file diff --git a/API/Helpers/PaginationHeader.cs b/API/Helpers/PaginationHeader.cs new file mode 100644 index 000000000..ea0140d4c --- /dev/null +++ b/API/Helpers/PaginationHeader.cs @@ -0,0 +1,7 @@ +namespace API.Helpers +{ + public class PaginationHeader + { + + } +} \ No newline at end of file diff --git a/API/Helpers/UserParams.cs b/API/Helpers/UserParams.cs new file mode 100644 index 000000000..a6aa2d304 --- /dev/null +++ b/API/Helpers/UserParams.cs @@ -0,0 +1,7 @@ +namespace API.Helpers +{ + public class UserParams + { + + } +} \ No newline at end of file diff --git a/API/Services/ScannerService.cs b/API/Services/ScannerService.cs index 07f3404c6..d711c0d04 100644 --- a/API/Services/ScannerService.cs +++ b/API/Services/ScannerService.cs @@ -162,36 +162,34 @@ namespace API.Services _logger.LogInformation("Removed {RemoveCount} series that are no longer on disk", 
removeCount); // Add new series that have parsedInfos - foreach (var info in parsedSeries) + foreach (var (key, _) in parsedSeries) { - var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(info.Key)); + var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(key)); if (existingSeries == null) { existingSeries = new Series() { - Name = info.Key, - OriginalName = info.Key, - NormalizedName = Parser.Parser.Normalize(info.Key), - SortName = info.Key, + Name = key, + OriginalName = key, + NormalizedName = Parser.Parser.Normalize(key), + SortName = key, Summary = "", Volumes = new List<Volume>() }; library.Series.Add(existingSeries); } - existingSeries.NormalizedName = Parser.Parser.Normalize(info.Key); + existingSeries.NormalizedName = Parser.Parser.Normalize(key); } - - int total = 0; + // Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series var librarySeries = library.Series.ToList(); - Parallel.ForEach(librarySeries, () => 0, (series, state, subtotal) => + Parallel.ForEach(librarySeries, (series) => { _logger.LogInformation("Processing series {SeriesName}", series.Name); UpdateVolumes(series, parsedSeries[series.Name].ToArray()); series.Pages = series.Volumes.Sum(v => v.Pages); _metadataService.UpdateMetadata(series, _forceUpdate); - return 0; - }, finalResult => Interlocked.Add(ref total, finalResult)); + }); foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now; }