Removed some extra code that was not needed in parallelization code.

This commit is contained in:
Joseph Milazzo 2021-02-10 15:18:52 -06:00
parent 5ee27b5e7b
commit 7eb62376eb
7 changed files with 52 additions and 12 deletions

View File

@ -0,0 +1,7 @@
namespace API.DTOs
{
/// <summary>
/// Data-transfer object for a search query. Empty at this commit — a
/// placeholder added ahead of the search feature implementation.
/// NOTE(review): presumably will carry the user's search term(s); confirm
/// against later commits before documenting members.
/// </summary>
public class SearchQueryDto
{
}
}

View File

@ -0,0 +1,7 @@
namespace API.DTOs
{
/// <summary>
/// Data-transfer object for a single search result. Empty at this commit —
/// a placeholder added ahead of the search feature implementation.
/// NOTE(review): fields (series name, library id, etc.) are not yet defined
/// here; confirm the intended shape against later commits.
/// </summary>
public class SearchResultDto
{
}
}

View File

@ -0,0 +1,7 @@
namespace API.Extensions
{
/// <summary>
/// Placeholder for HTTP-related extension methods (e.g. response-header
/// helpers). Empty at this commit.
/// NOTE(review): C# extension-method containers must be declared
/// <c>static</c>; this class is not static yet — expect that modifier to be
/// added when the first extension method lands.
/// </summary>
public class HttpExtensions
{
}
}

7
API/Helpers/PagedList.cs Normal file
View File

@ -0,0 +1,7 @@
namespace API.Helpers
{
/// <summary>
/// Placeholder for a paginated list wrapper. Empty at this commit.
/// NOTE(review): the conventional implementation is a generic
/// <c>PagedList&lt;T&gt; : List&lt;T&gt;</c> carrying page metadata
/// (CurrentPage, TotalPages, PageSize, TotalCount) — confirm against the
/// follow-up commit before relying on this type.
/// </summary>
public class PagedList
{
}
}

View File

@ -0,0 +1,7 @@
namespace API.Helpers
{
/// <summary>
/// Placeholder for the pagination metadata serialized into an HTTP response
/// header (commonly "Pagination"). Empty at this commit.
/// NOTE(review): members not yet defined; verify intended fields in later
/// commits.
/// </summary>
public class PaginationHeader
{
}
}

View File

@ -0,0 +1,7 @@
namespace API.Helpers
{
/// <summary>
/// Placeholder for query-string parameters a user can pass to paginated
/// endpoints (page number, page size, filters). Empty at this commit.
/// NOTE(review): members not yet defined; confirm against later commits.
/// </summary>
public class UserParams
{
}
}

View File

@ -162,36 +162,34 @@ namespace API.Services
 _logger.LogInformation("Removed {RemoveCount} series that are no longer on disk", removeCount);

 // Add new series that have parsedInfos
-foreach (var info in parsedSeries)
+foreach (var (key, _) in parsedSeries)
 {
-    var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(info.Key));
+    var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(key));
     if (existingSeries == null)
     {
         existingSeries = new Series()
         {
-            Name = info.Key,
-            OriginalName = info.Key,
-            NormalizedName = Parser.Parser.Normalize(info.Key),
-            SortName = info.Key,
+            Name = key,
+            OriginalName = key,
+            NormalizedName = Parser.Parser.Normalize(key),
+            SortName = key,
             Summary = "",
             Volumes = new List<Volume>()
         };
         library.Series.Add(existingSeries);
     }
-    existingSeries.NormalizedName = Parser.Parser.Normalize(info.Key);
+    existingSeries.NormalizedName = Parser.Parser.Normalize(key);
 }
-int total = 0;

 // Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series
 var librarySeries = library.Series.ToList();
-Parallel.ForEach<Series, int>(librarySeries, () => 0, (series, state, subtotal) =>
+Parallel.ForEach(librarySeries, (series) =>
 {
     _logger.LogInformation("Processing series {SeriesName}", series.Name);
     UpdateVolumes(series, parsedSeries[series.Name].ToArray());
     series.Pages = series.Volumes.Sum(v => v.Pages);
     _metadataService.UpdateMetadata(series, _forceUpdate);
-    return 0;
-}, finalResult => Interlocked.Add(ref total, finalResult));
+});
 foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now;
 }