Mirror of https://github.com/Kareadita/Kavita.git
Removed some extra code that was not needed in parallelization code.
commit 7eb62376eb (parent 5ee27b5e7b)
API/DTOs/SearchQueryDto.cs (new file)
@@ -0,0 +1,7 @@
+namespace API.DTOs
+{
+    public class SearchQueryDto
+    {
+
+    }
+}
API/DTOs/SearchResultDto.cs (new file)
@@ -0,0 +1,7 @@
+namespace API.DTOs
+{
+    public class SearchResultDto
+    {
+
+    }
+}
API/Extensions/HttpExtensions.cs (new file)
@@ -0,0 +1,7 @@
+namespace API.Extensions
+{
+    public class HttpExtensions
+    {
+
+    }
+}
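HttpExtensions lands in this commit as an empty, non-static class; C# requires extension methods to live in a static class, so it will presumably become static once it gains members. Given the pagination helper stubs below, a common role for a class like this is writing paging metadata into a response header. A minimal sketch of that idea, with the method name, header name, and payload shape all assumed rather than taken from this commit:

using System.Text.Json;
using Microsoft.AspNetCore.Http;

namespace API.Extensions
{
    public static class HttpExtensions
    {
        // Hypothetical helper: serializes paging metadata into a custom
        // "Pagination" response header for API clients to read.
        public static void AddPaginationHeader(this HttpResponse response,
            int currentPage, int itemsPerPage, int totalItems, int totalPages)
        {
            response.Headers.Add("Pagination", JsonSerializer.Serialize(
                new { currentPage, itemsPerPage, totalItems, totalPages }));
            // Browsers only expose safelisted headers to scripts unless told otherwise.
            response.Headers.Add("Access-Control-Expose-Headers", "Pagination");
        }
    }
}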
API/Helpers/PagedList.cs (new file)
@@ -0,0 +1,7 @@
+namespace API.Helpers
+{
+    public class PagedList
+    {
+
+    }
+}
API/Helpers/PaginationHeader.cs (new file)
@@ -0,0 +1,7 @@
+namespace API.Helpers
+{
+    public class PaginationHeader
+    {
+
+    }
+}
API/Helpers/UserParams.cs (new file)
@@ -0,0 +1,7 @@
+namespace API.Helpers
+{
+    public class UserParams
+    {
+
+    }
+}
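PagedList, PaginationHeader, and UserParams are the standard trio for server-side paging, all committed empty here. As a rough sketch of where stubs like these usually end up (every member below is an assumption, not part of this commit): PagedList<T> carries one page of results plus the counts a pagination header needs, and UserParams carries the caller's page request with a capped page size.

using System;
using System.Collections.Generic;
using System.Linq;

namespace API.Helpers
{
    // Hypothetical shape: one page of items plus the metadata for a pagination header.
    public class PagedList<T> : List<T>
    {
        public int CurrentPage { get; }
        public int TotalPages { get; }
        public int PageSize { get; }
        public int TotalCount { get; }

        public PagedList(IEnumerable<T> items, int count, int pageNumber, int pageSize)
        {
            CurrentPage = pageNumber;
            PageSize = pageSize;
            TotalCount = count;
            TotalPages = (int) Math.Ceiling(count / (double) pageSize);
            AddRange(items);
        }

        // In-memory paging for illustration; a real repository would usually
        // run Count/Skip/Take against an IQueryable instead.
        public static PagedList<T> Create(IEnumerable<T> source, int pageNumber, int pageSize)
        {
            var all = source.ToList();
            var items = all.Skip((pageNumber - 1) * pageSize).Take(pageSize);
            return new PagedList<T>(items, all.Count, pageNumber, pageSize);
        }
    }

    // Hypothetical shape: the caller's paging request, with the page size capped.
    public class UserParams
    {
        private const int MaxPageSize = 50;
        public int PageNumber { get; set; } = 1;

        private int _pageSize = 10;
        public int PageSize
        {
            get => _pageSize;
            set => _pageSize = value > MaxPageSize ? MaxPageSize : value;
        }
    }
}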
@@ -162,36 +162,34 @@ namespace API.Services
             _logger.LogInformation("Removed {RemoveCount} series that are no longer on disk", removeCount);
 
             // Add new series that have parsedInfos
-            foreach (var info in parsedSeries)
+            foreach (var (key, _) in parsedSeries)
             {
-                var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(info.Key));
+                var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(key));
                 if (existingSeries == null)
                 {
                     existingSeries = new Series()
                     {
-                        Name = info.Key,
-                        OriginalName = info.Key,
-                        NormalizedName = Parser.Parser.Normalize(info.Key),
-                        SortName = info.Key,
+                        Name = key,
+                        OriginalName = key,
+                        NormalizedName = Parser.Parser.Normalize(key),
+                        SortName = key,
                         Summary = "",
                         Volumes = new List<Volume>()
                     };
                     library.Series.Add(existingSeries);
                 }
-                existingSeries.NormalizedName = Parser.Parser.Normalize(info.Key);
+                existingSeries.NormalizedName = Parser.Parser.Normalize(key);
             }
 
-            int total = 0;
             // Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series
             var librarySeries = library.Series.ToList();
-            Parallel.ForEach<Series, int>(librarySeries, () => 0, (series, state, subtotal) =>
+            Parallel.ForEach(librarySeries, (series) =>
             {
                 _logger.LogInformation("Processing series {SeriesName}", series.Name);
                 UpdateVolumes(series, parsedSeries[series.Name].ToArray());
                 series.Pages = series.Volumes.Sum(v => v.Pages);
                 _metadataService.UpdateMetadata(series, _forceUpdate);
-                return 0;
-            }, finalResult => Interlocked.Add(ref total, finalResult));
+            });
 
             foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now;
         }
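The service hunk is the change the commit message describes. The loop previously used the thread-local overload of Parallel.ForEach, whose extra parameters (a per-thread seed, a body that returns a running subtotal, and a finalizer that folds each thread's subtotal into a shared total via Interlocked.Add) only pay off when the loop actually aggregates a result. Here every body returned 0 and total was never read, so the plain overload expresses the same work without the machinery. Dropping the aggregation is safe because each iteration only mutates its own Series. The smaller change above it deconstructs each KeyValuePair directly into (key, _) rather than binding info and reaching through info.Key. A standalone sketch contrasting the two overloads (toy data, not the scanner code):

using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

class ParallelForEachDemo
{
    static void Main()
    {
        var items = Enumerable.Range(1, 100).ToArray();

        // The removed pattern: per-thread subtotals merged at the end.
        // Worth the ceremony only when the loop produces a result to aggregate.
        int total = 0;
        Parallel.ForEach(items,
            () => 0,                                    // per-thread seed
            (item, state, subtotal) => subtotal + item, // body returns the running subtotal
            finalResult => Interlocked.Add(ref total, finalResult)); // merge once per thread
        Console.WriteLine(total); // 5050

        // The kept pattern: each iteration works by side effect, nothing to merge.
        Parallel.ForEach(items, item =>
        {
            // e.g. mutate the item in place, as the scanner does per series
        });
    }
}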