Fixed a bug where scanned series weren't all being saved due to threads conflicting with each other.
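The root cause was a lost-update race in TrackSeries: it read the current bag for a series with TryGetValue, copied it plus the new ParserInfo into a fresh ConcurrentBag, and swapped it back with TryUpdate, falling back to TryAdd. When two scan threads hit the same series at once, the losing thread's TryUpdate failed (the stored value had already been replaced) and its TryAdd fallback also failed (the key already existed), so its ParserInfo was silently dropped and the series came out incomplete. The commit routes the whole read-modify-write through ConcurrentDictionary.AddOrUpdate, which retries its update delegate until the swap succeeds, and replaces the inner ConcurrentBag with a plain List, presumably because the value is now only touched inside that delegate.

Below is a minimal standalone sketch of the two patterns, not the ScannerService code itself; the type names, keys, and counts are illustrative only.

// Standalone sketch only -- not the Kavita ScannerService. "Before" mirrors the
// old TrackSeries flow (TryGetValue + rebuild + TryUpdate/TryAdd); "After"
// mirrors the new AddOrUpdate flow. Keys and payloads are simplified to
// string/int for illustration.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading.Tasks;

public static class LostUpdateDemo
{
    private static readonly ConcurrentDictionary<string, List<int>> Before =
        new ConcurrentDictionary<string, List<int>>();
    private static readonly ConcurrentDictionary<string, List<int>> After =
        new ConcurrentDictionary<string, List<int>>();

    private static void TrackBefore(string key, int item)
    {
        // Check-then-act across several calls; another thread can replace the
        // stored value between TryGetValue and TryUpdate.
        var newList = new List<int>();
        Before.TryGetValue(key, out var existing);
        if (existing != null) newList.AddRange(existing);
        newList.Add(item);

        if (!Before.TryUpdate(key, newList, existing))
        {
            // TryUpdate lost the race; TryAdd also fails if the key already
            // exists, and then this item is silently dropped.
            Before.TryAdd(key, newList);
        }
    }

    private static void TrackAfter(string key, int item)
    {
        // AddOrUpdate retries the update delegate until its swap succeeds, so
        // no call is dropped. The delegate may run more than once, so it
        // builds a fresh list rather than mutating the one it was handed.
        After.AddOrUpdate(
            key,
            new List<int> { item },
            (_, old) => new List<int>(old) { item });
    }

    public static void Main()
    {
        Parallel.For(0, 1000, i =>
        {
            TrackBefore("series", i);
            TrackAfter("series", i);
        });

        Console.WriteLine($"before: kept {Before["series"].Count} of 1000");
        Console.WriteLine($"after:  kept {After["series"].Count} of 1000");
    }
}

Running the sketch typically shows the "before" dictionary holding fewer than 1000 items while the "after" dictionary keeps all of them, which is the symptom described in the commit message.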

Joseph Milazzo 2021-01-25 15:39:29 -06:00
parent ae6682f011
commit 21f6889b99


@@ -6,6 +6,7 @@ using System.Diagnostics;
 using System.IO;
 using System.IO.Compression;
 using System.Linq;
+using System.Threading;
 using System.Threading.Tasks;
 using API.Entities;
 using API.Extensions;
@@ -20,7 +21,7 @@ namespace API.Services
     {
         private readonly IUnitOfWork _unitOfWork;
         private readonly ILogger<ScannerService> _logger;
-        private ConcurrentDictionary<string, ConcurrentBag<ParserInfo>> _scannedSeries;
+        private ConcurrentDictionary<string, List<ParserInfo>> _scannedSeries;

         public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger)
         {
@@ -53,7 +54,7 @@ namespace API.Services
                 return;
             }

-            _scannedSeries = new ConcurrentDictionary<string, ConcurrentBag<ParserInfo>>();
+            _scannedSeries = new ConcurrentDictionary<string, List<ParserInfo>>();
             _logger.LogInformation($"Beginning scan on {library.Name}. Forcing metadata update: {forceUpdate}");

             var totalFiles = 0;
@@ -77,7 +78,7 @@ namespace API.Services
                 }
             }

-            var filtered = _scannedSeries.Where(kvp => !kvp.Value.IsEmpty);
+            var filtered = _scannedSeries.Where(kvp => kvp.Value.Count != 0);
             var series = filtered.ToImmutableDictionary(v => v.Key, v => v.Value);

             // Perform DB activities
@@ -101,7 +102,7 @@ namespace API.Services
             _logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds, library.Name);
         }

-        private List<Series> UpsertSeries(int libraryId, bool forceUpdate, ImmutableDictionary<string, ConcurrentBag<ParserInfo>> series, Library library)
+        private List<Series> UpsertSeries(int libraryId, bool forceUpdate, ImmutableDictionary<string, List<ParserInfo>> series, Library library)
         {
             var allSeries = Task.Run(() => _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId)).Result.ToList();
             foreach (var seriesKey in series.Keys)
@@ -129,7 +130,7 @@ namespace API.Services
             return allSeries;
         }

-        private void RemoveSeriesNotOnDisk(List<Series> allSeries, ImmutableDictionary<string, ConcurrentBag<ParserInfo>> series, Library library)
+        private void RemoveSeriesNotOnDisk(List<Series> allSeries, ImmutableDictionary<string, List<ParserInfo>> series, Library library)
         {
             var count = 0;
             foreach (var existingSeries in allSeries)
@@ -149,33 +150,20 @@ namespace API.Services
         /// Attempts to either add a new instance of a show mapping to the scannedSeries bag or adds to an existing.
         /// </summary>
         /// <param name="info"></param>
-        public void TrackSeries(ParserInfo info)
+        private void TrackSeries(ParserInfo info)
         {
             if (info.Series == string.Empty) return;

-            ConcurrentBag<ParserInfo> newBag = new ConcurrentBag<ParserInfo>();
-            // Use normalization for key lookup due to parsing disparities
-            var existingKey = _scannedSeries.Keys.SingleOrDefault(k => k.ToLower() == info.Series.ToLower());
-            if (existingKey != null) info.Series = existingKey;
-            if (_scannedSeries.TryGetValue(info.Series, out var tempBag))
+            _scannedSeries.AddOrUpdate(info.Series, new List<ParserInfo>() {info}, (key, oldValue) =>
             {
-                var existingInfos = tempBag.ToArray();
-                foreach (var existingInfo in existingInfos)
+                oldValue ??= new List<ParserInfo>();
+                if (!oldValue.Contains(info))
                 {
-                    newBag.Add(existingInfo);
+                    oldValue.Add(info);
                 }
-            }
-            else
-            {
-                tempBag = new ConcurrentBag<ParserInfo>();
-            }
-            newBag.Add(info);
-
-            if (!_scannedSeries.TryUpdate(info.Series, newBag, tempBag))
-            {
-                _scannedSeries.TryAdd(info.Series, newBag);
-            }
+                return oldValue;
+            });
         }

         /// <summary>
@@ -213,7 +201,7 @@ namespace API.Services
             }

             if (string.IsNullOrEmpty(series.Summary) || forceUpdate)
             {
-                series.Summary = ""; // TODO: Check if comicInfo.xml in file and parse metadata out.
+                series.Summary = "";
             }

@@ -235,8 +223,7 @@ namespace API.Services
             };
         }

-        // TODO: Implement Test
-        public int MinimumNumberFromRange(string range)
+        private int MinimumNumberFromRange(string range)
         {
             var tokens = range.Split("-");
             return Int32.Parse(tokens.Length >= 1 ? tokens[0] : range);