Feature/unit tests (#171)

* Removed a duplicate loop that was already performed earlier in the method.

* Normalize now strips underscores as well.

* Added more Parser cases, a test case for SeriesExtensions (NameInList), MergeNameTest, and TODOs marking where further tests should go.

* Added a test for removal

* Fixed a bad merge.

Co-authored-by: Andrew Song <asong641@gmail.com>
Joseph Milazzo, 2021-04-13 10:24:44 -05:00, committed by GitHub
parent 6ba00477e7
commit d59d60d9ec
6 changed files with 205 additions and 54 deletions


@@ -0,0 +1,26 @@
using API.Entities;
using API.Extensions;
using Xunit;
namespace API.Tests.Extensions
{
public class SeriesExtensionsTests
{
[Theory]
[InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker than Black"}, true)]
[InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker_than_Black"}, true)]
[InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, new [] {"Darker then Black!"}, false)]
public void NameInListTest(string[] seriesInput, string[] list, bool expected)
{
var series = new Series()
{
Name = seriesInput[0],
LocalizedName = seriesInput[1],
OriginalName = seriesInput[2],
NormalizedName = Parser.Parser.Normalize(seriesInput[0])
};
Assert.Equal(expected, series.NameInList(list));
}
}
}


@@ -198,6 +198,7 @@ namespace API.Tests
[InlineData("1", "001")]
[InlineData("10", "010")]
[InlineData("100", "100")]
[InlineData("4-8", "004-008")]
public void PadZerosTest(string input, string expected)
{
Assert.Equal(expected, PadZeros(input));
@@ -266,6 +267,7 @@ namespace API.Tests
[Theory]
[InlineData("Darker Than Black", "darkerthanblack")]
[InlineData("Darker Than Black - Something", "darkerthanblacksomething")]
[InlineData("Darker Than_Black", "darkerthanblack")]
[InlineData("", "")]
public void NormalizeTest(string input, string expected)
{
@@ -384,6 +386,14 @@ namespace API.Tests
Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath));
}
[Theory]
[InlineData("image.png", MangaFormat.Image)]
[InlineData("image.cbz", MangaFormat.Archive)]
[InlineData("image.txt", MangaFormat.Unknown)]
public void ParseFormatTest(string inputFile, MangaFormat expected)
{
Assert.Equal(expected, ParseFormat(inputFile));
}
[Fact]
public void ParseInfoTest()

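ParseFormatTest exercises Parser.ParseFormat directly; the Parser.cs hunk below widens that method from private to public so the test can call it.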

@@ -1,12 +1,17 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using API.Entities;
using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
using API.Services;
using API.Services.Tasks;
using Microsoft.Extensions.Logging;
using NSubstitute;
using NSubstitute.Extensions;
using Xunit;
using Xunit.Abstractions;
@@ -28,33 +33,126 @@ namespace API.Tests.Services
_testOutputHelper = testOutputHelper;
// Create the metadata substitute before constructing ScannerService, otherwise the service captures a null reference.
_metadataService = Substitute.For<MetadataService>(_unitOfWork, _metadataLogger, _archiveService);
_scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService);
- _libraryMock = new Library()
- {
- Id = 1,
- Name = "Manga",
- Folders = new List<FolderPath>()
- {
- new FolderPath()
- {
- Id = 1,
- LastScanned = DateTime.Now,
- LibraryId = 1,
- Path = "E:/Manga"
- }
- },
- LastModified = DateTime.Now,
- Series = new List<Series>()
- {
- new Series()
- {
- Id = 0,
- Name = "Darker Than Black"
- }
- }
- };
+ // _libraryMock = new Library()
+ // {
+ //     Id = 1,
+ //     Name = "Manga",
+ //     Folders = new List<FolderPath>()
+ //     {
+ //         new FolderPath()
+ //         {
+ //             Id = 1,
+ //             LastScanned = DateTime.Now,
+ //             LibraryId = 1,
+ //             Path = "E:/Manga"
+ //         }
+ //     },
+ //     LastModified = DateTime.Now,
+ //     Series = new List<Series>()
+ //     {
+ //         new Series()
+ //         {
+ //             Id = 0,
+ //             Name = "Darker Than Black"
+ //         }
+ //     }
+ // };
}
[Fact]
public void FindSeriesNotOnDisk_Should_RemoveNothing_Test()
{
var scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService);
var infos = new Dictionary<string, List<ParserInfo>>();
AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black"});
AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1"});
AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10"});
var existingSeries = new List<Series>();
existingSeries.Add(new Series()
{
Name = "Cage of Eden",
LocalizedName = "Cage of Eden",
OriginalName = "Cage of Eden",
NormalizedName = Parser.Parser.Normalize("Cage of Eden")
});
existingSeries.Add(new Series()
{
Name = "Darker Than Black",
LocalizedName = "Darker Than Black",
OriginalName = "Darker Than Black",
NormalizedName = Parser.Parser.Normalize("Darker Than Black")
});
Assert.Empty(scannerService.FindSeriesNotOnDisk(existingSeries, infos));
}
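The "Added a test for removal" bullet points at coverage like the following; a minimal sketch of the complementary case, mirroring the setup above (the test name and series data are illustrative, not from the commit):

[Fact]
public void FindSeriesNotOnDisk_Should_FlagMissing_Test()
{
// Sketch: name and data are illustrative.
var scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService);
var infos = new Dictionary<string, List<ParserInfo>>();
AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black"});
var existingSeries = new List<Series>()
{
new Series()
{
Name = "Cage of Eden",
LocalizedName = "Cage of Eden",
OriginalName = "Cage of Eden",
NormalizedName = Parser.Parser.Normalize("Cage of Eden")
}
};
// "Cage of Eden" is not among the parsed infos, so it should be flagged as missing.
Assert.Single(scannerService.FindSeriesNotOnDisk(existingSeries, infos));
}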
[Theory]
[InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")]
[InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")]
[InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker Than Black!")]
[InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")]
public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected)
{
var scannerService = new ScannerService(_unitOfWork, _logger, _archiveService, _metadataService);
var collectedSeries = new ConcurrentDictionary<string, List<ParserInfo>>();
foreach (var seriesName in existingSeriesNames)
{
AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName});
}
var actualName = scannerService.MergeName(collectedSeries, new ParserInfo()
{
Series = parsedInfoName
});
Assert.Equal(expected, actualName);
}
private void AddToParsedInfo(IDictionary<string, List<ParserInfo>> collectedSeries, ParserInfo info)
{
// Pattern match instead of comparing GetType() to the open generic ConcurrentDictionary<,>,
// which never equals a constructed type, so that the concurrent branch actually runs.
if (collectedSeries is ConcurrentDictionary<string, List<ParserInfo>> concurrentSeries)
{
concurrentSeries.AddOrUpdate(info.Series, new List<ParserInfo>() {info}, (_, oldValue) =>
{
oldValue ??= new List<ParserInfo>();
if (!oldValue.Contains(info))
{
oldValue.Add(info);
}
return oldValue;
});
}
else
{
if (!collectedSeries.ContainsKey(info.Series))
{
collectedSeries.Add(info.Series, new List<ParserInfo>() {info});
}
else
{
var list = collectedSeries[info.Series];
if (!list.Contains(info))
{
list.Add(info);
}
collectedSeries[info.Series] = list;
}
}
}
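AddToParsedInfo mirrors the AddOrUpdate logic in ScannerService, so a single helper can populate both the plain Dictionary consumed by FindSeriesNotOnDisk and the ConcurrentDictionary consumed by MergeName.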
// [Fact]
// public void ExistingOrDefault_Should_BeFromLibrary()
// {


@@ -15,7 +15,7 @@ namespace API.Extensions
{
foreach (var name in list)
{
- if (name == series.Name || name == series.LocalizedName || name == series.OriginalName || name == series.NormalizedName)
+ if (Parser.Parser.Normalize(name) == series.NormalizedName || name == series.Name || name == series.LocalizedName || name == series.OriginalName)
{
return true;
}

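Normalizing the candidate name before the first comparison is what makes the underscore case in SeriesExtensionsTests pass. A minimal sketch of the new behavior (assuming the Normalize change in Parser.cs below):

var series = new Series()
{
Name = "Darker than Black",
NormalizedName = Parser.Parser.Normalize("Darker than Black") // "darkerthanblack"
};
// "Darker_than_Black" also normalizes to "darkerthanblack", so this now matches:
Assert.True(series.NameInList(new [] {"Darker_than_Black"}));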

@@ -409,7 +409,7 @@ namespace API.Parser
return ret.Series == string.Empty ? null : ret;
}
- private static MangaFormat ParseFormat(string filePath)
+ public static MangaFormat ParseFormat(string filePath)
{
if (IsArchive(filePath)) return MangaFormat.Archive;
if (IsImage(filePath)) return MangaFormat.Image;
@@ -742,7 +742,7 @@ namespace API.Parser
public static string Normalize(string name)
{
- return name.ToLower().Replace("-", "").Replace(" ", "").Replace(":", "");
+ return name.ToLower().Replace("-", "").Replace(" ", "").Replace(":", "").Replace("_", "");
}
/// <summary>

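With the extra Replace, Normalize now collapses underscores along with dashes, spaces, and colons, matching the updated NormalizeTest data:

Parser.Parser.Normalize("Darker Than_Black");             // "darkerthanblack"
Parser.Parser.Normalize("Darker Than Black - Something"); // "darkerthanblacksomething"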

@@ -156,26 +156,15 @@ namespace API.Services.Tasks
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate));
}
private void UpdateLibrary(Library library, Dictionary<string, List<ParserInfo>> parsedSeries)
{
if (parsedSeries == null) throw new ArgumentNullException(nameof(parsedSeries));
- // First, remove any series that are not in parsedSeries list
- var foundSeries = parsedSeries.Select(s => Parser.Parser.Normalize(s.Key)).ToList();
- // var missingSeries = library.Series.Where(existingSeries =>
- //     !foundSeries.Contains(existingSeries.NormalizedName) || !parsedSeries.ContainsKey(existingSeries.Name)
- //     || (existingSeries.LocalizedName != null && !parsedSeries.ContainsKey(existingSeries.LocalizedName))
- //     || !parsedSeries.ContainsKey(existingSeries.OriginalName));
- var missingSeries = library.Series.Where(existingSeries => !existingSeries.NameInList(foundSeries)
-     || !existingSeries.NameInList(parsedSeries.Keys));
- var removeCount = 0;
- foreach (var existingSeries in missingSeries)
- {
-     library.Series?.Remove(existingSeries);
-     removeCount += 1;
- }
- _logger.LogInformation("Removed {RemoveCount} series that are no longer on disk", removeCount);
+ var missingSeries = FindSeriesNotOnDisk(library.Series, parsedSeries);
+ var removeCount = RemoveMissingSeries(library.Series, missingSeries);
+ _logger.LogInformation("Removed {RemoveMissingSeries} series that are no longer on disk", removeCount);
// Add new series that have parsedInfos
foreach (var (key, _) in parsedSeries)
@@ -207,9 +196,29 @@ namespace API.Services.Tasks
UpdateVolumes(series, parsedSeries[series.OriginalName].ToArray());
series.Pages = series.Volumes.Sum(v => v.Pages);
});
}
public IEnumerable<Series> FindSeriesNotOnDisk(ICollection<Series> existingSeries, Dictionary<string, List<ParserInfo>> parsedSeries)
{
var foundSeries = parsedSeries.Select(s => s.Key).ToList();
var missingSeries = existingSeries.Where(es => !es.NameInList(foundSeries)
|| !es.NameInList(parsedSeries.Keys));
return missingSeries;
}
foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now;
public int RemoveMissingSeries(ICollection<Series> existingSeries, IEnumerable<Series> missingSeries)
{
if (existingSeries == null || existingSeries.Count == 0) return 0;
var removeCount = 0;
// Materialize the lazy query before mutating the collection it enumerates over,
// otherwise removing during enumeration throws.
foreach (var existing in missingSeries.ToList())
{
existingSeries.Remove(existing);
removeCount += 1;
}
return removeCount;
}
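Extracting these two helpers out of UpdateLibrary and making them public is what lets ScannerServiceTests exercise the removal logic directly. Note that FindSeriesNotOnDisk returns a lazy query over existingSeries; RemoveMissingSeries therefore materializes it (the ToList above) before removing from that same collection, since enumerating a List<Series> while mutating it would throw an InvalidOperationException.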
private void UpdateVolumes(Series series, ParserInfo[] parsedInfos)
@@ -266,6 +275,7 @@ namespace API.Services.Tasks
Chapter chapter = null;
try
{
// TODO: Extract to FindExistingChapter()
chapter = specialTreatment
? volume.Chapters.SingleOrDefault(c => c.Range == info.Filename
|| (c.Files.Select(f => f.FilePath)
@@ -317,7 +327,7 @@ namespace API.Services.Tasks
// TODO: Extract to
// Remove chapters that aren't in parsedInfos or have no files linked
var existingChapters = volume.Chapters.ToList();
foreach (var existingChapter in existingChapters)
@@ -354,15 +364,7 @@ namespace API.Services.Tasks
if (info.Series == string.Empty) return;
// Check if normalized info.Series already exists and if so, update info to use that name instead
- var normalizedSeries = Parser.Parser.Normalize(info.Series);
- _logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries);
- var existingName = _scannedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key) == normalizedSeries)
-     .Key;
- if (!string.IsNullOrEmpty(existingName) && info.Series != existingName)
- {
-     _logger.LogDebug("Found duplicate parsed infos, merged {Original} into {Merged}", info.Series, existingName);
-     info.Series = existingName;
- }
+ info.Series = MergeName(_scannedSeries, info);
_scannedSeries.AddOrUpdate(info.Series, new List<ParserInfo>() {info}, (_, oldValue) =>
{
@@ -376,6 +378,21 @@ namespace API.Services.Tasks
});
}
public string MergeName(ConcurrentDictionary<string, List<ParserInfo>> collectedSeries, ParserInfo info)
{
var normalizedSeries = Parser.Parser.Normalize(info.Series);
_logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries);
var existingName = collectedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key) == normalizedSeries)
.Key;
if (!string.IsNullOrEmpty(existingName) && info.Series != existingName)
{
_logger.LogDebug("Found duplicate parsed infos, merged {Original} into {Merged}", info.Series, existingName);
return existingName;
}
return info.Series;
}
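MergeName, now extracted from ProcessFile and public, returns the already-collected key whenever the normalized names collide; with the MergeNameTest data above:

// collectedSeries already holds the key "Darker than Black"
MergeName(collectedSeries, new ParserInfo() {Series = "Darker Than Black"});  // "Darker than Black": normalized forms match
MergeName(collectedSeries, new ParserInfo() {Series = "Darker Than Black!"}); // "Darker Than Black!": '!' survives Normalize, so no merge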
/// <summary>
/// Processes files found during a library scan.
/// Populates a collection of <see cref="ParserInfo"/> for DB updates later.