Mirror of https://github.com/Kareadita/Kavita.git
Bugfixes! (#157)
* More cases for parsing regex.
* Fixed a bug where chapter cover images weren't being updated due to a missed not.
* Removed a piece of code that was needed for upgrading, since all beta users agreed to wipe the db.
* Fixed InProgress to properly respect order and show more recent activity first. Issue is with IEntityDate LastModified not updating in DataContext.
* Updated dependencies to latest stable.
* LastModified on Volumes wasn't updating; validated it does update when data is changed.
* Fixed #152 - Sorting issue when finding cover image.
* Fixed #151 - Sort files during scan.
* Fixed #161 - Remove files that don't exist from chapters during scan.
* Fixed #155 - Ignore images that start with !, expand cover detection by checking for the word "cover" as well as "folder", and some code cleanup to make the code more concise.
* Fixed #153 - Ensure that we persist series name changes and don't override them on scanning.
* Fixed a broken unit test.
This commit is contained in:
parent d3c14863d6
commit b3ec8e8756
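For context, a minimal sketch (not part of the commit) of the natural-sort ordering that the #151/#152 fixes rely on; it mirrors the behaviour the comparer tests below assert:

// Sketch only: NaturalSortComparer orders embedded numbers by value rather than lexicographically.
var files = new[] { "x1.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg" };
var ordered = files.OrderBy(f => f, new NaturalSortComparer()).ToList();
// ordered: x1.jpg, x3.jpg, x4.jpg, x10.jpg, x11.jpg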
@@ -1,4 +1,5 @@
using System;
using System.Linq;
using API.Comparators;
using Xunit;

@@ -15,6 +16,10 @@ namespace API.Tests.Comparers
new[] {"Beelzebub_153b_RHS.zip", "Beelzebub_01_[Noodles].zip",},
new[] {"Beelzebub_01_[Noodles].zip", "Beelzebub_153b_RHS.zip"}
)]
[InlineData(
new[] {"[SCX-Scans]_Vandread_v02_Act02.zip", "[SCX-Scans]_Vandread_v02_Act01.zip",},
new[] {"[SCX-Scans]_Vandread_v02_Act01.zip", "[SCX-Scans]_Vandread_v02_Act02.zip",}
)]
public void TestNaturalSortComparer(string[] input, string[] expected)
{
NaturalSortComparer nc = new NaturalSortComparer();
@@ -27,5 +32,36 @@ namespace API.Tests.Comparers
i++;
}
}

[Theory]
[InlineData(
new[] {"x1.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg"},
new[] {"x1.jpg", "x3.jpg", "x4.jpg", "x10.jpg", "x11.jpg"}
)]
[InlineData(
new[] {"x2.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg"},
new[] {"x2.jpg", "x3.jpg", "x4.jpg", "x10.jpg", "x11.jpg"}
)]
[InlineData(
new[] {"Beelzebub_153b_RHS.zip", "Beelzebub_01_[Noodles].zip",},
new[] {"Beelzebub_01_[Noodles].zip", "Beelzebub_153b_RHS.zip"}
)]
[InlineData(
new[] {"[SCX-Scans]_Vandread_v02_Act02.zip", "[SCX-Scans]_Vandread_v02_Act01.zip","[SCX-Scans]_Vandread_v02_Act07.zip",},
new[] {"[SCX-Scans]_Vandread_v02_Act01.zip", "[SCX-Scans]_Vandread_v02_Act02.zip","[SCX-Scans]_Vandread_v02_Act07.zip",}
)]
public void TestNaturalSortComparerLinq(string[] input, string[] expected)
{
NaturalSortComparer nc = new NaturalSortComparer();
var output = input.OrderBy(c => c, nc);

var i = 0;
foreach (var s in output)
{
Assert.Equal(s, expected[i]);
i++;
}
}
}
}
@@ -338,6 +338,7 @@ namespace API.Tests
[InlineData("test.jpeg", true)]
[InlineData("test.png", true)]
[InlineData(".test.jpg", false)]
[InlineData("!test.jpg", false)]
public void IsImageTest(string filename, bool expected)
{
Assert.Equal(expected, IsImage(filename));
@@ -359,6 +360,27 @@ namespace API.Tests
Assert.Equal(expectedSeries, actual.Series);
}

[Theory]
[InlineData("Love Hina - Special.jpg", false)]
[InlineData("folder.jpg", true)]
[InlineData("DearS_v01_cover.jpg", true)]
[InlineData("DearS_v01_covers.jpg", false)]
[InlineData("!cover.jpg", true)]
public void IsCoverImageTest(string inputPath, bool expected)
{
Assert.Equal(expected, IsCoverImage(inputPath));
}

[Theory]
[InlineData("__MACOSX/Love Hina - Special.jpg", true)]
[InlineData("TEST/Love Hina - Special.jpg", false)]
[InlineData("__macosx/Love Hina/", false)]
[InlineData("MACOSX/Love Hina/", false)]
public void HasBlacklistedFolderInPathTest(string inputPath, bool expected)
{
Assert.Equal(expected, HasBlacklistedFolderInPath(inputPath));
}

[Fact]
public void ParseInfoTest()
@@ -8,23 +8,13 @@ namespace API.Comparators
public class NaturalSortComparer : IComparer<string>, IDisposable
{
private readonly bool _isAscending;
private Dictionary<string, string[]> _table = new();

public NaturalSortComparer(bool inAscendingOrder = true)
{
_isAscending = inAscendingOrder;
}

#region IComparer<string> Members

public int Compare(string x, string y)
{
throw new NotImplementedException();
}

#endregion

#region IComparer<string> Members

int IComparer<string>.Compare(string x, string y)
{
if (x == y)
@@ -81,10 +71,6 @@ namespace API.Comparators
return x.CompareTo(y);
}

#endregion

private Dictionary<string, string[]> _table = new Dictionary<string, string[]>();

public void Dispose()
{
SuppressFinalize(this);
API/Extensions/SeriesExtensions.cs (new file, 27 lines)
@@ -0,0 +1,27 @@
using System.Collections.Generic;
using API.Entities;

namespace API.Extensions
{
public static class SeriesExtensions
{
/// <summary>
/// Checks against all the name variables of the Series if it matches anything in the list.
/// </summary>
/// <param name="series"></param>
/// <param name="list"></param>
/// <returns></returns>
public static bool NameInList(this Series series, IEnumerable<string> list)
{
foreach (var name in list)
{
if (name == series.Name || name == series.LocalizedName || name == series.OriginalName || name == series.NormalizedName)
{
return true;
}
}

return false;
}
}
}
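A minimal usage sketch (not part of the commit) of the new NameInList extension, with hypothetical field values; the scanner changes further down use it the same way to detect series that no longer appear on disk:

// Sketch only – hypothetical Series values to illustrate NameInList.
var series = new Series
{
    Name = "Darker than Black",
    OriginalName = "Darker than Black",
    LocalizedName = "Darker than Black",
    NormalizedName = "darkerthanblack"
};
series.NameInList(new[] { "darkerthanblack", "Accel World" });  // true – matches NormalizedName
series.NameInList(new[] { "Accel World" });                     // false – no name field matches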
@@ -15,6 +15,7 @@ namespace API.Parser
private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex ArchiveFileRegex = new Regex(ArchiveFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex XmlRegex = new Regex(XmlRegexExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex FolderRegex = new Regex(@"(?<![[a-z]\d])(?:!?)(cover|folder)(?![\w\d])", RegexOptions.IgnoreCase | RegexOptions.Compiled);

private static readonly Regex[] MangaVolumeRegex = new[]
{
@@ -722,9 +723,9 @@ namespace API.Parser
return ArchiveFileRegex.IsMatch(Path.GetExtension(filePath));
}

public static bool IsImage(string filePath)
public static bool IsImage(string filePath, bool suppressExtraChecks = false)
{
if (filePath.StartsWith(".")) return false;
if (filePath.StartsWith(".") || (!suppressExtraChecks && filePath.StartsWith("!"))) return false;
return ImageRegex.IsMatch(Path.GetExtension(filePath));
}

@@ -744,6 +745,21 @@ namespace API.Parser
return name.ToLower().Replace("-", "").Replace(" ", "").Replace(":", "");
}

/// <summary>
/// Tests whether the file is a cover image such that: contains "cover", is named "folder", and is an image
/// </summary>
/// <param name="name"></param>
/// <returns></returns>
public static bool IsCoverImage(string name)
{
return IsImage(name, true) && (FolderRegex.IsMatch(name));
}

public static bool HasBlacklistedFolderInPath(string path)
{
return path.Contains("__MACOSX");
}

}
}
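Roughly what the revised parser checks accept and reject, mirroring the IsImageTest and IsCoverImageTest cases above (a sketch, not part of the diff):

// Sketch only – expected behaviour of the new parser checks.
Parser.IsImage("!test.jpg");                  // false – '!'-prefixed images are ignored during scans (#155)
Parser.IsCoverImage("folder.jpg");            // true  – "folder" is matched by FolderRegex
Parser.IsCoverImage("!cover.jpg");            // true  – extra checks are suppressed for cover detection
Parser.IsCoverImage("DearS_v01_covers.jpg");  // false – "covers" fails the trailing lookahead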
@@ -6,6 +6,7 @@ using System.IO.Compression;
using System.Linq;
using System.Xml.Serialization;
using API.Archive;
using API.Comparators;
using API.Extensions;
using API.Interfaces.Services;
using API.Services.Tasks;
@@ -24,11 +25,13 @@ namespace API.Services
{
private readonly ILogger<ArchiveService> _logger;
private const int ThumbnailWidth = 320; // 153w x 230h
private static readonly RecyclableMemoryStreamManager _streamManager = new RecyclableMemoryStreamManager();
private static readonly RecyclableMemoryStreamManager _streamManager = new();
private readonly NaturalSortComparer _comparer;

public ArchiveService(ILogger<ArchiveService> logger)
{
_logger = logger;
_comparer = new NaturalSortComparer();
}

/// <summary>
@@ -76,14 +79,14 @@ namespace API.Services
{
_logger.LogDebug("Using default compression handling");
using ZipArchive archive = ZipFile.OpenRead(archivePath);
return archive.Entries.Count(e => !e.FullName.Contains("__MACOSX") && Parser.Parser.IsImage(e.FullName));
return archive.Entries.Count(e => !Parser.Parser.HasBlacklistedFolderInPath(e.FullName) && Parser.Parser.IsImage(e.FullName));
}
case ArchiveLibrary.SharpCompress:
{
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
return archive.Entries.Count(entry => !entry.IsDirectory &&
!(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
!Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
&& Parser.Parser.IsImage(entry.Key));
}
case ArchiveLibrary.NotSupported:
@@ -121,8 +124,14 @@ namespace API.Services
{
_logger.LogDebug("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath);
var folder = archive.Entries.SingleOrDefault(x => !x.FullName.Contains("__MACOSX") && Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && !x.FullName.Contains("__MACOSX") && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList();
// NOTE: We can probably reduce our iteration by performing 1 filter on MACOSX then do our folder check and image chack.
var folder = archive.Entries.SingleOrDefault(x => !Parser.Parser.HasBlacklistedFolderInPath(x.FullName)
&& Parser.Parser.IsImage(x.FullName)
&& Parser.Parser.IsCoverImage(x.FullName));
var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName)
&& !Parser.Parser.HasBlacklistedFolderInPath(x.FullName)
&& Parser.Parser.IsImage(x.FullName))
.OrderBy(x => x.FullName, _comparer).ToList();
var entry = folder ?? entries[0];

return createThumbnail ? CreateThumbnail(entry) : ConvertEntryToByteArray(entry);
@@ -131,9 +140,12 @@ namespace API.Services
{
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory
&& !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
&& Parser.Parser.IsImage(entry.Key)), createThumbnail);
var entries = archive.Entries
.Where(entry => !entry.IsDirectory
&& !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
&& Parser.Parser.IsImage(entry.Key))
.OrderBy(x => x.Key, _comparer);
return FindCoverImage(entries, createThumbnail);
}
case ArchiveLibrary.NotSupported:
_logger.LogError("[GetCoverImage] This archive cannot be read: {ArchivePath}. Defaulting to no cover image", archivePath);
@@ -199,7 +211,7 @@ namespace API.Services
// Sometimes ZipArchive will list the directory and others it will just keep it in the FullName
return archive.Entries.Count > 0 &&
!Path.HasExtension(archive.Entries.ElementAt(0).FullName) ||
archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !e.FullName.Contains("__MACOSX"));
archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !Parser.Parser.HasBlacklistedFolderInPath(e.FullName));
}

private byte[] CreateThumbnail(byte[] entry, string formatExtension = ".jpg")
@@ -266,7 +278,7 @@ namespace API.Services
{
foreach (var entry in entries)
{
if (Path.GetFileNameWithoutExtension(entry.Key).ToLower().EndsWith("comicinfo") && Parser.Parser.IsXml(entry.Key))
if (Path.GetFileNameWithoutExtension(entry.Key).ToLower().EndsWith("comicinfo") && !Parser.Parser.HasBlacklistedFolderInPath(entry.Key) && Parser.Parser.IsXml(entry.Key))
{
using var ms = _streamManager.GetStream();
entry.WriteTo(ms);
@@ -300,7 +312,7 @@ namespace API.Services
{
_logger.LogDebug("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath);
var entry = archive.Entries.SingleOrDefault(x => !x.FullName.Contains("__MACOSX") && Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName));
var entry = archive.Entries.SingleOrDefault(x => !Parser.Parser.HasBlacklistedFolderInPath(x.FullName) && Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName));
if (entry != null)
{
using var stream = entry.Open();
@@ -314,7 +326,7 @@ namespace API.Services
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory
&& !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
&& !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
&& Parser.Parser.IsXml(entry.Key)));
break;
}
@@ -400,7 +412,7 @@ namespace API.Services
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory
&& !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
&& !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
&& Parser.Parser.IsImage(entry.Key)), extractPath);
break;
}
@ -5,6 +5,7 @@ using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Comparators;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
@ -23,6 +24,7 @@ namespace API.Services.Tasks
|
||||
private readonly IArchiveService _archiveService;
|
||||
private readonly IMetadataService _metadataService;
|
||||
private ConcurrentDictionary<string, List<ParserInfo>> _scannedSeries;
|
||||
private readonly NaturalSortComparer _naturalSort;
|
||||
|
||||
public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger, IArchiveService archiveService,
|
||||
IMetadataService metadataService)
|
||||
@ -31,6 +33,7 @@ namespace API.Services.Tasks
|
||||
_logger = logger;
|
||||
_archiveService = archiveService;
|
||||
_metadataService = metadataService;
|
||||
_naturalSort = new NaturalSortComparer(true);
|
||||
}
|
||||
|
||||
|
||||
@ -159,10 +162,13 @@ namespace API.Services.Tasks
|
||||
|
||||
// First, remove any series that are not in parsedSeries list
|
||||
var foundSeries = parsedSeries.Select(s => Parser.Parser.Normalize(s.Key)).ToList();
|
||||
var missingSeries = library.Series.Where(existingSeries =>
|
||||
!foundSeries.Contains(existingSeries.NormalizedName) || !parsedSeries.ContainsKey(existingSeries.Name)
|
||||
|| (existingSeries.LocalizedName != null && !parsedSeries.ContainsKey(existingSeries.LocalizedName))
|
||||
|| !parsedSeries.ContainsKey(existingSeries.OriginalName));
|
||||
// var missingSeries = library.Series.Where(existingSeries =>
|
||||
// !foundSeries.Contains(existingSeries.NormalizedName) || !parsedSeries.ContainsKey(existingSeries.Name)
|
||||
// || (existingSeries.LocalizedName != null && !parsedSeries.ContainsKey(existingSeries.LocalizedName))
|
||||
// || !parsedSeries.ContainsKey(existingSeries.OriginalName));
|
||||
|
||||
var missingSeries = library.Series.Where(existingSeries => !existingSeries.NameInList(foundSeries)
|
||||
|| !existingSeries.NameInList(parsedSeries.Keys));
|
||||
var removeCount = 0;
|
||||
foreach (var existingSeries in missingSeries)
|
||||
{
|
||||
@ -198,7 +204,7 @@ namespace API.Services.Tasks
|
||||
Parallel.ForEach(librarySeries, (series) =>
|
||||
{
|
||||
_logger.LogInformation("Processing series {SeriesName}", series.Name);
|
||||
UpdateVolumes(series, parsedSeries[series.Name].ToArray());
|
||||
UpdateVolumes(series, parsedSeries[series.OriginalName].ToArray());
|
||||
series.Pages = series.Volumes.Sum(v => v.Pages);
|
||||
});
|
||||
|
||||
@ -248,15 +254,14 @@ namespace API.Services.Tasks
|
||||
{
|
||||
var startingChapters = volume.Chapters.Count;
|
||||
|
||||
|
||||
// Add new chapters
|
||||
foreach (var info in parsedInfos)
|
||||
{
|
||||
var specialTreatment = (info.IsSpecial || (info.Volumes == "0" && info.Chapters == "0"));
|
||||
// Specials go into their own chapters with Range being their filename and IsSpecial = True. Non-Specials with Vol and Chap as 0
|
||||
// also are treated like specials
|
||||
// also are treated like specials for UI grouping.
|
||||
_logger.LogDebug("Adding new chapters, {Series} - Vol {Volume} Ch {Chapter} - Needs Special Treatment? {NeedsSpecialTreatment}", info.Series, info.Volumes, info.Chapters, specialTreatment);
|
||||
// If there are duplicate files that parse out to be the same but a different series name (but parses to same normalized name ie History's strongest
|
||||
// NOTE: If there are duplicate files that parse out to be the same but a different series name (but parses to same normalized name ie History's strongest
|
||||
// vs Historys strongest), this code will break and the duplicate will be skipped.
|
||||
Chapter chapter = null;
|
||||
try
|
||||
@ -312,6 +317,7 @@ namespace API.Services.Tasks
|
||||
|
||||
|
||||
|
||||
|
||||
// Remove chapters that aren't in parsedInfos or have no files linked
|
||||
var existingChapters = volume.Chapters.ToList();
|
||||
foreach (var existingChapter in existingChapters)
|
||||
@ -324,7 +330,16 @@ namespace API.Services.Tasks
|
||||
{
|
||||
volume.Chapters.Remove(existingChapter);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Ensure we remove any files that no longer exist AND order
|
||||
existingChapter.Files = existingChapter.Files
|
||||
.Where(f => parsedInfos.Any(p => p.FullFilePath == f.FilePath))
|
||||
.OrderBy(f => f.FilePath, _naturalSort).ToList();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
_logger.LogDebug("Updated chapters from {StartingChaptersCount} to {ChapterCount}",
|
||||
startingChapters, volume.Chapters.Count);