Mirror of https://github.com/Kareadita/Kavita.git (synced 2025-07-09 03:04:19 -04:00)
A lot of random changes to try and speed up SharpCompress.
This commit is contained in: parent d73bd22db2, commit d724a8f178
@@ -50,6 +50,8 @@ namespace API.Tests
         [InlineData("VanDread-v01-c001[MD].zip", "1")]
         [InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch27_[VISCANS].zip", "4")]
         [InlineData("Mob Psycho 100 v02 (2019) (Digital) (Shizu).cbz", "2")]
+        [InlineData("Kodomo no Jikan vol. 1.cbz", "1")]
+        [InlineData("Kodomo no Jikan vol. 10.cbz", "10")]
         public void ParseVolumeTest(string filename, string expected)
         {
             Assert.Equal(expected, ParseVolume(filename));
API/Archive/Archive.cs (new file, 41 lines)
@@ -0,0 +1,41 @@
using System;
using System.IO;
using System.IO.Compression;
using SharpCompress.Archives;

namespace API.Archive
{
    public static class Archive
    {
        /// <summary>
        /// Checks if a File can be opened. Requires up to 2 opens of the filestream.
        /// </summary>
        /// <param name="archivePath"></param>
        /// <returns></returns>
        public static ArchiveLibrary CanOpen(string archivePath)
        {
            if (!File.Exists(archivePath) || !Parser.Parser.IsArchive(archivePath)) return ArchiveLibrary.NotSupported;

            try
            {
                using var a2 = ZipFile.OpenRead(archivePath);
                return ArchiveLibrary.Default;
            }
            catch (Exception)
            {
                try
                {
                    using var a1 = ArchiveFactory.Open(archivePath);
                    return ArchiveLibrary.SharpCompress;
                }
                catch (Exception)
                {
                    return ArchiveLibrary.NotSupported;
                }
            }
        }
    }
}
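A hypothetical caller (not part of this commit) could cache CanOpen's verdict per file and route all later reads to the cheaper BCL zip reader, touching SharpCompress only when needed:

    using System;
    using API.Archive;

    public static class ArchiveRouter
    {
        // Sketch: decide once which extraction path a file should take.
        public static string DescribeStrategy(string archivePath)
        {
            return Archive.CanOpen(archivePath) switch
            {
                ArchiveLibrary.Default => "Open with System.IO.Compression.ZipFile",
                ArchiveLibrary.SharpCompress => "Fall back to SharpCompress's ArchiveFactory",
                _ => "Skip: not a supported archive"
            };
        }
    }

Because CanOpen may open the file stream twice, a caller that processes the same file repeatedly would want to memoize the result rather than re-probe on every access.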
API/Archive/ArchiveLibrary.cs (new file, 12 lines)
@@ -0,0 +1,12 @@
namespace API.Archive
{
    /// <summary>
    /// Represents which library should handle opening this archive
    /// </summary>
    public enum ArchiveLibrary
    {
        NotSupported = 0,
        SharpCompress = 1,
        Default = 2
    }
}
API/Archive/ArchiveMetadata.cs (new file, 10 lines)
@@ -0,0 +1,10 @@
namespace API.Archive
{
    public class ArchiveMetadata
    {
        public byte[] CoverImage { get; set; }
        public string Summary { get; set; }
        public int Pages { get; set; }
        //public string Format { get; set; }
    }
}
@@ -5,7 +5,7 @@ namespace API.DTOs
     public class MangaFileDto
     {
         public string FilePath { get; init; }
-        public int NumberOfPages { get; init; }
+        public int Pages { get; init; }
         public MangaFormat Format { get; init; }

     }
@@ -1,4 +1,6 @@
-namespace API.Interfaces.Services
+using API.Archive;
+
+namespace API.Interfaces.Services
 {
     public interface IArchiveService
     {
@@ -7,5 +9,6 @@
         byte[] GetCoverImage(string filepath, bool createThumbnail = false);
         bool IsValidArchive(string archivePath);
         string GetSummaryInfo(string archivePath);
+        ArchiveMetadata GetArchiveData(string archivePath, bool createThumbnail);
     }
 }
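The new GetArchiveData bundles the cover image and page count (summary is stubbed out for now) so a scanner can open each archive once instead of once per metadata field. A hypothetical consumer, with names assumed rather than taken from this commit:

    using API.Archive;
    using API.Interfaces.Services;

    public class MangaFileProcessor
    {
        private readonly IArchiveService _archiveService;

        public MangaFileProcessor(IArchiveService archiveService)
        {
            _archiveService = archiveService;
        }

        // One call per file instead of separate GetCoverImage / GetSummaryInfo /
        // page-count passes, each of which would re-open the archive.
        public ArchiveMetadata Process(string filePath)
        {
            return _archiveService.GetArchiveData(filePath, createThumbnail: true);
        }
    }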
@@ -26,6 +26,10 @@ namespace API.Parser
         new Regex(
             @"(?<Series>.*)(\b|_)v(?<Volume>\d+(-\d+)?)",
             RegexOptions.IgnoreCase | RegexOptions.Compiled),
+        // Kodomo no Jikan vol. 10
+        new Regex(
+            @"(?<Series>.*)(\b|_)(vol\.? ?)(?<Volume>\d+(-\d+)?)",
+            RegexOptions.IgnoreCase | RegexOptions.Compiled),
         // Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
         new Regex(
             @"(vol\.? ?)(?<Volume>0*[1-9]+)",
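A self-contained check of the new volume pattern against the two filenames added to ParseVolumeTest (sketch only; the pattern string is copied verbatim from the diff above):

    using System;
    using System.Text.RegularExpressions;

    public static class VolumeRegexDemo
    {
        public static void Main()
        {
            var volRegex = new Regex(
                @"(?<Series>.*)(\b|_)(vol\.? ?)(?<Volume>\d+(-\d+)?)",
                RegexOptions.IgnoreCase | RegexOptions.Compiled);

            foreach (var name in new[] { "Kodomo no Jikan vol. 1.cbz", "Kodomo no Jikan vol. 10.cbz" })
            {
                // Prints 1, then 10 -- the expected values in the new test cases.
                Console.WriteLine(volRegex.Match(name).Groups["Volume"].Value);
            }
        }
    }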
@@ -1,9 +1,13 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Xml.Serialization;
using API.Archive;
using API.Extensions;
using API.Interfaces.Services;
using API.Services.Tasks;
using Microsoft.Extensions.Logging;
@@ -42,37 +46,50 @@ namespace API.Services
             var count = 0;
             try
             {
-                using Stream stream = File.OpenRead(archivePath);
-                using (var reader = ReaderFactory.Open(stream))
-                {
-                    try
-                    {
-                        _logger.LogDebug("{ArchivePath}'s Type: {ArchiveType}", archivePath, reader.ArchiveType);
-                    }
-                    catch (InvalidOperationException ex)
-                    {
-                        _logger.LogError(ex, "Could not parse the archive. Please validate it is not corrupted");
-                        return 0;
-                    }
+                using var archive = ArchiveFactory.Open(archivePath);
+                return archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)).Count();

-                    while (reader.MoveToNextEntry())
-                    {
-                        if (!reader.Entry.IsDirectory && Parser.Parser.IsImage(reader.Entry.Key))
-                        {
-                            count++;
-                        }
-                    }
-                }
+                // using Stream stream = File.OpenRead(archivePath);
+                // using (var reader = ReaderFactory.Open(stream))
+                // {
+                //     try
+                //     {
+                //         _logger.LogDebug("{ArchivePath}'s Type: {ArchiveType}", archivePath, reader.ArchiveType);
+                //     }
+                //     catch (InvalidOperationException ex)
+                //     {
+                //         _logger.LogError(ex, "Could not parse the archive. Please validate it is not corrupted, {ArchivePath}", archivePath);
+                //         return 0;
+                //     }
+                //
+                //     while (reader.MoveToNextEntry())
+                //     {
+                //         if (!reader.Entry.IsDirectory && Parser.Parser.IsImage(reader.Entry.Key))
+                //         {
+                //             count++;
+                //         }
+                //     }
+                // }
             }
             catch (Exception ex)
             {
-                _logger.LogError(ex, "There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath);
+                _logger.LogError(ex, "[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath);
                 return 0;
             }

             return count;
         }

+        public ArchiveMetadata GetArchiveData(string archivePath, bool createThumbnail)
+        {
+            return new ArchiveMetadata()
+            {
+                Pages = GetNumberOfPagesFromArchive(archivePath),
+                //Summary = GetSummaryInfo(archivePath),
+                CoverImage = GetCoverImage(archivePath, createThumbnail)
+            };
+        }

         /// <summary>
         /// Generates byte array of cover image.
         /// Given a path to a compressed file (zip, rar, cbz, cbr, etc), will ensure the first image is returned unless
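The rewrite above replaces the forward-only ReaderFactory scan with ArchiveFactory.Open, which can enumerate Entries without decompressing each one; on seekable zip files SharpCompress can serve that enumeration from the archive's header data. A minimal Stopwatch harness to sanity-check the difference on a local file (hypothetical, not part of the commit; the image-extension filter is dropped so the snippet stays self-contained):

    using System;
    using System.Diagnostics;
    using System.IO;
    using System.Linq;
    using SharpCompress.Archives;
    using SharpCompress.Readers;

    public static class PageCountBenchmark
    {
        public static void Main(string[] args)
        {
            var archivePath = args[0];

            // Old approach: stream every entry forward-only.
            var sw = Stopwatch.StartNew();
            using (var stream = File.OpenRead(archivePath))
            using (var reader = ReaderFactory.Open(stream))
            {
                var count = 0;
                while (reader.MoveToNextEntry())
                {
                    if (!reader.Entry.IsDirectory) count++;
                }
                Console.WriteLine($"ReaderFactory:  {count} entries in {sw.ElapsedMilliseconds} ms");
            }

            // New approach: open the archive and count entries in one LINQ pass.
            sw.Restart();
            using (var archive = ArchiveFactory.Open(archivePath))
            {
                var count = archive.Entries.Count(e => !e.IsDirectory);
                Console.WriteLine($"ArchiveFactory: {count} entries in {sw.ElapsedMilliseconds} ms");
            }
        }
    }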
@@ -92,7 +109,7 @@ namespace API.Services
             }
             catch (Exception ex)
             {
-                _logger.LogError(ex, "There was an exception when reading archive stream: {Filepath}. Defaulting to no cover image", filepath);
+                _logger.LogError(ex, "[GetCoverImage] There was an exception when reading archive stream: {Filepath}. Defaulting to no cover image", filepath);
             }

             return Array.Empty<byte>();
@@ -140,7 +157,7 @@ namespace API.Services
             }
             catch (Exception ex)
             {
-                _logger.LogError(ex, "There was a critical error and prevented thumbnail generation. Defaulting to no cover image");
+                _logger.LogError(ex, "[CreateThumbnail] There was a critical error and prevented thumbnail generation. Defaulting to no cover image");
             }

             return Array.Empty<byte>();
@@ -187,6 +204,7 @@ namespace API.Services

             return null;
         }
+
         public string GetSummaryInfo(string archivePath)
         {
             var summary = string.Empty;
@@ -228,11 +246,11 @@ namespace API.Services
                     return info.Summary;
                 }

-                _logger.LogError("Could not parse archive file");
+                _logger.LogError("[GetSummaryInfo] Could not parse archive file: {Filepath}", archivePath);
             }
             catch (Exception ex)
             {
-                _logger.LogError(ex, "There was an exception when reading archive stream: {Filepath}", archivePath);
+                _logger.LogError(ex, "[GetSummaryInfo] There was an exception when reading archive stream: {Filepath}", archivePath);
             }

             return summary;
@@ -249,6 +267,9 @@ namespace API.Services
                     Overwrite = false
                 });
             }
+
+            // using ZipArchive archive = ZipFile.OpenRead(archivePath);
+            // archive.ExtractToDirectory(extractPath, true);
         }

         /// <summary>
@@ -268,7 +289,7 @@ namespace API.Services
             var sw = Stopwatch.StartNew();
             using var archive = ArchiveFactory.Open(archivePath);
             ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath);
-            _logger.LogDebug("[Fallback] Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds);
+            _logger.LogDebug("Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds);
         }
     }
 }
@@ -36,8 +36,10 @@ namespace API.Services
                 var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault();
                 if (firstFile != null) chapter.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true);
             }
+            // NOTE: Can I put page calculation here? chapter.Pages = chapter.Files.Sum(f => f.Pages);
         }

         public void UpdateMetadata(Volume volume, bool forceUpdate)
         {
             if (volume != null && ShouldFindCoverImage(volume.CoverImage, forceUpdate))
@@ -45,14 +47,23 @@ namespace API.Services
                 // TODO: Create a custom sorter for Chapters so it's consistent across the application
                 volume.Chapters ??= new List<Chapter>();
                 var firstChapter = volume.Chapters.OrderBy(x => Double.Parse(x.Number)).FirstOrDefault();

                 var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault();
-                if (firstFile != null) volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true);
+                // Skip calculating Cover Image (I/O) if the chapter already has it set
+                if (firstChapter == null || ShouldFindCoverImage(firstChapter.CoverImage))
+                {
+                    if (firstFile != null) volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true);
+                }
+                else
+                {
+                    volume.CoverImage = firstChapter.CoverImage;
+                }
             }
         }

         public void UpdateMetadata(Series series, bool forceUpdate)
         {
-            // TODO: this doesn't actually invoke finding a new cover. Also all these should be groupped ideally so we limit
+            // TODO: this doesn't actually invoke finding a new cover. Also all these should be grouped ideally so we limit
             // disk I/O to one method.
             if (series == null) return;
             if (ShouldFindCoverImage(series.CoverImage, forceUpdate))
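The volume branch above hinges on ShouldFindCoverImage, which this diff does not show. A plausible reconstruction (an assumption, not code from this commit) is a simple cache check, which is why reusing firstChapter.CoverImage avoids the archive I/O entirely:

    public static class CoverImageHeuristics
    {
        // Assumption: mirrors the check MetadataService appears to use.
        // A cover lookup is only worth the disk I/O when forced, or when
        // no image bytes have been cached yet.
        public static bool ShouldFindCoverImage(byte[] coverImage, bool forceUpdate = false)
        {
            return forceUpdate || coverImage == null || coverImage.Length == 0;
        }
    }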
@@ -224,7 +224,7 @@ namespace API.Services.Tasks
             _logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
             UpdateChapters(volume, infos);
             volume.Pages = volume.Chapters.Sum(c => c.Pages);
-            _metadataService.UpdateMetadata(volume, _forceUpdate);
+            // _metadataService.UpdateMetadata(volume, _forceUpdate); // NOTE: Testing removing here. We do at the end of all DB work
         }

@@ -285,7 +285,7 @@ namespace API.Services.Tasks
             chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "";
             chapter.Range = info.Chapters;
             chapter.Pages = chapter.Files.Sum(f => f.Pages);
-            _metadataService.UpdateMetadata(chapter, _forceUpdate);
+            //_metadataService.UpdateMetadata(chapter, _forceUpdate); // NOTE: Testing removing here. We do at the end of all DB work
         }
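Both NOTE comments defer metadata work to "the end of all DB work". A sketch of that consolidated pass (hypothetical wiring; only UpdateMetadata(Series, bool) is confirmed by this diff, and the stub types stand in for Kavita's entities):

    using System.Collections.Generic;

    // Stubs so the sketch compiles standalone; in Kavita these live in
    // API.Entities and API.Interfaces.Services.
    public class Series { }
    public interface IMetadataService
    {
        void UpdateMetadata(Series series, bool forceUpdate);
    }

    public class ScanFinalizer
    {
        private readonly IMetadataService _metadataService;
        private readonly bool _forceUpdate;

        public ScanFinalizer(IMetadataService metadataService, bool forceUpdate)
        {
            _metadataService = metadataService;
            _forceUpdate = forceUpdate;
        }

        // One pass after scanning completes, replacing the per-volume and
        // per-chapter calls commented out above.
        public void FinalizeScan(IEnumerable<Series> scannedSeries)
        {
            foreach (var series in scannedSeries)
            {
                _metadataService.UpdateMetadata(series, _forceUpdate);
            }
        }
    }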