diff --git a/API.Tests/ParserTest.cs b/API.Tests/ParserTest.cs
index fb985a24a..2aa0f30ff 100644
--- a/API.Tests/ParserTest.cs
+++ b/API.Tests/ParserTest.cs
@@ -50,6 +50,8 @@ namespace API.Tests
[InlineData("VanDread-v01-c001[MD].zip", "1")]
[InlineData("Ichiban_Ushiro_no_Daimaou_v04_ch27_[VISCANS].zip", "4")]
[InlineData("Mob Psycho 100 v02 (2019) (Digital) (Shizu).cbz", "2")]
+ [InlineData("Kodomo no Jikan vol. 1.cbz", "1")]
+ [InlineData("Kodomo no Jikan vol. 10.cbz", "10")]
public void ParseVolumeTest(string filename, string expected)
{
Assert.Equal(expected, ParseVolume(filename));
diff --git a/API/Archive/Archive.cs b/API/Archive/Archive.cs
new file mode 100644
index 000000000..146ae3ffc
--- /dev/null
+++ b/API/Archive/Archive.cs
@@ -0,0 +1,41 @@
+using System;
+using System.IO;
+using System.IO.Compression;
+using SharpCompress.Archives;
+
+namespace API.Archive
+{
+ public static class Archive
+ {
+ /// <summary>
+ /// Checks if a File can be opened. Requires up to 2 opens of the filestream.
+ /// </summary>
+ /// <param name="archivePath"></param>
+ /// <returns></returns>
+ public static ArchiveLibrary CanOpen(string archivePath)
+ {
+ if (!File.Exists(archivePath) || !Parser.Parser.IsArchive(archivePath)) return ArchiveLibrary.NotSupported;
+
+ try
+ {
+ using var a2 = ZipFile.OpenRead(archivePath);
+ return ArchiveLibrary.Default;
+
+ }
+ catch (Exception)
+ {
+ try
+ {
+ using var a1 = ArchiveFactory.Open(archivePath);
+ return ArchiveLibrary.SharpCompress;
+ }
+ catch (Exception)
+ {
+ return ArchiveLibrary.NotSupported;
+ }
+ }
+ }
+
+
+ }
+}
\ No newline at end of file
diff --git a/API/Archive/ArchiveLibrary.cs b/API/Archive/ArchiveLibrary.cs
new file mode 100644
index 000000000..2d05a7a55
--- /dev/null
+++ b/API/Archive/ArchiveLibrary.cs
@@ -0,0 +1,12 @@
+namespace API.Archive
+{
+ /// <summary>
+ /// Represents which library should handle opening this archive
+ /// </summary>
+ public enum ArchiveLibrary
+ {
+ NotSupported = 0,
+ SharpCompress = 1,
+ Default = 2
+ }
+}
\ No newline at end of file
diff --git a/API/Archive/ArchiveMetadata.cs b/API/Archive/ArchiveMetadata.cs
new file mode 100644
index 000000000..3f6b5d03b
--- /dev/null
+++ b/API/Archive/ArchiveMetadata.cs
@@ -0,0 +1,10 @@
+namespace API.Archive
+{
+ public class ArchiveMetadata
+ {
+ public byte[] CoverImage { get; set; }
+ public string Summary { get; set; }
+ public int Pages { get; set; }
+ //public string Format { get; set; }
+ }
+}
\ No newline at end of file
diff --git a/API/DTOs/MangaFileDto.cs b/API/DTOs/MangaFileDto.cs
index d7f5d5034..786f85df7 100644
--- a/API/DTOs/MangaFileDto.cs
+++ b/API/DTOs/MangaFileDto.cs
@@ -5,7 +5,7 @@ namespace API.DTOs
public class MangaFileDto
{
public string FilePath { get; init; }
- public int NumberOfPages { get; init; }
+ public int Pages { get; init; }
public MangaFormat Format { get; init; }
}
diff --git a/API/Interfaces/Services/IArchiveService.cs b/API/Interfaces/Services/IArchiveService.cs
index 74f01279b..427b16f37 100644
--- a/API/Interfaces/Services/IArchiveService.cs
+++ b/API/Interfaces/Services/IArchiveService.cs
@@ -1,4 +1,6 @@
-namespace API.Interfaces.Services
+using API.Archive;
+
+namespace API.Interfaces.Services
{
public interface IArchiveService
{
@@ -7,5 +9,6 @@
byte[] GetCoverImage(string filepath, bool createThumbnail = false);
bool IsValidArchive(string archivePath);
string GetSummaryInfo(string archivePath);
+ ArchiveMetadata GetArchiveData(string archivePath, bool createThumbnail);
}
}
\ No newline at end of file
diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs
index 785de6f3b..aad2bd557 100644
--- a/API/Parser/Parser.cs
+++ b/API/Parser/Parser.cs
@@ -26,6 +26,10 @@ namespace API.Parser
new Regex(
@"(?<Series>.*)(\b|_)v(?<Volume>\d+(-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
+ // Kodomo no Jikan vol. 10
+ new Regex(
+ @"(?<Series>.*)(\b|_)(vol\.? ?)(?<Volume>\d+(-\d+)?)",
+ RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
new Regex(
@"(vol\.? ?)(?<Volume>0*[1-9]+)",
diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs
index 4037a3ea5..f608295c6 100644
--- a/API/Services/ArchiveService.cs
+++ b/API/Services/ArchiveService.cs
@@ -1,9 +1,13 @@
using System;
+using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
+using System.IO.Compression;
using System.Linq;
using System.Xml.Serialization;
+using API.Archive;
+using API.Extensions;
using API.Interfaces.Services;
using API.Services.Tasks;
using Microsoft.Extensions.Logging;
@@ -42,37 +46,50 @@ namespace API.Services
var count = 0;
try
{
- using Stream stream = File.OpenRead(archivePath);
- using (var reader = ReaderFactory.Open(stream))
- {
- try
- {
- _logger.LogDebug("{ArchivePath}'s Type: {ArchiveType}", archivePath, reader.ArchiveType);
- }
- catch (InvalidOperationException ex)
- {
- _logger.LogError(ex, "Could not parse the archive. Please validate it is not corrupted");
- return 0;
- }
-
- while (reader.MoveToNextEntry())
- {
- if (!reader.Entry.IsDirectory && Parser.Parser.IsImage(reader.Entry.Key))
- {
- count++;
- }
- }
- }
+ using var archive = ArchiveFactory.Open(archivePath);
+ return archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)).Count();
+
+ // using Stream stream = File.OpenRead(archivePath);
+ // using (var reader = ReaderFactory.Open(stream))
+ // {
+ // try
+ // {
+ // _logger.LogDebug("{ArchivePath}'s Type: {ArchiveType}", archivePath, reader.ArchiveType);
+ // }
+ // catch (InvalidOperationException ex)
+ // {
+ // _logger.LogError(ex, "Could not parse the archive. Please validate it is not corrupted, {ArchivePath}", archivePath);
+ // return 0;
+ // }
+ //
+ // while (reader.MoveToNextEntry())
+ // {
+ // if (!reader.Entry.IsDirectory && Parser.Parser.IsImage(reader.Entry.Key))
+ // {
+ // count++;
+ // }
+ // }
+ // }
}
catch (Exception ex)
{
- _logger.LogError(ex, "There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath);
+ _logger.LogError(ex, "[GetNumberOfPagesFromArchive] There was an exception when reading archive stream: {ArchivePath}. Defaulting to 0 pages", archivePath);
return 0;
}
return count;
}
+ public ArchiveMetadata GetArchiveData(string archivePath, bool createThumbnail)
+ {
+ return new ArchiveMetadata()
+ {
+ Pages = GetNumberOfPagesFromArchive(archivePath),
+ //Summary = GetSummaryInfo(archivePath),
+ CoverImage = GetCoverImage(archivePath, createThumbnail)
+ };
+ }
+
/// <summary>
/// Generates byte array of cover image.
/// Given a path to a compressed file (zip, rar, cbz, cbr, etc), will ensure the first image is returned unless
@@ -92,7 +109,7 @@ namespace API.Services
}
catch (Exception ex)
{
- _logger.LogError(ex, "There was an exception when reading archive stream: {Filepath}. Defaulting to no cover image", filepath);
+ _logger.LogError(ex, "[GetCoverImage] There was an exception when reading archive stream: {Filepath}. Defaulting to no cover image", filepath);
}
return Array.Empty<byte>();
@@ -140,7 +157,7 @@ namespace API.Services
}
catch (Exception ex)
{
- _logger.LogError(ex, "There was a critical error and prevented thumbnail generation. Defaulting to no cover image");
+ _logger.LogError(ex, "[CreateThumbnail] There was a critical error and prevented thumbnail generation. Defaulting to no cover image");
}
return Array.Empty<byte>();
@@ -187,6 +204,7 @@ namespace API.Services
return null;
}
+
public string GetSummaryInfo(string archivePath)
{
var summary = string.Empty;
@@ -228,11 +246,11 @@ namespace API.Services
return info.Summary;
}
- _logger.LogError("Could not parse archive file");
+ _logger.LogError("[GetSummaryInfo] Could not parse archive file: {Filepath}", archivePath);
}
catch (Exception ex)
{
- _logger.LogError(ex, "There was an exception when reading archive stream: {Filepath}", archivePath);
+ _logger.LogError(ex, "[GetSummaryInfo] There was an exception when reading archive stream: {Filepath}", archivePath);
}
return summary;
@@ -249,6 +267,9 @@ namespace API.Services
Overwrite = false
});
}
+
+ // using ZipArchive archive = ZipFile.OpenRead(archivePath);
+ // archive.ExtractToDirectory(extractPath, true);
}
/// <summary>
@@ -268,7 +289,7 @@ namespace API.Services
var sw = Stopwatch.StartNew();
using var archive = ArchiveFactory.Open(archivePath);
ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath);
- _logger.LogDebug("[Fallback] Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds);
+ _logger.LogDebug("Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds);
}
}
}
\ No newline at end of file
diff --git a/API/Services/MetadataService.cs b/API/Services/MetadataService.cs
index c762bdbf7..98a5660ed 100644
--- a/API/Services/MetadataService.cs
+++ b/API/Services/MetadataService.cs
@@ -36,8 +36,10 @@ namespace API.Services
var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault();
if (firstFile != null) chapter.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true);
}
+ // NOTE: Can I put page calculation here? chapter.Pages = chapter.Files.Sum(f => f.Pages);
}
+
public void UpdateMetadata(Volume volume, bool forceUpdate)
{
if (volume != null && ShouldFindCoverImage(volume.CoverImage, forceUpdate))
@@ -45,14 +47,23 @@ namespace API.Services
// TODO: Create a custom sorter for Chapters so it's consistent across the application
volume.Chapters ??= new List<Chapter>();
var firstChapter = volume.Chapters.OrderBy(x => Double.Parse(x.Number)).FirstOrDefault();
+
var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault();
- if (firstFile != null) volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true);
+ // Skip calculating Cover Image (I/O) if the chapter already has it set
+ if (firstChapter == null || ShouldFindCoverImage(firstChapter.CoverImage))
+ {
+ if (firstFile != null) volume.CoverImage = _archiveService.GetCoverImage(firstFile.FilePath, true);
+ }
+ else
+ {
+ volume.CoverImage = firstChapter.CoverImage;
+ }
}
}
public void UpdateMetadata(Series series, bool forceUpdate)
{
- // TODO: this doesn't actually invoke finding a new cover. Also all these should be groupped ideally so we limit
+ // TODO: this doesn't actually invoke finding a new cover. Also all these should be grouped ideally so we limit
// disk I/O to one method.
if (series == null) return;
if (ShouldFindCoverImage(series.CoverImage, forceUpdate))
diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs
index 686b96dc1..0a46bb383 100644
--- a/API/Services/Tasks/ScannerService.cs
+++ b/API/Services/Tasks/ScannerService.cs
@@ -224,7 +224,7 @@ namespace API.Services.Tasks
_logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
UpdateChapters(volume, infos);
volume.Pages = volume.Chapters.Sum(c => c.Pages);
- _metadataService.UpdateMetadata(volume, _forceUpdate);
+ // _metadataService.UpdateMetadata(volume, _forceUpdate); // NOTE: Testing removing here. We do at the end of all DB work
}
@@ -285,7 +285,7 @@ namespace API.Services.Tasks
chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "";
chapter.Range = info.Chapters;
chapter.Pages = chapter.Files.Sum(f => f.Pages);
- _metadataService.UpdateMetadata(chapter, _forceUpdate);
+ //_metadataService.UpdateMetadata(chapter, _forceUpdate); // NOTE: Testing removing here. We do at the end of all DB work
}