diff --git a/API.Tests/ParserTest.cs b/API.Tests/ParserTest.cs
index d551c6393..aa2c241a4 100644
--- a/API.Tests/ParserTest.cs
+++ b/API.Tests/ParserTest.cs
@@ -1,3 +1,4 @@
+using API.Parser;
using Xunit;
using static API.Parser.Parser;
@@ -15,8 +16,10 @@ namespace API.Tests
//[InlineData("Dance in the Vampire Bund v16-17 (Digital) (NiceDragon)", "16-17")]
[InlineData("Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz", "1")]
[InlineData("v001", "1")]
+ [InlineData("No Volume", "0")]
[InlineData("U12 (Under 12) Vol. 0001 Ch. 0001 - Reiwa Scans (gb)", "1")]
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip", "1")]
+ [InlineData("Tonikaku Cawaii [Volume 11].cbz", "11")]
public void ParseVolumeTest(string filename, string expected)
{
Assert.Equal(expected, ParseVolume(filename));
@@ -33,11 +36,20 @@ namespace API.Tests
[InlineData("v001", "")]
[InlineData("U12 (Under 12) Vol. 0001 Ch. 0001 - Reiwa Scans (gb)", "U12 (Under 12)")]
[InlineData("Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)", "Akame ga KILL! ZERO")]
+ [InlineData("APOSIMZ 017 (2018) (Digital) (danke-Empire).cbz", "APOSIMZ")]
+ [InlineData("Akiiro Bousou Biyori - 01.jpg", "Akiiro Bousou Biyori")]
+ [InlineData("Beelzebub_172_RHS.zip", "Beelzebub")]
+ [InlineData("Dr. STONE 136 (2020) (Digital) (LuCaZ).cbz", "Dr. STONE")]
+ [InlineData("Cynthia the Mission 29.rar", "Cynthia the Mission")]
+ [InlineData("Darling in the FranXX - Volume 01.cbz", "Darling in the FranXX")]
+ [InlineData("Darwin's Game - Volume 14 (F).cbz", "Darwin's Game")]
+ [InlineData("[BAA]_Darker_than_Black_c7.zip", "Darker than Black")]
+ [InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 19 [Dametrans].zip", "Kedouin Makoto - Corpse Party Musume")]
public void ParseSeriesTest(string filename, string expected)
{
Assert.Equal(expected, ParseSeries(filename));
}
-
+
[Theory]
[InlineData("Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)", "1")]
[InlineData("My Girlfriend Is Shobitch v01 - ch. 09 - pg. 008.png", "9")]
@@ -49,6 +61,8 @@ namespace API.Tests
[InlineData("c001", "1")]
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.12.zip", "12")]
[InlineData("Adding volume 1 with File: Ana Satsujin Vol. 1 Ch. 5 - Manga Box (gb).cbz", "5")]
+ [InlineData("Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz", "18")]
+ [InlineData("Cynthia The Mission - c000-006 (v06) [Desudesu&Brolen].zip", "0-6")]
public void ParseChaptersTest(string filename, string expected)
{
Assert.Equal(expected, ParseChapter(filename));
@@ -85,10 +99,11 @@ namespace API.Tests
[Theory]
[InlineData("test.cbz", true)]
- [InlineData("test.cbr", true)]
+ [InlineData("test.cbr", false)]
[InlineData("test.zip", true)]
- [InlineData("test.rar", true)]
+ [InlineData("test.rar", false)]
[InlineData("test.rar.!qb", false)]
+ [InlineData("[shf-ma-khs-aqs]negi_pa_vol15007.jpg", false)]
public void IsArchiveTest(string input, bool expected)
{
Assert.Equal(expected, IsArchive(input));
diff --git a/API/API.csproj b/API/API.csproj
index 73575c864..8c3278449 100644
--- a/API/API.csproj
+++ b/API/API.csproj
@@ -22,6 +22,7 @@
+
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
diff --git a/API/Controllers/AccountController.cs b/API/Controllers/AccountController.cs
index 71a8fcb69..1406ce1e7 100644
--- a/API/Controllers/AccountController.cs
+++ b/API/Controllers/AccountController.cs
@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
+using System.Linq;
using System.Threading.Tasks;
using API.Constants;
using API.DTOs;
@@ -76,15 +77,14 @@ namespace API.Controllers
if (registerDto.IsAdmin)
{
_logger.LogInformation($"{user.UserName} is being registered as admin. Granting access to all libraries.");
- var libraries = await _unitOfWork.LibraryRepository.GetLibrariesAsync();
+ var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesAsync()).ToList();
foreach (var lib in libraries)
{
lib.AppUsers ??= new List<AppUser>();
lib.AppUsers.Add(user);
}
+ if (libraries.Any() && !await _unitOfWork.Complete()) _logger.LogInformation("There was an issue granting library access. Please do this manually.");
}
-
- if (!await _unitOfWork.Complete()) _logger.LogInformation("There was an issue granting library access. Please do this manually.");
return new UserDto
{
@@ -97,7 +97,11 @@ namespace API.Controllers
public async Task<ActionResult<UserDto>> Login(LoginDto loginDto)
{
var user = await _userManager.Users
- .SingleOrDefaultAsync(x => x.UserName == loginDto.Username.ToLower());
+ .SingleOrDefaultAsync(x => x.NormalizedUserName == loginDto.Username.ToUpper());
+
+ var debugUsers = await _userManager.Users.Select(x => x.NormalizedUserName).ToListAsync();
+
+ _logger.LogInformation($"All Users: {String.Join(",", debugUsers)}");
if (user == null) return Unauthorized("Invalid username");
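
Note on the Login change: ASP.NET Core Identity keeps a canonical, upper-cased copy of every username in NormalizedUserName (via its default UpperInvariantLookupNormalizer), so comparing UserName against loginDto.Username.ToLower() only matched users who registered fully lower-cased names. A minimal sketch of the two equivalent lookups, assuming the default normalizer:

    // Query the normalized column directly, as the patch now does:
    var user = await _userManager.Users
        .SingleOrDefaultAsync(x => x.NormalizedUserName == loginDto.Username.ToUpper());

    // Or let Identity apply its ILookupNormalizer internally:
    var sameUser = await _userManager.FindByNameAsync(loginDto.Username);
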
diff --git a/API/Controllers/SeriesController.cs b/API/Controllers/SeriesController.cs
index 445a28107..fcd945c7c 100644
--- a/API/Controllers/SeriesController.cs
+++ b/API/Controllers/SeriesController.cs
@@ -60,6 +60,14 @@ namespace API.Controllers
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
return Ok(await _unitOfWork.SeriesRepository.GetVolumeDtoAsync(volumeId, user.Id));
}
+
+ [Authorize(Policy = "RequireAdminRole")]
+ [HttpPost("scan")]
+ public ActionResult Scan(int libraryId, int seriesId)
+ {
+ _taskScheduler.ScanSeries(libraryId, seriesId);
+ return Ok();
+ }
[HttpPost("update-rating")]
public async Task UpdateSeriesRating(UpdateSeriesRatingDto updateSeriesRatingDto)
diff --git a/API/Controllers/SettingsController.cs b/API/Controllers/SettingsController.cs
index ad7b79fd3..910b10f89 100644
--- a/API/Controllers/SettingsController.cs
+++ b/API/Controllers/SettingsController.cs
@@ -57,8 +57,7 @@ namespace API.Controllers
// TODO: Figure out how to handle a change. This means that on clean, we need to clean up old cache
// directory and new one, but what if someone is reading?
// I can just clean both always, /cache/ is an owned folder, so users shouldn't use it.
-
- _taskScheduler.ClearCache();
+
//_dataContext.ServerSetting.Update
return BadRequest("Not Implemented");
diff --git a/API/DTOs/RegisterDto.cs b/API/DTOs/RegisterDto.cs
index 61fd26f96..5ff816522 100644
--- a/API/DTOs/RegisterDto.cs
+++ b/API/DTOs/RegisterDto.cs
@@ -7,7 +7,7 @@ namespace API.DTOs
[Required]
public string Username { get; set; }
[Required]
- [StringLength(8, MinimumLength = 4)]
+ [StringLength(16, MinimumLength = 4)]
public string Password { get; set; }
public bool IsAdmin { get; set; }
}
diff --git a/API/DTOs/ResetPasswordDto.cs b/API/DTOs/ResetPasswordDto.cs
index f486f4349..535d0df2f 100644
--- a/API/DTOs/ResetPasswordDto.cs
+++ b/API/DTOs/ResetPasswordDto.cs
@@ -7,7 +7,7 @@ namespace API.DTOs
[Required]
public string UserName { get; init; }
[Required]
- [StringLength(8, MinimumLength = 4)]
+ [StringLength(16, MinimumLength = 4)]
public string Password { get; init; }
}
}
\ No newline at end of file
diff --git a/API/Data/LibraryRepository.cs b/API/Data/LibraryRepository.cs
index a85435df2..436736439 100644
--- a/API/Data/LibraryRepository.cs
+++ b/API/Data/LibraryRepository.cs
@@ -53,15 +53,6 @@ namespace API.Data
.ToListAsync();
}
- public async Task<Library> GetLibraryForNameAsync(string libraryName)
- {
- return await _context.Library
- .Where(x => x.Name == libraryName)
- .Include(f => f.Folders)
- .Include(s => s.Series)
- .SingleAsync();
- }
-
public async Task<bool> DeleteLibrary(int libraryId)
{
var library = await GetLibraryForIdAsync(libraryId);
diff --git a/API/Data/Seed.cs b/API/Data/Seed.cs
index 86d70c197..9ad42b61c 100644
--- a/API/Data/Seed.cs
+++ b/API/Data/Seed.cs
@@ -31,13 +31,14 @@ namespace API.Data
public static async Task SeedSettings(DataContext context)
{
- IList<ServerSetting> defaultSettings = new List<ServerSetting>()
- {
- new ServerSetting() {Key = "CacheDirectory", Value = CacheService.CacheDirectory}
- };
-
- await context.ServerSetting.AddRangeAsync(defaultSettings);
- await context.SaveChangesAsync();
+ // NOTE: This needs to check if settings already exists before inserting.
+ // IList<ServerSetting> defaultSettings = new List<ServerSetting>()
+ // {
+ // new ServerSetting() {Key = "CacheDirectory", Value = CacheService.CacheDirectory}
+ // };
+ //
+ // await context.ServerSetting.AddRangeAsync(defaultSettings);
+ // await context.SaveChangesAsync();
// await context.ServerSetting.AddAsync(new ServerSetting
// {
// CacheDirectory = CacheService.CacheDirectory
diff --git a/API/Data/SeriesRepository.cs b/API/Data/SeriesRepository.cs
index a8a5c1f98..77e9b579f 100644
--- a/API/Data/SeriesRepository.cs
+++ b/API/Data/SeriesRepository.cs
@@ -160,7 +160,15 @@ namespace API.Data
{
return await _context.Volume.SingleOrDefaultAsync(x => x.Id == volumeId);
}
-
+
+ public async Task GetSeriesByIdAsync(int seriesId)
+ {
+ return await _context.Series
+ .Include(s => s.Volumes)
+ .Where(s => s.Id == seriesId)
+ .SingleOrDefaultAsync();
+ }
+
private async Task AddSeriesModifiers(int userId, List<SeriesDto> series)
{
var userProgress = await _context.AppUserProgresses
diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs
index fba75e148..318c87340 100644
--- a/API/Extensions/ApplicationServiceExtensions.cs
+++ b/API/Extensions/ApplicationServiceExtensions.cs
@@ -8,6 +8,7 @@ using Hangfire.LiteDB;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
namespace API.Extensions
{
@@ -21,6 +22,7 @@ namespace API.Extensions
services.AddScoped();
services.AddScoped();
services.AddScoped();
+ services.AddScoped<IScannerService, ScannerService>();
@@ -29,6 +31,12 @@ namespace API.Extensions
options.UseSqlite(config.GetConnectionString("DefaultConnection"));
});
+ services.AddLogging(loggingBuilder =>
+ {
+ var loggingSection = config.GetSection("Logging");
+ loggingBuilder.AddFile(loggingSection);
+ });
+
services.AddHangfire(configuration => configuration
.UseSimpleAssemblyNameTypeSerializer()
.UseRecommendedSerializerSettings()
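
Note: AddFile(IConfigurationSection) is not part of Microsoft.Extensions.Logging itself; it comes from the file-logging package referenced in the API.csproj change above. The Path/Append/FileSizeLimitBytes/MaxRollingFiles keys added to appsettings.Development.json below match the configuration shape of a provider such as NReco.Logging.File (an assumption; the package name is not visible in this diff). The wiring amounts to:

    // The provider reads the "File" subsection of the section it is handed,
    // so passing the parent "Logging" section is enough. With the values added
    // below: write to kavita.log, append across restarts, no size cap
    // (FileSizeLimitBytes = 0) and no rolling files (MaxRollingFiles = 0).
    services.AddLogging(builder => builder.AddFile(config.GetSection("Logging")));
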
diff --git a/API/IO/ImageProvider.cs b/API/IO/ImageProvider.cs
deleted file mode 100644
index ebfe85d76..000000000
--- a/API/IO/ImageProvider.cs
+++ /dev/null
@@ -1,66 +0,0 @@
-using System;
-using System.IO;
-using System.IO.Compression;
-using System.Linq;
-using API.Extensions;
-using NetVips;
-
-namespace API.IO
-{
- public static class ImageProvider
- {
- /// <summary>
- /// Generates byte array of cover image.
- /// Given a path to a compressed file (zip, rar, cbz, cbr, etc), will ensure the first image is returned unless
- /// a folder.extension exists in the root directory of the compressed file.
- /// </summary>
- /// <param name="filepath"></param>
- /// <param name="createThumbnail">Create a smaller variant of file extracted from archive. Archive images are usually 1MB each.</param>
- /// <returns></returns>
- public static byte[] GetCoverImage(string filepath, bool createThumbnail = false)
- {
- if (string.IsNullOrEmpty(filepath) || !File.Exists(filepath) || !Parser.Parser.IsArchive(filepath)) return Array.Empty<byte>();
-
- using ZipArchive archive = ZipFile.OpenRead(filepath);
- if (!archive.HasFiles()) return Array.Empty<byte>();
-
-
-
- var folder = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
- var entry = archive.Entries.Where(x => Path.HasExtension(x.FullName)).OrderBy(x => x.FullName).ToList()[0];
-
- if (folder != null)
- {
- entry = folder;
- }
-
- if (createThumbnail)
- {
- try
- {
- using var stream = entry.Open();
- var thumbnail = Image.ThumbnailStream(stream, 320);
- Console.WriteLine(thumbnail.ToString());
- return thumbnail.WriteToBuffer(".jpg");
- }
- catch (Exception ex)
- {
- Console.WriteLine("There was a critical error and prevented thumbnail generation.");
- Console.WriteLine(ex.Message);
- }
- }
-
- return ExtractEntryToImage(entry);
- }
-
- private static byte[] ExtractEntryToImage(ZipArchiveEntry entry)
- {
- using var stream = entry.Open();
- using var ms = new MemoryStream();
- stream.CopyTo(ms);
- var data = ms.ToArray();
-
- return data;
- }
- }
-}
\ No newline at end of file
diff --git a/API/Interfaces/IDirectoryService.cs b/API/Interfaces/IDirectoryService.cs
index 93e7c1e7b..5f9958f03 100644
--- a/API/Interfaces/IDirectoryService.cs
+++ b/API/Interfaces/IDirectoryService.cs
@@ -13,35 +13,6 @@ namespace API.Interfaces
/// <returns>List of folder names</returns>
IEnumerable<string> ListDirectory(string rootPath);
- /// <summary>
- /// Given a library id, scans folders for said library. Parses files and generates DB updates. Will overwrite
- /// cover images if forceUpdate is true.
- /// </summary>
- /// <param name="libraryId">Library to scan against</param>
- /// <param name="forceUpdate">Force overwriting for cover images</param>
- void ScanLibrary(int libraryId, bool forceUpdate);
-
- void ScanLibraries();
-
- /// <summary>
- /// Returns the path a volume would be extracted to.
- /// Deprecated.
- /// </summary>
- /// <param name="volumeId"></param>
- /// <returns></returns>
- string GetExtractPath(int volumeId);
-
Task<ImageDto> ReadImageAsync(string imagePath);
-
- /// <summary>
- /// Extracts an archive to a temp cache directory. Returns path to new directory. If temp cache directory already exists,
- /// will return that without performing an extraction. Returns empty string if there are any invalidations which would
- /// prevent operations to perform correctly (missing archivePath file, empty archive, etc).
- /// </summary>
- /// <param name="archivePath">A valid file to an archive file.</param>
- /// <param name="extractPath">Path to extract to</param>
- /// <returns></returns>
- string ExtractArchive(string archivePath, string extractPath);
-
}
}
\ No newline at end of file
diff --git a/API/Interfaces/ILibraryRepository.cs b/API/Interfaces/ILibraryRepository.cs
index 1a0d3f778..d1de28288 100644
--- a/API/Interfaces/ILibraryRepository.cs
+++ b/API/Interfaces/ILibraryRepository.cs
@@ -14,7 +14,6 @@ namespace API.Interfaces
Task<Library> GetLibraryForIdAsync(int libraryId);
Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName);
Task<IEnumerable<Library>> GetLibrariesAsync();
- Task<Library> GetLibraryForNameAsync(string libraryName);
Task<bool> DeleteLibrary(int libraryId);
}
}
\ No newline at end of file
diff --git a/API/Interfaces/IScannerService.cs b/API/Interfaces/IScannerService.cs
new file mode 100644
index 000000000..8d4399eb7
--- /dev/null
+++ b/API/Interfaces/IScannerService.cs
@@ -0,0 +1,25 @@
+using System.Threading.Tasks;
+using API.DTOs;
+
+namespace API.Interfaces
+{
+ public interface IScannerService
+ {
+ /// <summary>
+ /// Given a library id, scans folders for said library. Parses files and generates DB updates. Will overwrite
+ /// cover images if forceUpdate is true.
+ /// </summary>
+ /// <param name="libraryId">Library to scan against</param>
+ /// <param name="forceUpdate">Force overwriting for cover images</param>
+ void ScanLibrary(int libraryId, bool forceUpdate);
+
+ void ScanLibraries();
+
+ /// <summary>
+ /// Performs a forced scan of just a series folder.
+ /// </summary>
+ /// <param name="libraryId"></param>
+ /// <param name="seriesId"></param>
+ void ScanSeries(int libraryId, int seriesId);
+ }
+}
\ No newline at end of file
diff --git a/API/Interfaces/ISeriesRepository.cs b/API/Interfaces/ISeriesRepository.cs
index d3be26a60..db758b2e5 100644
--- a/API/Interfaces/ISeriesRepository.cs
+++ b/API/Interfaces/ISeriesRepository.cs
@@ -23,6 +23,7 @@ namespace API.Interfaces
Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(int[] seriesIds);
Task<bool> DeleteSeriesAsync(int seriesId);
Task<Volume> GetVolumeByIdAsync(int volumeId);
-
+ Task<Series> GetSeriesByIdAsync(int seriesId);
+
}
}
\ No newline at end of file
diff --git a/API/Interfaces/ITaskScheduler.cs b/API/Interfaces/ITaskScheduler.cs
index c1ea7e336..b19dc9291 100644
--- a/API/Interfaces/ITaskScheduler.cs
+++ b/API/Interfaces/ITaskScheduler.cs
@@ -2,12 +2,8 @@
{
public interface ITaskScheduler
{
- public void ScanLibrary(int libraryId, bool forceUpdate = false);
-
- public void CleanupVolumes(int[] volumeIds);
- /// <summary>
- /// Clears the cache directory entirely.
- /// </summary>
- public void ClearCache();
+ void ScanLibrary(int libraryId, bool forceUpdate = false);
+ void CleanupVolumes(int[] volumeIds);
+ void ScanSeries(int libraryId, int seriesId);
}
}
\ No newline at end of file
diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs
index 6f2d4eaf2..df0e2d08d 100644
--- a/API/Parser/Parser.cs
+++ b/API/Parser/Parser.cs
@@ -7,7 +7,7 @@ namespace API.Parser
{
public static class Parser
{
- public static readonly string MangaFileExtensions = @"\.cbz|\.cbr|\.png|\.jpeg|\.jpg|\.zip|\.rar";
+ public static readonly string MangaFileExtensions = @"\.cbz|\.zip"; // |\.rar|\.cbr
public static readonly string ImageFileExtensions = @"\.png|\.jpeg|\.jpg|\.gif";
//?: is a non-capturing group in C#, else anything in () will be a group
@@ -22,6 +22,10 @@ namespace API.Parser
new Regex(
@"(vol. ?)(?0*[1-9]+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
+ // Tonikaku Cawaii [Volume 11].cbz
+ new Regex(
+ @"(volume )(?0?[1-9]+)",
+ RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Dance in the Vampire Bund v16-17
new Regex(
@@ -49,7 +53,7 @@ namespace API.Parser
// Black Bullet
new Regex(
- @"(?.*)(\b|_)(v|vo|c)",
+ @"(?.*)(\b|_)(v|vo|c|volume)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)
@@ -58,16 +62,18 @@ namespace API.Parser
@"(?.*)\(\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
- // [BAA]_Darker_than_Black_c1 (This is very greedy, make sure it's always last)
+ // [BAA]_Darker_than_Black_c1 (This is very greedy, make sure it's close to last)
new Regex(
@"(?.*)(\b|_)(c)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
+ // Akiiro Bousou Biyori - 01.jpg
+ new Regex(
+ @"(?.*)(\b|_)(\d+)",
+ RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Darker Than Black (This takes anything, we have to account for perfectly named folders)
new Regex(
@"(?.*)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
-
-
};
private static readonly Regex[] ReleaseGroupRegex = new[]
@@ -121,16 +127,21 @@ namespace API.Parser
var matches = regex.Matches(filename);
foreach (Match match in matches)
{
- if (match.Groups["Volume"] != Match.Empty)
+ // if (match.Groups["Volume"] != Match.Empty)
+ // {
+ //
+ // }
+ if (match.Success && match.Groups["Series"].Value != string.Empty)
{
- return CleanTitle(match.Groups["Series"].Value);
+ return CleanTitle(match.Groups["Series"].Value);
}
+
}
}
- Console.WriteLine("Unable to parse {0}", filename);
- return "";
+ Console.WriteLine("Unable to parse Series of {0}", filename);
+ return string.Empty;
}
public static string ParseVolume(string filename)
@@ -148,8 +159,8 @@ namespace API.Parser
}
}
- Console.WriteLine("Unable to parse {0}", filename);
- return "";
+ Console.WriteLine("Unable to parse Volume of {0}", filename);
+ return "0";
}
public static string ParseChapter(string filename)
@@ -200,7 +211,12 @@ namespace API.Parser
}
}
- title = title.Replace("_", " ");
+ title = title.Replace("_", " ").Trim();
+ if (title.EndsWith("-"))
+ {
+ title = title.Substring(0, title.Length - 1);
+ }
+
return title.Trim();
}
@@ -235,7 +251,8 @@ namespace API.Parser
public static string RemoveLeadingZeroes(string title)
{
- return title.TrimStart(new[] { '0' });
+ var ret = title.TrimStart(new[] { '0' });
+ return ret == string.Empty ? "0" : ret;
}
public static bool IsArchive(string filePath)
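
Taken together, the Parser changes make "no volume" an explicit "0" sentinel, recognize bracketed volume markers, and add a bare-number fallback for series names. A quick sanity sketch of the expected behavior, using filenames from the test data above:

    // Each Parse* method tries its regex list in order and returns on the first match.
    ParseVolume("Tonikaku Cawaii [Volume 11].cbz"); // "11" via the new (volume ) pattern
    ParseVolume("No Volume");                       // "0" - the new unknown-volume sentinel
    ParseSeries("Akiiro Bousou Biyori - 01.jpg");   // "Akiiro Bousou Biyori" via the (\d+) fallback
    RemoveLeadingZeroes("000");                     // "0" - previously trimmed all the way to ""
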
diff --git a/API/Services/CacheService.cs b/API/Services/CacheService.cs
index 87e85ce9a..85774e128 100644
--- a/API/Services/CacheService.cs
+++ b/API/Services/CacheService.cs
@@ -1,5 +1,6 @@
using System;
using System.IO;
+using System.IO.Compression;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
@@ -28,6 +29,7 @@ namespace API.Services
private bool CacheDirectoryIsAccessible()
{
+ _logger.LogDebug($"Checking if valid Cache directory: {CacheDirectory}");
var di = new DirectoryInfo(CacheDirectory);
return di.Exists;
}
@@ -43,7 +45,7 @@ namespace API.Services
{
var extractPath = GetVolumeCachePath(volumeId, file);
- _directoryService.ExtractArchive(file.FilePath, extractPath);
+ ExtractArchive(file.FilePath, extractPath);
}
return volume;
@@ -88,6 +90,45 @@ namespace API.Services
}
_logger.LogInformation("Cache directory purged");
}
+
+ /// <summary>
+ /// Extracts an archive to a temp cache directory. Returns path to new directory. If temp cache directory already exists,
+ /// will return that without performing an extraction. Returns empty string if there are any invalidations which would
+ /// prevent operations to perform correctly (missing archivePath file, empty archive, etc).
+ /// </summary>
+ /// <param name="archivePath">A valid file to an archive file.</param>
+ /// <param name="extractPath">Path to extract to</param>
+ /// <returns></returns>
+ private string ExtractArchive(string archivePath, string extractPath)
+ {
+ // NOTE: This is used by Cache Service
+ if (!File.Exists(archivePath) || !Parser.Parser.IsArchive(archivePath))
+ {
+ _logger.LogError($"Archive {archivePath} could not be found.");
+ return "";
+ }
+
+ if (Directory.Exists(extractPath))
+ {
+ _logger.LogDebug($"Archive {archivePath} has already been extracted. Returning existing folder.");
+ return extractPath;
+ }
+
+ using ZipArchive archive = ZipFile.OpenRead(archivePath);
+ // TODO: Throw error if we couldn't extract
+ var needsFlattening = archive.Entries.Count > 0 && !Path.HasExtension(archive.Entries.ElementAt(0).FullName);
+ if (!archive.HasFiles() && !needsFlattening) return "";
+
+ archive.ExtractToDirectory(extractPath);
+ _logger.LogDebug($"Extracting archive to {extractPath}");
+
+ if (!needsFlattening) return extractPath;
+
+ _logger.LogInformation("Extracted archive is nested in root folder, flattening...");
+ new DirectoryInfo(extractPath).Flatten();
+
+ return extractPath;
+ }
private string GetVolumeCachePath(int volumeId, MangaFile file)
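
The needsFlattening heuristic above reads: if the archive's first entry has no file extension, it is assumed to be a directory, meaning the images sit one level down (e.g. "My Series v01/001.jpg") and the extracted tree must be flattened. The DirectoryInfo.Flatten() extension lives in API.Extensions and is not shown in this diff; a plausible minimal implementation (hypothetical, for illustration only) would be:

    using System.IO;

    public static class DirectoryInfoExtensions
    {
        // Move every nested file up into the root, then drop the emptied folders.
        public static void Flatten(this DirectoryInfo root)
        {
            foreach (var file in root.GetFiles("*", SearchOption.AllDirectories))
            {
                if (file.DirectoryName == root.FullName) continue; // already top-level
                var dest = Path.Combine(root.FullName, file.Name);
                if (!File.Exists(dest)) file.MoveTo(dest);
            }
            foreach (var dir in root.GetDirectories())
            {
                dir.Delete(true);
            }
        }
    }
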
diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs
index 9c619408a..d910c5c1b 100644
--- a/API/Services/DirectoryService.cs
+++ b/API/Services/DirectoryService.cs
@@ -1,38 +1,20 @@
using System;
-using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Immutable;
-using System.Diagnostics;
using System.IO;
-using System.IO.Compression;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using API.DTOs;
-using API.Entities;
-using API.Extensions;
using API.Interfaces;
-using API.IO;
-using API.Parser;
-using Microsoft.Extensions.Logging;
using NetVips;
namespace API.Services
{
public class DirectoryService : IDirectoryService
{
- private readonly ILogger<DirectoryService> _logger;
- private readonly IUnitOfWork _unitOfWork;
- private ConcurrentDictionary<string, ConcurrentBag<ParserInfo>> _scannedSeries;
-
- public DirectoryService(ILogger<DirectoryService> logger, IUnitOfWork unitOfWork)
- {
- _logger = logger;
- _unitOfWork = unitOfWork;
- }
-
/// <summary>
/// Given a set of regex search criteria, get files in the given path.
/// </summary>
@@ -69,302 +51,23 @@ namespace API.Services
return dirs;
}
-
- /// <summary>
- /// Processes files found during a library scan. Generates a collection of series->volume->files for DB processing later.
- /// </summary>
- /// <param name="path">Path of a file</param>
- private void Process(string path)
- {
- var fileName = Path.GetFileName(path);
- _logger.LogDebug($"Parsing file {fileName}");
-
- var info = Parser.Parser.Parse(fileName);
- info.FullFilePath = path;
- if (info.Volumes == string.Empty)
- {
- return;
- }
-
- ConcurrentBag<ParserInfo> newBag = new ConcurrentBag<ParserInfo>();
- if (_scannedSeries.TryGetValue(info.Series, out var tempBag))
- {
- var existingInfos = tempBag.ToArray();
- foreach (var existingInfo in existingInfos)
- {
- newBag.Add(existingInfo);
- }
- }
- else
- {
- tempBag = new ConcurrentBag<ParserInfo>();
- }
-
- newBag.Add(info);
-
- if (!_scannedSeries.TryUpdate(info.Series, newBag, tempBag))
- {
- _scannedSeries.TryAdd(info.Series, newBag);
- }
- }
- private Series UpdateSeries(Series series, ParserInfo[] infos, bool forceUpdate)
+ public async Task<ImageDto> ReadImageAsync(string imagePath)
{
- var volumes = UpdateVolumes(series, infos, forceUpdate);
- series.Volumes = volumes;
- series.Pages = volumes.Sum(v => v.Pages);
- if (series.CoverImage == null || forceUpdate)
- {
- series.CoverImage = volumes.OrderBy(x => x.Number).FirstOrDefault()?.CoverImage;
- }
- if (string.IsNullOrEmpty(series.Summary) || forceUpdate)
- {
- series.Summary = ""; // TODO: Check if comicInfo.xml in file and parse metadata out.
- }
-
+ using var image = Image.NewFromFile(imagePath);
- return series;
- }
-
- private MangaFile CreateMangaFile(ParserInfo info)
- {
- _logger.LogDebug($"Creating File Entry for {info.FullFilePath}");
- int.TryParse(info.Chapters, out var chapter);
- _logger.LogDebug($"Found Chapter: {chapter}");
- return new MangaFile()
+ return new ImageDto
{
- FilePath = info.FullFilePath,
- Chapter = chapter,
- Format = info.Format,
- NumberOfPages = GetNumberOfPagesFromArchive(info.FullFilePath)
+ Content = await File.ReadAllBytesAsync(imagePath),
+ Filename = Path.GetFileNameWithoutExtension(imagePath),
+ FullPath = Path.GetFullPath(imagePath),
+ Width = image.Width,
+ Height = image.Height,
+ Format = image.Format
};
}
- /// <summary>
- /// Creates or Updates volumes for a given series
- /// </summary>
- /// <param name="series">Series wanting to be updated</param>
- /// <param name="infos">Parser info</param>
- /// <param name="forceUpdate">Forces metadata update (cover image) even if it's already been set.</param>
- /// <returns>Updated Volumes for given series</returns>
- private ICollection<Volume> UpdateVolumes(Series series, ParserInfo[] infos, bool forceUpdate)
- {
- ICollection<Volume> volumes = new List<Volume>();
- IList<Volume> existingVolumes = _unitOfWork.SeriesRepository.GetVolumes(series.Id).ToList();
-
- foreach (var info in infos)
- {
- var existingVolume = existingVolumes.SingleOrDefault(v => v.Name == info.Volumes);
- if (existingVolume != null)
- {
- var existingFile = existingVolume.Files.SingleOrDefault(f => f.FilePath == info.FullFilePath);
- if (existingFile != null)
- {
- existingFile.Chapter = Int32.Parse(info.Chapters);
- existingFile.Format = info.Format;
- existingFile.NumberOfPages = GetNumberOfPagesFromArchive(info.FullFilePath);
- }
- else
- {
- existingVolume.Files.Add(CreateMangaFile(info));
- }
-
- volumes.Add(existingVolume);
- }
- else
- {
- existingVolume = volumes.SingleOrDefault(v => v.Name == info.Volumes);
- if (existingVolume != null)
- {
- existingVolume.Files.Add(CreateMangaFile(info));
- }
- else
- {
- var vol = new Volume()
- {
- Name = info.Volumes,
- Number = Int32.Parse(info.Volumes),
- Files = new List<MangaFile>()
- {
- CreateMangaFile(info)
- }
- };
- volumes.Add(vol);
- }
- }
-
- Console.WriteLine($"Adding volume {volumes.Last().Number} with File: {info.Filename}");
- }
-
- foreach (var volume in volumes)
- {
- if (forceUpdate || volume.CoverImage == null || !volume.Files.Any())
- {
- var firstFile = volume.Files.OrderBy(x => x.Chapter).FirstOrDefault()?.FilePath;
- volume.CoverImage = ImageProvider.GetCoverImage(firstFile, true);
- }
-
- volume.Pages = volume.Files.Sum(x => x.NumberOfPages);
- }
-
- return volumes;
- }
-
- public void ScanLibraries()
- {
- var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList();
- foreach (var lib in libraries)
- {
- ScanLibrary(lib.Id, false);
- }
- }
-
- public void ScanLibrary(int libraryId, bool forceUpdate)
- {
- var sw = Stopwatch.StartNew();
- Library library;
- try
- {
- library = Task.Run(() => _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId)).Result;
- }
- catch (Exception ex)
- {
- // This usually only fails if user is not authenticated.
- _logger.LogError($"There was an issue fetching Library {libraryId}.", ex);
- return;
- }
-
- _scannedSeries = new ConcurrentDictionary<string, ConcurrentBag<ParserInfo>>();
- _logger.LogInformation($"Beginning scan on {library.Name}");
-
- var totalFiles = 0;
- foreach (var folderPath in library.Folders)
- {
- try {
- totalFiles = TraverseTreeParallelForEach(folderPath.Path, (f) =>
- {
- try
- {
- Process(f);
- }
- catch (FileNotFoundException exception)
- {
- _logger.LogError(exception, "The file could not be found");
- }
- });
- }
- catch (ArgumentException ex) {
- _logger.LogError(ex, $"The directory '{folderPath}' does not exist");
- }
- }
-
- var filtered = _scannedSeries.Where(kvp => !kvp.Value.IsEmpty);
- var series = filtered.ToImmutableDictionary(v => v.Key, v => v.Value);
-
- // Perform DB activities
- var allSeries = Task.Run(() => _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId)).Result.ToList();
- foreach (var seriesKey in series.Keys)
- {
- var mangaSeries = allSeries.SingleOrDefault(s => s.Name == seriesKey) ?? new Series
- {
- Name = seriesKey,
- OriginalName = seriesKey,
- SortName = seriesKey,
- Summary = ""
- };
- mangaSeries = UpdateSeries(mangaSeries, series[seriesKey].ToArray(), forceUpdate);
- _logger.LogInformation($"Created/Updated series {mangaSeries.Name} for {library.Name} library");
- library.Series ??= new List<Series>();
- library.Series.Add(mangaSeries);
- }
-
- // Remove series that are no longer on disk
- foreach (var existingSeries in allSeries)
- {
- if (!series.ContainsKey(existingSeries.Name) || !series.ContainsKey(existingSeries.OriginalName))
- {
- // Delete series, there is no file to backup any longer.
- library.Series.Remove(existingSeries);
- }
- }
-
- _unitOfWork.LibraryRepository.Update(library);
-
- if (Task.Run(() => _unitOfWork.Complete()).Result)
- {
- _logger.LogInformation($"Scan completed on {library.Name}. Parsed {series.Keys.Count()} series.");
- }
- else
- {
- _logger.LogError("There was a critical error that resulted in a failed scan. Please rescan.");
- }
-
- _scannedSeries = null;
- _logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds, library.Name);
- }
-
- public string GetExtractPath(int volumeId)
- {
- return Path.Join(Directory.GetCurrentDirectory(), $"../cache/{volumeId}/");
- }
-
- public string ExtractArchive(string archivePath, string extractPath)
- {
- if (!File.Exists(archivePath) || !Parser.Parser.IsArchive(archivePath))
- {
- _logger.LogError($"Archive {archivePath} could not be found.");
- return "";
- }
-
- if (Directory.Exists(extractPath))
- {
- _logger.LogDebug($"Archive {archivePath} has already been extracted. Returning existing folder.");
- return extractPath;
- }
-
- using ZipArchive archive = ZipFile.OpenRead(archivePath);
- // TODO: Throw error if we couldn't extract
- var needsFlattening = archive.Entries.Count > 0 && !Path.HasExtension(archive.Entries.ElementAt(0).FullName);
- if (!archive.HasFiles() && !needsFlattening) return "";
-
- archive.ExtractToDirectory(extractPath);
- _logger.LogDebug($"Extracting archive to {extractPath}");
-
- if (!needsFlattening) return extractPath;
-
- _logger.LogInformation("Extracted archive is nested in root folder, flattening...");
- new DirectoryInfo(extractPath).Flatten();
-
- return extractPath;
- }
-
- private int GetNumberOfPagesFromArchive(string archivePath)
- {
- if (!File.Exists(archivePath) || !Parser.Parser.IsArchive(archivePath))
- {
- _logger.LogError($"Archive {archivePath} could not be found.");
- return 0;
- }
-
- using ZipArchive archive = ZipFile.OpenRead(archivePath);
- return archive.Entries.Count(e => Parser.Parser.IsImage(e.FullName));
- }
-
-
- public async Task<ImageDto> ReadImageAsync(string imagePath)
- {
- using var image = Image.NewFromFile(imagePath);
-
- return new ImageDto
- {
- Content = await File.ReadAllBytesAsync(imagePath),
- Filename = Path.GetFileNameWithoutExtension(imagePath),
- FullPath = Path.GetFullPath(imagePath),
- Width = image.Width,
- Height = image.Height,
- Format = image.Format
- };
- }
+
/// <summary>
/// Recursively scans files and applies an action on them. This uses as many cores as the underlying PC has to speed
@@ -373,16 +76,16 @@ namespace API.Services
/// <param name="root">Directory to scan</param>
/// <param name="action">Action to apply on file path</param>
/// <returns></returns>
- private static int TraverseTreeParallelForEach(string root, Action<string> action)
+ public static int TraverseTreeParallelForEach(string root, Action<string> action)
{
- //Count of files traversed and timer for diagnostic output
- int fileCount = 0;
+ //Count of files traversed and timer for diagnostic output
+ var fileCount = 0;
// Determine whether to parallelize file processing on each folder based on processor count.
- int procCount = Environment.ProcessorCount;
+ var procCount = Environment.ProcessorCount;
// Data structure to hold names of subfolders to be examined for files.
- Stack<string> dirs = new Stack<string>();
+ var dirs = new Stack<string>();
if (!Directory.Exists(root)) {
throw new ArgumentException("The directory doesn't exist");
@@ -390,7 +93,7 @@ namespace API.Services
dirs.Push(root);
while (dirs.Count > 0) {
- string currentDir = dirs.Pop();
+ var currentDir = dirs.Pop();
string[] subDirs;
string[] files;
@@ -409,7 +112,9 @@ namespace API.Services
}
try {
- files = DirectoryService.GetFilesWithCertainExtensions(currentDir, Parser.Parser.MangaFileExtensions)
+ // TODO: In future, we need to take LibraryType into consideration for what extensions to allow (RAW should allow images)
+ // or we need to move this filtering to another area (Process)
+ files = GetFilesWithCertainExtensions(currentDir, Parser.Parser.MangaFileExtensions)
.ToArray();
}
catch (UnauthorizedAccessException e) {
diff --git a/API/Services/ScannerService.cs b/API/Services/ScannerService.cs
new file mode 100644
index 000000000..ade27b310
--- /dev/null
+++ b/API/Services/ScannerService.cs
@@ -0,0 +1,386 @@
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.Collections.Immutable;
+using System.Diagnostics;
+using System.IO;
+using System.IO.Compression;
+using System.Linq;
+using System.Threading.Tasks;
+using API.Entities;
+using API.Extensions;
+using API.Interfaces;
+using API.Parser;
+using Microsoft.Extensions.Logging;
+using NetVips;
+
+namespace API.Services
+{
+ public class ScannerService : IScannerService
+ {
+ private readonly IUnitOfWork _unitOfWork;
+ private readonly ILogger<ScannerService> _logger;
+ private ConcurrentDictionary<string, ConcurrentBag<ParserInfo>> _scannedSeries;
+
+ public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger)
+ {
+ _unitOfWork = unitOfWork;
+ _logger = logger;
+ }
+
+ public void ScanLibraries()
+ {
+ var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList();
+ foreach (var lib in libraries)
+ {
+ ScanLibrary(lib.Id, false);
+ }
+ }
+
+ public void ScanLibrary(int libraryId, bool forceUpdate)
+ {
+
+ var sw = Stopwatch.StartNew();
+ Library library;
+ try
+ {
+ library = Task.Run(() => _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId)).Result;
+ }
+ catch (Exception ex)
+ {
+ // This usually only fails if user is not authenticated.
+ _logger.LogError($"There was an issue fetching Library {libraryId}.", ex);
+ return;
+ }
+
+ _scannedSeries = new ConcurrentDictionary<string, ConcurrentBag<ParserInfo>>();
+ _logger.LogInformation($"Beginning scan on {library.Name}. Forcing metadata update: {forceUpdate}");
+
+ var totalFiles = 0;
+ foreach (var folderPath in library.Folders)
+ {
+ try {
+ totalFiles = DirectoryService.TraverseTreeParallelForEach(folderPath.Path, (f) =>
+ {
+ try
+ {
+ ProcessFile(f);
+ }
+ catch (FileNotFoundException exception)
+ {
+ _logger.LogError(exception, "The file could not be found");
+ }
+ });
+ }
+ catch (ArgumentException ex) {
+ _logger.LogError(ex, $"The directory '{folderPath}' does not exist");
+ }
+ }
+
+ var filtered = _scannedSeries.Where(kvp => !kvp.Value.IsEmpty);
+ var series = filtered.ToImmutableDictionary(v => v.Key, v => v.Value);
+
+ // Perform DB activities
+ var allSeries = Task.Run(() => _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId)).Result.ToList();
+ foreach (var seriesKey in series.Keys)
+ {
+ var mangaSeries = allSeries.SingleOrDefault(s => s.Name == seriesKey) ?? new Series
+ {
+ Name = seriesKey,
+ OriginalName = seriesKey,
+ SortName = seriesKey,
+ Summary = ""
+ };
+ try
+ {
+ mangaSeries = UpdateSeries(mangaSeries, series[seriesKey].ToArray(), forceUpdate);
+ _logger.LogInformation($"Created/Updated series {mangaSeries.Name} for {library.Name} library");
+ library.Series ??= new List<Series>();
+ library.Series.Add(mangaSeries);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, $"There was an error during scanning of library. {seriesKey} will be skipped.");
+ }
+ }
+
+ // Remove series that are no longer on disk
+ foreach (var existingSeries in allSeries)
+ {
+ if (!series.ContainsKey(existingSeries.Name) || !series.ContainsKey(existingSeries.OriginalName))
+ {
+ // Delete series, there is no file to backup any longer.
+ library.Series?.Remove(existingSeries);
+ }
+ }
+
+ _unitOfWork.LibraryRepository.Update(library);
+
+ if (Task.Run(() => _unitOfWork.Complete()).Result)
+ {
+ _logger.LogInformation($"Scan completed on {library.Name}. Parsed {series.Keys.Count()} series.");
+ }
+ else
+ {
+ _logger.LogError("There was a critical error that resulted in a failed scan. Please rescan.");
+ }
+
+ _scannedSeries = null;
+ _logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds, library.Name);
+ }
+
+ /// <summary>
+ /// Processes files found during a library scan. Generates a collection of <see cref="ParserInfo"/> for DB updates later.
+ /// </summary>
+ /// <param name="path">Path of a file</param>
+ private void ProcessFile(string path)
+ {
+ var fileName = Path.GetFileName(path);
+ //var directoryName = (new FileInfo(path)).Directory?.Name;
+
+ _logger.LogDebug($"Parsing file {fileName}");
+
+
+ var info = Parser.Parser.Parse(fileName);
+ info.FullFilePath = path;
+ if (info.Series == string.Empty)
+ {
+ _logger.LogInformation($"Could not parse series or volume from {fileName}");
+ return;
+ }
+
+ ConcurrentBag<ParserInfo> newBag = new ConcurrentBag<ParserInfo>();
+ // Use normalization for key lookup due to parsing disparities
+ var existingKey = _scannedSeries.Keys.SingleOrDefault(k => k.ToLower() == info.Series.ToLower());
+ if (existingKey != null) info.Series = existingKey;
+ if (_scannedSeries.TryGetValue(info.Series, out var tempBag))
+ {
+ var existingInfos = tempBag.ToArray();
+ foreach (var existingInfo in existingInfos)
+ {
+ newBag.Add(existingInfo);
+ }
+ }
+ else
+ {
+ tempBag = new ConcurrentBag<ParserInfo>();
+ }
+
+ newBag.Add(info);
+
+ if (!_scannedSeries.TryUpdate(info.Series, newBag, tempBag))
+ {
+ _scannedSeries.TryAdd(info.Series, newBag);
+ }
+ }
+
+ private Series UpdateSeries(Series series, ParserInfo[] infos, bool forceUpdate)
+ {
+ var volumes = UpdateVolumes(series, infos, forceUpdate);
+ series.Volumes = volumes;
+ series.Pages = volumes.Sum(v => v.Pages);
+ if (series.CoverImage == null || forceUpdate)
+ {
+ series.CoverImage = volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0)?.CoverImage;
+ }
+ if (string.IsNullOrEmpty(series.Summary) || forceUpdate)
+ {
+ series.Summary = ""; // TODO: Check if comicInfo.xml in file and parse metadata out.
+ }
+
+
+ return series;
+ }
+
+ private MangaFile CreateMangaFile(ParserInfo info)
+ {
+ _logger.LogDebug($"Creating File Entry for {info.FullFilePath}");
+
+ int.TryParse(info.Chapters, out var chapter);
+ _logger.LogDebug($"Found Chapter: {chapter}");
+ return new MangaFile()
+ {
+ FilePath = info.FullFilePath,
+ Chapter = chapter,
+ Format = info.Format,
+ NumberOfPages = info.Format == MangaFormat.Archive ? GetNumberOfPagesFromArchive(info.FullFilePath) : 1
+ };
+ }
+
+ private int MinimumNumberFromRange(string range)
+ {
+ var tokens = range.Split("-");
+ return Int32.Parse(tokens.Length >= 1 ? tokens[0] : range);
+ }
+
+ /// <summary>
+ /// Creates or Updates volumes for a given series
+ /// </summary>
+ /// <param name="series">Series wanting to be updated</param>
+ /// <param name="infos">Parser info</param>
+ /// <param name="forceUpdate">Forces metadata update (cover image) even if it's already been set.</param>
+ /// <returns>Updated Volumes for given series</returns>
+ private ICollection<Volume> UpdateVolumes(Series series, ParserInfo[] infos, bool forceUpdate)
+ {
+ ICollection<Volume> volumes = new List<Volume>();
+ IList<Volume> existingVolumes = _unitOfWork.SeriesRepository.GetVolumes(series.Id).ToList();
+
+ foreach (var info in infos)
+ {
+ var existingVolume = existingVolumes.SingleOrDefault(v => v.Name == info.Volumes);
+ if (existingVolume != null)
+ {
+ var existingFile = existingVolume.Files.SingleOrDefault(f => f.FilePath == info.FullFilePath);
+ if (existingFile != null)
+ {
+ existingFile.Chapter = MinimumNumberFromRange(info.Chapters);
+ existingFile.Format = info.Format;
+ existingFile.NumberOfPages = GetNumberOfPagesFromArchive(info.FullFilePath);
+ }
+ else
+ {
+ if (info.Format == MangaFormat.Archive)
+ {
+ existingVolume.Files.Add(CreateMangaFile(info));
+ }
+ else
+ {
+ _logger.LogDebug($"Ignoring {info.Filename} as it is not an archive.");
+ }
+
+ }
+
+ volumes.Add(existingVolume);
+ }
+ else
+ {
+ existingVolume = volumes.SingleOrDefault(v => v.Name == info.Volumes);
+ if (existingVolume != null)
+ {
+ existingVolume.Files.Add(CreateMangaFile(info));
+ }
+ else
+ {
+ var vol = new Volume()
+ {
+ Name = info.Volumes,
+ Number = MinimumNumberFromRange(info.Volumes),
+ Files = new List<MangaFile>()
+ {
+ CreateMangaFile(info)
+ }
+ };
+ volumes.Add(vol);
+ }
+ }
+
+ Console.WriteLine($"Adding volume {volumes.Last().Number} with File: {info.Filename}");
+ }
+
+ foreach (var volume in volumes)
+ {
+ if (forceUpdate || volume.CoverImage == null || !volume.Files.Any())
+ {
+ var firstFile = volume.Files.OrderBy(x => x.Chapter).FirstOrDefault()?.FilePath;
+ volume.CoverImage = GetCoverImage(firstFile, true); // ZIPFILE
+ }
+
+ volume.Pages = volume.Files.Sum(x => x.NumberOfPages);
+ }
+
+ return volumes;
+ }
+
+
+
+
+ public void ScanSeries(int libraryId, int seriesId)
+ {
+ throw new NotImplementedException();
+ }
+
+ private int GetNumberOfPagesFromArchive(string archivePath)
+ {
+ if (!File.Exists(archivePath) || !Parser.Parser.IsArchive(archivePath))
+ {
+ _logger.LogError($"Archive {archivePath} could not be found.");
+ return 0;
+ }
+
+ _logger.LogDebug($"Getting Page numbers from {archivePath}");
+
+ using ZipArchive archive = ZipFile.OpenRead(archivePath); // ZIPFILE
+ return archive.Entries.Count(e => Parser.Parser.IsImage(e.FullName));
+ }
+
+ /// <summary>
+ /// Generates byte array of cover image.
+ /// Given a path to a compressed file (zip, rar, cbz, cbr, etc), will ensure the first image is returned unless
+ /// a folder.extension exists in the root directory of the compressed file.
+ /// </summary>
+ /// <param name="filepath"></param>
+ /// <param name="createThumbnail">Create a smaller variant of file extracted from archive. Archive images are usually 1MB each.</param>
+ /// <returns></returns>
+ public static byte[] GetCoverImage(string filepath, bool createThumbnail = false)
+ {
+ try
+ {
+ if (string.IsNullOrEmpty(filepath) || !File.Exists(filepath) || !Parser.Parser.IsArchive(filepath)) return Array.Empty<byte>();
+
+ Console.WriteLine($"Extracting Cover image from {filepath}");
+ using ZipArchive archive = ZipFile.OpenRead(filepath);
+ if (!archive.HasFiles()) return Array.Empty<byte>();
+
+ var folder = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
+ var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList();
+ ZipArchiveEntry entry;
+
+ if (folder != null)
+ {
+ entry = folder;
+ } else if (!entries.Any())
+ {
+ return Array.Empty<byte>();
+ }
+ else
+ {
+ entry = entries[0];
+ }
+
+
+ if (createThumbnail)
+ {
+ try
+ {
+ using var stream = entry.Open();
+ var thumbnail = Image.ThumbnailStream(stream, 320);
+ return thumbnail.WriteToBuffer(".jpg");
+ }
+ catch (Exception ex)
+ {
+ Console.WriteLine("There was a critical error and prevented thumbnail generation.");
+ Console.WriteLine(ex.Message);
+ }
+ }
+
+ return ExtractEntryToImage(entry);
+ }
+ catch (Exception e)
+ {
+ Console.WriteLine(e);
+ return Array.Empty<byte>();
+ }
+ }
+
+ private static byte[] ExtractEntryToImage(ZipArchiveEntry entry)
+ {
+ using var stream = entry.Open();
+ using var ms = new MemoryStream();
+ stream.CopyTo(ms);
+ var data = ms.ToArray();
+
+ return data;
+ }
+
+ }
+}
\ No newline at end of file
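
Two details of the new ScannerService are easy to miss. First, ProcessFile merges series keys case-insensitively, so files parsed as "Cynthia the Mission" and "Cynthia The Mission" accumulate into a single bag. Second, MinimumNumberFromRange collapses range strings, such as the "0-6" chapter range asserted in ParseChaptersTest, to their lower bound before they are stored as integers. A simplified equivalent of that private helper:

    // string.Split always returns at least one element, so the
    // tokens.Length >= 1 guard in the original always selects tokens[0].
    static int MinimumNumberFromRange(string range) => int.Parse(range.Split("-")[0]);

    MinimumNumberFromRange("0-6"); // 0 -> stored as Volume.Number / MangaFile.Chapter
    MinimumNumberFromRange("5");   // 5 -> single-token input is parsed as-is
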
diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs
index efb05b852..2ed039b8d 100644
--- a/API/Services/TaskScheduler.cs
+++ b/API/Services/TaskScheduler.cs
@@ -8,25 +8,30 @@ namespace API.Services
{
private readonly ICacheService _cacheService;
private readonly ILogger<TaskScheduler> _logger;
- private readonly IDirectoryService _directoryService;
+ private readonly IScannerService _scannerService;
public BackgroundJobServer Client => new BackgroundJobServer();
- public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger,
- IDirectoryService directoryService)
+ public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService)
{
_cacheService = cacheService;
_logger = logger;
- _directoryService = directoryService;
+ _scannerService = scannerService;
_logger.LogInformation("Scheduling/Updating cache cleanup on a daily basis.");
RecurringJob.AddOrUpdate(() => _cacheService.Cleanup(), Cron.Daily);
- RecurringJob.AddOrUpdate(() => directoryService.ScanLibraries(), Cron.Daily);
+ RecurringJob.AddOrUpdate(() => _scannerService.ScanLibraries(), Cron.Daily);
+ }
+
+ public void ScanSeries(int libraryId, int seriesId)
+ {
+ _logger.LogInformation($"Enqueuing series scan for series: {seriesId}");
+ BackgroundJob.Enqueue(() => _scannerService.ScanSeries(libraryId, seriesId));
}
public void ScanLibrary(int libraryId, bool forceUpdate = false)
{
_logger.LogInformation($"Enqueuing library scan for: {libraryId}");
- BackgroundJob.Enqueue(() => _directoryService.ScanLibrary(libraryId, forceUpdate));
+ BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate));
}
public void CleanupVolumes(int[] volumeIds)
@@ -34,10 +39,6 @@ namespace API.Services
BackgroundJob.Enqueue(() => _cacheService.CleanupVolumes(volumeIds));
}
-
- public void ClearCache()
- {
- throw new System.NotImplementedException();
- }
+
}
}
\ No newline at end of file
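
For context on the scheduler: BackgroundJob.Enqueue captures the call as an expression tree, persists it (to LiteDB here, per Hangfire.LiteDB in ApplicationServiceExtensions), and a worker replays it later, so no scan runs inline on the request thread. A sketch of the two styles, the first being an alternative generic form that lets Hangfire resolve the service from DI at execution time:

    // Fire-and-forget; equivalent to the instance-capturing form above.
    BackgroundJob.Enqueue<IScannerService>(s => s.ScanSeries(libraryId, seriesId));

    // Recurring; re-registering the same job updates its schedule in place,
    // which is why the constructor can safely call AddOrUpdate on every startup.
    RecurringJob.AddOrUpdate(() => _scannerService.ScanLibraries(), Cron.Daily);
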
diff --git a/API/Startup.cs b/API/Startup.cs
index 805d280c8..766b1d01d 100644
--- a/API/Startup.cs
+++ b/API/Startup.cs
@@ -51,7 +51,7 @@ namespace API
// Ordering is important. Cors, authentication, authorization
app.UseCors(policy => policy.AllowAnyHeader().AllowAnyMethod().WithOrigins("http://localhost:4200"));
-
+
app.UseAuthentication();
app.UseAuthorization();
diff --git a/API/appsettings.Development.json b/API/appsettings.Development.json
index 740eb4c1e..5c67bda6f 100644
--- a/API/appsettings.Development.json
+++ b/API/appsettings.Development.json
@@ -9,6 +9,12 @@
"Microsoft": "Information",
"Microsoft.Hosting.Lifetime": "Information",
"Hangfire": "Information"
+ },
+ "File": {
+ "Path": "kavita.log",
+ "Append": "True",
+ "FileSizeLimitBytes": 0,
+ "MaxRollingFiles": 0
}
}
}