Mirror of https://github.com/Kareadita/Kavita.git
a) Refactored GetCoverImage to optionally create a thumbnail instead of returning the raw image (raw images are large and bloat the API; a thumbnail is indistinguishable); b) added the ability to force updating cover images in the DB even if they are already set.
parent 7f5a1d0518
commit 0b35ec70fd
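For context on part a), here is a minimal sketch of the thumbnail path this commit wires up (assumptions: the NetVips 1.2.4 API added to API.csproj below; CoverSketch, GetThumbnail, and archivePath are illustrative names, not code from the commit):

    using System;
    using System.IO;
    using System.IO.Compression;
    using System.Linq;
    using NetVips;

    public static class CoverSketch
    {
        // Mirrors GetCoverImage(filepath, createThumbnail: true): take the first
        // archive entry (sorted by full name) and shrink it to a 320px-wide JPEG.
        public static byte[] GetThumbnail(string archivePath)
        {
            using ZipArchive archive = ZipFile.OpenRead(archivePath);
            if (archive.Entries.Count == 0) return Array.Empty<byte>();

            var entry = archive.Entries.OrderBy(x => x.FullName).First();
            using var stream = entry.Open();

            // ThumbnailStream decodes and resizes in a single pass; the result
            // is then re-encoded as JPEG bytes for storage in the DB.
            using var thumbnail = Image.ThumbnailStream(stream, 320);
            return thumbnail.WriteToBuffer(".jpg");
        }
    }

Per the diff below, a raw archive page is usually around 1 MB while the generated thumbnail is tens of KiB, which is the API bloat the message describes; part b) threads a forceUpdate flag from the scan endpoint through ScanLibrary down to UpdateVolumes so covers are regenerated even when already set.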
ImageProvider tests (namespace API.Tests.Services):

@@ -1,7 +1,9 @@
 using System;
 using System.IO;
 using API.IO;
+using NetVips;
 using Xunit;
+using Xunit.Abstractions;
 
 namespace API.Tests.Services
 {
@@ -10,6 +12,8 @@ namespace API.Tests.Services
         [Theory]
         [InlineData("v10.cbz", "v10.expected.jpg")]
         [InlineData("v10 - with folder.cbz", "v10 - with folder.expected.jpg")]
+        //[InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.jpg")]
+        [InlineData("Akame ga KILL! ZERO v06 (2017) (Digital) (LuCaZ).cbz", "Akame ga KILL! ZERO v06 (2017) (Digital) (LuCaZ).expected.jpg")]
         public void GetCoverImageTest(string inputFile, string expectedOutputFile)
         {
             var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ImageProvider");
Binary file not shown (new file, 401 KiB)
Binary file not shown (new file, 20 KiB)
BIN API.Tests/Services/Test Data/ImageProvider/thumbnail.jpg (new file; binary not shown, 395 KiB)
API.csproj:

@@ -20,6 +20,8 @@
     </PackageReference>
     <PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="5.0.1" />
     <PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="5.0.1" />
+    <PackageReference Include="NetVips" Version="1.2.4" />
+    <PackageReference Include="NetVips.Native" Version="8.10.5.1" />
     <PackageReference Include="SonarAnalyzer.CSharp" Version="8.16.0.25740">
       <PrivateAssets>all</PrivateAssets>
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
@@ -30,6 +32,20 @@
 
   <ItemGroup>
     <None Remove="Hangfire-log.db" />
+    <None Remove="obj\**" />
+  </ItemGroup>
+
+  <ItemGroup>
+    <Compile Remove="Interfaces\IMetadataService.cs" />
+    <Compile Remove="obj\**" />
+  </ItemGroup>
+
+  <ItemGroup>
+    <EmbeddedResource Remove="obj\**" />
+  </ItemGroup>
+
+  <ItemGroup>
+    <Content Remove="obj\**" />
   </ItemGroup>
 
 </Project>
LibraryController (namespace API.Controllers):

@@ -72,7 +72,7 @@ namespace API.Controllers
             if (await _userRepository.SaveAllAsync())
             {
                 var createdLibrary = await _libraryRepository.GetLibraryForNameAsync(library.Name);
-                BackgroundJob.Enqueue(() => _directoryService.ScanLibrary(createdLibrary.Id));
+                BackgroundJob.Enqueue(() => _directoryService.ScanLibrary(createdLibrary.Id, false));
                 return Ok();
             }
 
@@ -131,7 +131,7 @@ namespace API.Controllers
         [HttpPost("scan")]
         public ActionResult ScanLibrary(int libraryId)
         {
-            BackgroundJob.Enqueue(() => _directoryService.ScanLibrary(libraryId));
+            BackgroundJob.Enqueue(() => _directoryService.ScanLibrary(libraryId, true));
             return Ok();
         }
 
SeriesRepository (namespace API.Data):

@@ -66,6 +66,7 @@ namespace API.Data
         {
             return _context.Volume
                 .Where(vol => vol.SeriesId == seriesId)
+                .Include(vol => vol.Files)
                 .OrderBy(vol => vol.Number)
                 .ToList();
         }
ImageProvider (namespace API.IO):

@@ -2,6 +2,7 @@
 using System.IO;
 using System.IO.Compression;
 using System.Linq;
+using NetVips;
 
 namespace API.IO
 {
@@ -13,28 +14,51 @@ namespace API.IO
         /// a folder.extension exists in the root directory of the compressed file.
         /// </summary>
         /// <param name="filepath"></param>
+        /// <param name="createThumbnail">Create a smaller variant of the file extracted from the archive. Archive images are usually 1MB each.</param>
         /// <returns></returns>
-        public static byte[] GetCoverImage(string filepath)
+        public static byte[] GetCoverImage(string filepath, bool createThumbnail = false)
         {
             if (!File.Exists(filepath) || !Parser.Parser.IsArchive(filepath)) return Array.Empty<byte>();
 
             using ZipArchive archive = ZipFile.OpenRead(filepath);
             if (archive.Entries.Count <= 0) return Array.Empty<byte>();
 
             var folder = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
-            var entry = archive.Entries[0];
+            var entry = archive.Entries.OrderBy(x => x.FullName).ToList()[0];
 
             if (folder != null)
             {
                 entry = folder;
             }
 
-            return ExtractEntryToImage(entry);
-        }
-
+            if (entry.FullName.EndsWith(Path.PathSeparator))
+            {
+                // TODO: Implement nested directory support
+            }
+
+            if (createThumbnail)
+            {
+                try
+                {
+                    using var stream = entry.Open();
+                    var thumbnail = Image.ThumbnailStream(stream, 320);
+                    Console.WriteLine(thumbnail.ToString());
+                    return thumbnail.WriteToBuffer(".jpg");
+                }
+                catch (Exception ex)
+                {
+                    Console.WriteLine("There was a critical error that prevented thumbnail generation.");
+                }
+            }
+
+            return ExtractEntryToImage(entry);
+        }
+
         private static byte[] ExtractEntryToImage(ZipArchiveEntry entry)
         {
-            var stream = entry.Open();
+            using var stream = entry.Open();
             using var ms = new MemoryStream();
             stream.CopyTo(ms);
             var data = ms.ToArray();
IDirectoryService (namespace API.Interfaces):

@@ -6,6 +6,6 @@ namespace API.Interfaces
     {
         IEnumerable<string> ListDirectory(string rootPath);
 
-        void ScanLibrary(int libraryId);
+        void ScanLibrary(int libraryId, bool forceUpdate = false);
     }
 }
DirectoryService (namespace API.Services):

@@ -12,6 +12,7 @@ using API.Entities;
 using API.Interfaces;
 using API.IO;
 using API.Parser;
+using Hangfire;
 using Microsoft.Extensions.Logging;
 
 namespace API.Services
@@ -21,7 +22,7 @@ namespace API.Services
         private readonly ILogger<DirectoryService> _logger;
         private readonly ISeriesRepository _seriesRepository;
         private readonly ILibraryRepository _libraryRepository;
 
         private ConcurrentDictionary<string, ConcurrentBag<ParserInfo>> _scannedSeries;
 
         public DirectoryService(ILogger<DirectoryService> logger,
@@ -70,67 +71,45 @@ namespace API.Services
 
 
         /// <summary>
-        /// Processes files found during a library scan.
+        /// Processes files found during a library scan. Generates a collection of series->volume->files for DB processing later.
         /// </summary>
-        /// <param name="path"></param>
+        /// <param name="path">Path of a file</param>
         private void Process(string path)
         {
-            // NOTE: In current implementation, this never runs. We can probably remove.
-            if (Directory.Exists(path))
-            {
-                DirectoryInfo di = new DirectoryInfo(path);
-                _logger.LogDebug($"Parsing directory {di.Name}");
-
-                var seriesName = Parser.Parser.ParseSeries(di.Name);
-                if (string.IsNullOrEmpty(seriesName))
-                {
-                    return;
-                }
-
-                // We don't need ContainsKey, this is a race condition. We can replace with TryAdd instead
-                if (!_scannedSeries.ContainsKey(seriesName))
-                {
-                    _scannedSeries.TryAdd(seriesName, new ConcurrentBag<ParserInfo>());
-                }
-            }
-            else
-            {
-                var fileName = Path.GetFileName(path);
-                _logger.LogDebug($"Parsing file {fileName}");
-
-                var info = Parser.Parser.Parse(fileName);
-                info.FullFilePath = path;
-                if (info.Volumes == string.Empty)
-                {
-                    return;
-                }
-
-                ConcurrentBag<ParserInfo> tempBag;
-                ConcurrentBag<ParserInfo> newBag = new ConcurrentBag<ParserInfo>();
-                if (_scannedSeries.TryGetValue(info.Series, out tempBag))
-                {
-                    var existingInfos = tempBag.ToArray();
-                    foreach (var existingInfo in existingInfos)
-                    {
-                        newBag.Add(existingInfo);
-                    }
-                }
-                else
-                {
-                    tempBag = new ConcurrentBag<ParserInfo>();
-                }
-
-                newBag.Add(info);
-
-                if (!_scannedSeries.TryUpdate(info.Series, newBag, tempBag))
-                {
-                    _scannedSeries.TryAdd(info.Series, newBag);
-                }
-            }
+            var fileName = Path.GetFileName(path);
+            _logger.LogDebug($"Parsing file {fileName}");
+
+            var info = Parser.Parser.Parse(fileName);
+            info.FullFilePath = path;
+            if (info.Volumes == string.Empty)
+            {
+                return;
+            }
+
+            ConcurrentBag<ParserInfo> tempBag;
+            ConcurrentBag<ParserInfo> newBag = new ConcurrentBag<ParserInfo>();
+            if (_scannedSeries.TryGetValue(info.Series, out tempBag))
+            {
+                var existingInfos = tempBag.ToArray();
+                foreach (var existingInfo in existingInfos)
+                {
+                    newBag.Add(existingInfo);
+                }
+            }
+            else
+            {
+                tempBag = new ConcurrentBag<ParserInfo>();
+            }
+
+            newBag.Add(info);
+
+            if (!_scannedSeries.TryUpdate(info.Series, newBag, tempBag))
+            {
+                _scannedSeries.TryAdd(info.Series, newBag);
+            }
         }
 
-        private Series UpdateSeries(string seriesName, ParserInfo[] infos)
+        private Series UpdateSeries(string seriesName, ParserInfo[] infos, bool forceUpdate)
         {
             var series = _seriesRepository.GetSeriesByName(seriesName);
 
@@ -145,8 +124,9 @@ namespace API.Services
                 };
             }
 
-            var volumes = UpdateVolumes(series, infos);
+            var volumes = UpdateVolumes(series, infos, forceUpdate);
             series.Volumes = volumes;
+            // TODO: Instead of taking first entry, re-calculate without compression
             series.CoverImage = volumes.OrderBy(x => x.Number).FirstOrDefault()?.CoverImage;
             return series;
         }
@@ -156,12 +136,13 @@ namespace API.Services
         /// </summary>
         /// <param name="series">Series wanting to be updated</param>
         /// <param name="infos">Parser info</param>
+        /// <param name="forceUpdate">Forces metadata update (cover image) even if it's already been set.</param>
         /// <returns>Updated Volumes for given series</returns>
-        private ICollection<Volume> UpdateVolumes(Series series, ParserInfo[] infos)
+        private ICollection<Volume> UpdateVolumes(Series series, ParserInfo[] infos, bool forceUpdate)
         {
             ICollection<Volume> volumes = new List<Volume>();
             IList<Volume> existingVolumes = _seriesRepository.GetVolumes(series.Id).ToList();
-            //IList<Volume> existingVolumes = Task.Run(() => _seriesRepository.GetVolumesAsync(series.Id)).Result.ToList();
             foreach (var info in infos)
             {
                 var existingVolume = existingVolumes.SingleOrDefault(v => v.Name == info.Volumes);
@@ -175,6 +156,11 @@ namespace API.Services
                         FilePath = info.File
                     }
                 };
+
+                if (forceUpdate || existingVolume.CoverImage == null || existingVolumes.Count == 0)
+                {
+                    existingVolume.CoverImage = ImageProvider.GetCoverImage(info.FullFilePath, true);
+                }
                 volumes.Add(existingVolume);
             }
             else
@@ -183,7 +169,7 @@ namespace API.Services
                 {
                     Name = info.Volumes,
                     Number = Int32.Parse(info.Volumes),
-                    CoverImage = ImageProvider.GetCoverImage(info.FullFilePath),
+                    CoverImage = ImageProvider.GetCoverImage(info.FullFilePath, true),
                     Files = new List<MangaFile>()
                     {
                         new MangaFile()
@@ -201,7 +187,7 @@ namespace API.Services
             return volumes;
         }
 
-        public void ScanLibrary(int libraryId)
+        public void ScanLibrary(int libraryId, bool forceUpdate = false)
         {
             var library = Task.Run(() => _libraryRepository.GetLibraryForIdAsync(libraryId)).Result;
             _scannedSeries = new ConcurrentDictionary<string, ConcurrentBag<ParserInfo>>();
@@ -234,7 +220,7 @@
             library.Series = new List<Series>(); // Temp delete everything until we can mark items Unavailable
             foreach (var seriesKey in series.Keys)
             {
-                var s = UpdateSeries(seriesKey, series[seriesKey].ToArray());
+                var s = UpdateSeries(seriesKey, series[seriesKey].ToArray(), forceUpdate);
                 _logger.LogInformation($"Created/Updated series {s.Name}");
                 library.Series.Add(s);
             }
@@ -251,7 +237,6 @@ namespace API.Services
             {
                 _logger.LogError("There was a critical error that resulted in a failed scan. Please rescan.");
             }
 
-
             _scannedSeries = null;
         }
@@ -351,9 +336,6 @@
             // For diagnostic purposes.
             Console.WriteLine("Processed {0} files in {1} milliseconds", fileCount, sw.ElapsedMilliseconds);
         }
-
-
-
 
     }
 }
Startup (namespace API):

@@ -46,7 +46,7 @@ namespace API
             }
 
             app.UseHangfireDashboard();
-            backgroundJobs.Enqueue(() => Console.WriteLine("Hello world from Hangfire!"));
+            //backgroundJobs.Enqueue(() => Console.WriteLine("Hello world from Hangfire!"));
 
             app.UseHttpsRedirection();
 