Cache cleanup implemented

This commit is contained in:
Joseph Milazzo 2021-01-11 15:39:25 -06:00
parent f737f662df
commit 731e3a9c5e
11 changed files with 132 additions and 29 deletions

View File

@ -4,6 +4,7 @@ using System.Linq;
using System.Threading.Tasks;
using API.DTOs;
using API.Entities;
using API.Extensions;
using API.Interfaces;
using AutoMapper;
using Hangfire;
@ -23,10 +24,11 @@ namespace API.Controllers
private readonly IMapper _mapper;
private readonly ITaskScheduler _taskScheduler;
private readonly ISeriesRepository _seriesRepository;
private readonly ICacheService _cacheService;
public LibraryController(IDirectoryService directoryService,
ILibraryRepository libraryRepository, ILogger<LibraryController> logger, IUserRepository userRepository,
IMapper mapper, ITaskScheduler taskScheduler, ISeriesRepository seriesRepository)
IMapper mapper, ITaskScheduler taskScheduler, ISeriesRepository seriesRepository, ICacheService cacheService)
{
_directoryService = directoryService;
_libraryRepository = libraryRepository;
@ -35,6 +37,7 @@ namespace API.Controllers
_mapper = mapper;
_taskScheduler = taskScheduler;
_seriesRepository = seriesRepository;
_cacheService = cacheService;
}
/// <summary>
@ -153,11 +156,16 @@ namespace API.Controllers
[HttpDelete("delete")]
public async Task<ActionResult<bool>> DeleteLibrary(int libraryId)
{
var username = User.GetUsername();
_logger.LogInformation($"Library {libraryId} is being deleted by {username}.");
var series = await _seriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId);
var volumes = (await _seriesRepository.GetVolumesForSeriesAsync(series.Select(x => x.Id).ToArray())).ToList();
var result = await _libraryRepository.DeleteLibrary(libraryId);
if (result)
if (result && volumes.Any())
{
// Clear out any cache items associated with this library's volumes (runs as a background job).
BackgroundJob.Enqueue(() => _cacheService.CleanupLibrary(libraryId, volumes.Select(x => x.Id).ToArray()));
}
return Ok(result);

View File

@ -38,7 +38,6 @@ namespace API.Controllers
}
return Ok(volume.Files.Select(x => x.NumberOfPages).Sum());
}
[HttpGet("image")]
@ -48,6 +47,7 @@ namespace API.Controllers
var volume = await _cacheService.Ensure(volumeId);
var files = _directoryService.ListFiles(_cacheService.GetCachedPagePath(volume, page));
//files.OrderBy(t => t, _numericComparer);
var array = files.ToArray();
Array.Sort(array, _numericComparer); // TODO: Find a way to apply numericComparer to IList.
var path = array.ElementAt(page);

View File

@ -83,5 +83,17 @@ namespace API.Data
.Include(vol => vol.Files)
.SingleOrDefaultAsync(vol => vol.Id == volumeId);
}
/// <summary>
/// Returns all volumes that contain a seriesId in passed array.
/// </summary>
/// <param name="seriesIds">Series ids to look up volumes for; an empty array yields an empty result.</param>
/// <returns>Every Volume whose SeriesId is in <paramref name="seriesIds"/> (no Files included — add .Include if callers need them).</returns>
public async Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(int[] seriesIds)
{
    // Contains() translates to a SQL IN clause for the whole id set in one round-trip.
    return await _context.Volume
        .Where(v => seriesIds.Contains(v.SeriesId))
        .ToListAsync();
}
}
}

View File

@ -23,6 +23,8 @@ namespace API.Extensions
services.AddScoped<ISeriesRepository, SeriesRepository>();
services.AddScoped<IDirectoryService, DirectoryService>();
services.AddScoped<ILibraryRepository, LibraryRepository>();
services.AddDbContext<DataContext>(options =>
{
options.UseSqlite(config.GetConnectionString("DefaultConnection"));

View File

@ -0,0 +1,11 @@
namespace API.Extensions
{
    /// <summary>
    /// Extension methods for <see cref="System.IO.DirectoryInfo"/>.
    /// </summary>
    public static class DirectoryInfoExtensions
    {
        /// <summary>
        /// Deletes every file and subdirectory inside <paramref name="directory"/>,
        /// leaving the directory itself in place.
        /// </summary>
        /// <param name="directory">Directory to empty. Must exist on disk.</param>
        /// <exception cref="System.ArgumentNullException">Thrown when <paramref name="directory"/> is null.</exception>
        public static void Empty(this System.IO.DirectoryInfo directory)
        {
            if (directory == null) throw new System.ArgumentNullException(nameof(directory));

            foreach (System.IO.FileInfo file in directory.EnumerateFiles())
            {
                file.Delete();
            }

            foreach (System.IO.DirectoryInfo subDirectory in directory.EnumerateDirectories())
            {
                // recursive delete removes nested files and folders in one call
                subDirectory.Delete(true);
            }
        }
    }
}

View File

@ -19,7 +19,7 @@ namespace API.IO
/// <returns></returns>
public static byte[] GetCoverImage(string filepath, bool createThumbnail = false)
{
if (!File.Exists(filepath) || !Parser.Parser.IsArchive(filepath)) return Array.Empty<byte>();
if (string.IsNullOrEmpty(filepath) || !File.Exists(filepath) || !Parser.Parser.IsArchive(filepath)) return Array.Empty<byte>();
using ZipArchive archive = ZipFile.OpenRead(filepath);
if (!archive.HasFiles()) return Array.Empty<byte>();

View File

@ -13,9 +13,18 @@ namespace API.Interfaces
/// <returns>Volume for the passed volumeId. Side-effect from ensuring cache.</returns>
Task<Volume> Ensure(int volumeId);
bool Cleanup(Volume volume);
/// <summary>
/// Clears cache directory of all folders and files.
/// </summary>
void Cleanup();
//bool CleanupAll();
/// <summary>
/// Clears cache directory of all volumes that belong to a given library.
/// </summary>
/// <param name="libraryId">Id of the library whose cached volumes should be removed.</param>
/// <param name="volumeIds">Ids of the volumes to remove from the cache directory.</param>
void CleanupLibrary(int libraryId, int[] volumeIds);
/// <summary>
/// Returns the absolute path of a cached page.

View File

@ -18,5 +18,7 @@ namespace API.Interfaces
Task<SeriesDto> GetSeriesDtoByIdAsync(int seriesId);
Task<Volume> GetVolumeAsync(int volumeId);
Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(int[] seriesIds);
}
}

View File

@ -1,8 +1,11 @@
using System.IO;
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Extensions;
using API.Interfaces;
using Microsoft.Extensions.Logging;
namespace API.Services
{
@ -10,11 +13,14 @@ namespace API.Services
{
private readonly IDirectoryService _directoryService;
private readonly ISeriesRepository _seriesRepository;
private readonly ILogger<CacheService> _logger;
private readonly string _cacheDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../cache/"));
public CacheService(IDirectoryService directoryService, ISeriesRepository seriesRepository)
public CacheService(IDirectoryService directoryService, ISeriesRepository seriesRepository, ILogger<CacheService> logger)
{
_directoryService = directoryService;
_seriesRepository = seriesRepository;
_logger = logger;
}
public async Task<Volume> Ensure(int volumeId)
@ -31,10 +37,40 @@ namespace API.Services
}
/// <summary>
/// Clears the cache directory of all folders and files.
/// </summary>
public void Cleanup()
{
    _logger.LogInformation("Performing cleanup of Cache directory");
    var di = new DirectoryInfo(_cacheDirectory);
    try
    {
        di.Empty();
    }
    catch (Exception ex)
    {
        // Pass the exception as the FIRST argument so the logger records it (message + stack trace).
        // The previous (message, ex) form treated the exception as an unused template parameter.
        _logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup.");
        return; // don't log success when the purge failed
    }
    _logger.LogInformation("Cache directory purged.");
}
/// <summary>
/// Clears cache directory of all volumes that belong to a given library.
/// </summary>
/// <param name="libraryId">Library the volumes belong to (used for logging only).</param>
/// <param name="volumeIds">Ids of the volumes whose cache folders should be removed.</param>
public void CleanupLibrary(int libraryId, int[] volumeIds)
{
    // Structured-logging template (not string interpolation) so LibraryId is captured as a property.
    _logger.LogInformation("Running Cache cleanup on Library: {LibraryId}", libraryId);
    foreach (var volumeId in volumeIds)
    {
        // Cache folders are laid out as <cacheDir>/<volumeId>/
        var di = new DirectoryInfo(Path.Join(_cacheDirectory, volumeId.ToString()));
        if (di.Exists)
        {
            di.Delete(true);
        }
    }
    _logger.LogInformation("Cache directory purged");
}
private string GetVolumeCachePath(int volumeId, MangaFile file)
{

View File

@ -123,23 +123,23 @@ namespace API.Services
Name = seriesName,
OriginalName = seriesName,
SortName = seriesName,
Summary = "" // TODO: Check if comicInfo.xml in file
Summary = "" // TODO: Check if comicInfo.xml in file and parse metadata out.
};
}
var volumes = UpdateVolumes(series, infos, forceUpdate);
series.Volumes = volumes;
series.CoverImage = volumes.OrderBy(x => x.Number).FirstOrDefault()?.CoverImage;
//GetFiles()
return series;
}
private MangaFile CreateMangaFile(ParserInfo info)
{
_logger.LogDebug($"Creating File Entry for {info.FullFilePath}");
int chapter;
int.TryParse(info.Chapters, out chapter);
_logger.LogDebug($"Chapter? {chapter}");
int.TryParse(info.Chapters, out var chapter);
_logger.LogDebug($"Found Chapter: {chapter}");
return new MangaFile()
{
FilePath = info.FullFilePath,
@ -176,11 +176,7 @@ namespace API.Services
{
existingVolume.Files.Add(CreateMangaFile(info));
}
if (forceUpdate || existingVolume.CoverImage == null || existingVolumes.Count == 0)
{
existingVolume.CoverImage = ImageProvider.GetCoverImage(info.FullFilePath, true);
}
volumes.Add(existingVolume);
}
else
@ -189,7 +185,6 @@ namespace API.Services
if (existingVolume != null)
{
existingVolume.Files.Add(CreateMangaFile(info));
existingVolume.CoverImage = ImageProvider.GetCoverImage(info.FullFilePath, true);
}
else
{
@ -197,7 +192,6 @@ namespace API.Services
{
Name = info.Volumes,
Number = Int32.Parse(info.Volumes),
CoverImage = ImageProvider.GetCoverImage(info.FullFilePath, true),
Files = new List<MangaFile>()
{
CreateMangaFile(info)
@ -210,20 +204,41 @@ namespace API.Services
Console.WriteLine($"Adding volume {volumes.Last().Number} with File: {info.Filename}");
}
foreach (var volume in volumes)
{
if (forceUpdate || volume.CoverImage == null || !volume.Files.Any())
{
var firstFile = volume.Files.OrderBy(x => x.Chapter).FirstOrDefault()?.FilePath;
volume.CoverImage = ImageProvider.GetCoverImage(firstFile, true);
}
}
return volumes;
}
public void ScanLibrary(int libraryId, bool forceUpdate)
{
var sw = Stopwatch.StartNew();
var library = Task.Run(() => _libraryRepository.GetLibraryForIdAsync(libraryId)).Result;
Library library;
try
{
library = Task.Run(() => _libraryRepository.GetLibraryForIdAsync(libraryId)).Result;
}
catch (Exception ex)
{
// This usually only fails if user is not authenticated.
_logger.LogError($"There was an issue fetching Library {libraryId}.", ex);
return;
}
_scannedSeries = new ConcurrentDictionary<string, ConcurrentBag<ParserInfo>>();
_logger.LogInformation($"Beginning scan on {library.Name}");
var totalFiles = 0;
foreach (var folderPath in library.Folders)
{
try {
TraverseTreeParallelForEach(folderPath.Path, (f) =>
totalFiles = TraverseTreeParallelForEach(folderPath.Path, (f) =>
{
try
{
@ -266,7 +281,7 @@ namespace API.Services
}
_scannedSeries = null;
Console.WriteLine("Processed {0} files in {1} milliseconds", library.Name, sw.ElapsedMilliseconds);
_logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds, library.Name);
}
public string GetExtractPath(int volumeId)
@ -359,7 +374,7 @@ namespace API.Services
/// <param name="root">Directory to scan</param>
/// <param name="action">Action to apply on file path</param>
/// <exception cref="ArgumentException"></exception>
private static void TraverseTreeParallelForEach(string root, Action<string> action)
private static int TraverseTreeParallelForEach(string root, Action<string> action)
{
//Count of files traversed and timer for diagnostic output
int fileCount = 0;
@ -449,6 +464,8 @@ namespace API.Services
foreach (string str in subDirs)
dirs.Push(str);
}
return fileCount;
}
}

View File

@ -1,15 +1,21 @@
using API.Interfaces;
using Hangfire;
using Microsoft.Extensions.Logging;
namespace API.Services
{
public class TaskScheduler : ITaskScheduler
{
private readonly ILogger<TaskScheduler> _logger;
private readonly BackgroundJobServer _client;
public TaskScheduler()
/// <summary>
/// Starts the Hangfire background job server and registers recurring maintenance jobs.
/// </summary>
/// <param name="cacheService">Service whose Cleanup is scheduled to run daily.</param>
/// <param name="logger">Logger for scheduling diagnostics.</param>
public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger)
{
    _logger = logger;
    _client = new BackgroundJobServer();
    _logger.LogInformation("Scheduling/Updating cache cleanup on a daily basis.");
    // NOTE(review): the lambda captures this particular cacheService instance and Hangfire
    // serializes the expression for later execution; the generic
    // RecurringJob.AddOrUpdate<ICacheService>(c => c.Cleanup(), ...) overload lets Hangfire
    // resolve the service from DI at run time instead — confirm which is intended.
    RecurringJob.AddOrUpdate(() => cacheService.Cleanup(), Cron.Daily);
}