diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs
index a51a81e0f..e4a29cb59 100644
--- a/API/Controllers/LibraryController.cs
+++ b/API/Controllers/LibraryController.cs
@@ -4,6 +4,7 @@ using System.Linq;
 using System.Threading.Tasks;
 using API.DTOs;
 using API.Entities;
+using API.Extensions;
 using API.Interfaces;
 using AutoMapper;
 using Hangfire;
@@ -23,10 +24,11 @@ namespace API.Controllers
         private readonly IMapper _mapper;
         private readonly ITaskScheduler _taskScheduler;
         private readonly ISeriesRepository _seriesRepository;
+        private readonly ICacheService _cacheService;
 
         public LibraryController(IDirectoryService directoryService, 
             ILibraryRepository libraryRepository, ILogger<LibraryController> logger, IUserRepository userRepository,
-            IMapper mapper, ITaskScheduler taskScheduler, ISeriesRepository seriesRepository)
+            IMapper mapper, ITaskScheduler taskScheduler, ISeriesRepository seriesRepository, ICacheService cacheService)
         {
             _directoryService = directoryService;
             _libraryRepository = libraryRepository;
@@ -35,6 +37,7 @@ namespace API.Controllers
             _mapper = mapper;
             _taskScheduler = taskScheduler;
             _seriesRepository = seriesRepository;
+            _cacheService = cacheService;
         }
 
         /// <summary>
@@ -153,11 +156,16 @@ namespace API.Controllers
         [HttpDelete("delete")]
         public async Task<ActionResult<bool>> DeleteLibrary(int libraryId)
         {
+            var username = User.GetUsername();
+            _logger.LogInformation($"Library {libraryId} is being deleted by {username}.");
+            var series = await _seriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId);
+            var volumes = (await _seriesRepository.GetVolumesForSeriesAsync(series.Select(x => x.Id).ToArray())).ToList();
             var result = await _libraryRepository.DeleteLibrary(libraryId);
-            if (result)
+
+            if (result && volumes.Any())
             {
-                // TODO: This should clear out any cache items associated with library
+                BackgroundJob.Enqueue(() => _cacheService.CleanupLibrary(libraryId, volumes.Select(x => x.Id).ToArray()));
             }
 
             return Ok(result);
diff --git a/API/Controllers/ReaderController.cs b/API/Controllers/ReaderController.cs
index b15a58496..299e5eea6 100644
--- a/API/Controllers/ReaderController.cs
+++ b/API/Controllers/ReaderController.cs
@@ -38,7 +38,6 @@ namespace API.Controllers
             }
 
             return Ok(volume.Files.Select(x => x.NumberOfPages).Sum());
-
         }
 
         [HttpGet("image")]
@@ -48,6 +47,7 @@ namespace API.Controllers
             var volume = await _cacheService.Ensure(volumeId);
 
             var files = _directoryService.ListFiles(_cacheService.GetCachedPagePath(volume, page));
+            //files.OrderBy(t => t, _numericComparer);
             var array = files.ToArray();
             Array.Sort(array, _numericComparer); // TODO: Find a way to apply numericComparer to IList.
             var path = array.ElementAt(page);
diff --git a/API/Data/SeriesRepository.cs b/API/Data/SeriesRepository.cs
index 585ff0de2..827b12d8e 100644
--- a/API/Data/SeriesRepository.cs
+++ b/API/Data/SeriesRepository.cs
@@ -83,5 +83,17 @@ namespace API.Data
                 .Include(vol => vol.Files)
                 .SingleOrDefaultAsync(vol => vol.Id == volumeId);
         }
+
+        /// <summary>
+        /// Returns all volumes that contain a seriesId in passed array.
+        /// </summary>
+        /// <param name="seriesIds"></param>
+        /// <returns></returns>
+        public async Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(int[] seriesIds)
+        {
+            return await _context.Volume
+                .Where(v => seriesIds.Contains(v.SeriesId))
+                .ToListAsync();
+        }
     }
 }
\ No newline at end of file
diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs
index 2fe5008e7..63d6a04a7 100644
--- a/API/Extensions/ApplicationServiceExtensions.cs
+++ b/API/Extensions/ApplicationServiceExtensions.cs
@@ -23,6 +23,8 @@ namespace API.Extensions
             services.AddScoped();
             services.AddScoped();
             services.AddScoped();
+
+
             services.AddDbContext<DataContext>(options =>
             {
                 options.UseSqlite(config.GetConnectionString("DefaultConnection"));
diff --git a/API/Extensions/DirectoryInfoExtensions.cs b/API/Extensions/DirectoryInfoExtensions.cs
new file mode 100644
index 000000000..ab5766962
--- /dev/null
+++ b/API/Extensions/DirectoryInfoExtensions.cs
@@ -0,0 +1,11 @@
+namespace API.Extensions
+{
+    public static class DirectoryInfoExtensions
+    {
+        public static void Empty(this System.IO.DirectoryInfo directory)
+        {
+            foreach(System.IO.FileInfo file in directory.EnumerateFiles()) file.Delete();
+            foreach(System.IO.DirectoryInfo subDirectory in directory.EnumerateDirectories()) subDirectory.Delete(true);
+        }
+    }
+}
\ No newline at end of file
diff --git a/API/IO/ImageProvider.cs b/API/IO/ImageProvider.cs
index d8122f37f..ebfe85d76 100644
--- a/API/IO/ImageProvider.cs
+++ b/API/IO/ImageProvider.cs
@@ -19,7 +19,7 @@ namespace API.IO
         ///
         public static byte[] GetCoverImage(string filepath, bool createThumbnail = false)
         {
-            if (!File.Exists(filepath) || !Parser.Parser.IsArchive(filepath)) return Array.Empty<byte>();
+            if (string.IsNullOrEmpty(filepath) || !File.Exists(filepath) || !Parser.Parser.IsArchive(filepath)) return Array.Empty<byte>();
 
             using ZipArchive archive = ZipFile.OpenRead(filepath);
             if (!archive.HasFiles()) return Array.Empty<byte>();
diff --git a/API/Interfaces/ICacheService.cs b/API/Interfaces/ICacheService.cs
index 6083c4810..695f0adc6 100644
--- a/API/Interfaces/ICacheService.cs
+++ b/API/Interfaces/ICacheService.cs
@@ -13,9 +13,18 @@ namespace API.Interfaces
         /// Volume for the passed volumeId. Side-effect from ensuring cache.
         Task<Volume> Ensure(int volumeId);
 
-        bool Cleanup(Volume volume);
+        /// <summary>
+        /// Clears cache directory of all folders and files.
+        /// </summary>
+        ///
+        void Cleanup();
 
-        //bool CleanupAll();
+        /// <summary>
+        /// Clears cache directory of all volumes that belong to a given library.
+        /// </summary>
+        ///
+        void CleanupLibrary(int libraryId, int[] volumeIds);
+
         /// <summary>
         /// Returns the absolute path of a cached page.
diff --git a/API/Interfaces/ISeriesRepository.cs b/API/Interfaces/ISeriesRepository.cs
index 1320788ee..495b0fb77 100644
--- a/API/Interfaces/ISeriesRepository.cs
+++ b/API/Interfaces/ISeriesRepository.cs
@@ -18,5 +18,7 @@ namespace API.Interfaces
         Task<SeriesDto> GetSeriesDtoByIdAsync(int seriesId);
 
         Task<Volume> GetVolumeAsync(int volumeId);
+
+        Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(int[] seriesIds);
     }
 }
\ No newline at end of file
diff --git a/API/Services/CacheService.cs b/API/Services/CacheService.cs
index 714fd1f45..9bffa16f3 100644
--- a/API/Services/CacheService.cs
+++ b/API/Services/CacheService.cs
@@ -1,8 +1,11 @@
-using System.IO;
+using System;
+using System.IO;
 using System.Linq;
 using System.Threading.Tasks;
 using API.Entities;
+using API.Extensions;
 using API.Interfaces;
+using Microsoft.Extensions.Logging;
 
 namespace API.Services
 {
@@ -10,11 +13,14 @@ namespace API.Services
     {
         private readonly IDirectoryService _directoryService;
         private readonly ISeriesRepository _seriesRepository;
+        private readonly ILogger<CacheService> _logger;
+        private readonly string _cacheDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "../cache/"));
 
-        public CacheService(IDirectoryService directoryService, ISeriesRepository seriesRepository)
+        public CacheService(IDirectoryService directoryService, ISeriesRepository seriesRepository, ILogger<CacheService> logger)
         {
             _directoryService = directoryService;
             _seriesRepository = seriesRepository;
+            _logger = logger;
         }
 
         public async Task<Volume> Ensure(int volumeId)
@@ -31,10 +37,40 @@ namespace API.Services
 
         }
 
-        public bool Cleanup(Volume volume)
+        public void Cleanup()
         {
-            throw new System.NotImplementedException();
+            _logger.LogInformation("Performing cleanup of Cache directory");
+
+            DirectoryInfo di = new DirectoryInfo(_cacheDirectory);
+
+            try
+            {
+                di.Empty();
+            }
+            catch (Exception ex)
+            {
+                _logger.LogError("There was an issue deleting one or more folders/files during cleanup.", ex);
+            }
+
+            _logger.LogInformation("Cache directory purged.");
         }
+
+        public void CleanupLibrary(int libraryId, int[] volumeIds)
+        {
+            _logger.LogInformation($"Running Cache cleanup on Library: {libraryId}");
+
+            foreach (var volume in volumeIds)
+            {
+                var di = new DirectoryInfo(Path.Join(_cacheDirectory, volume + ""));
+                if (di.Exists)
+                {
+                    di.Delete(true);
+                }
+
+            }
+            _logger.LogInformation("Cache directory purged");
+        }
+
         private string GetVolumeCachePath(int volumeId, MangaFile file)
         {
diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs
index 226703032..554f241b8 100644
--- a/API/Services/DirectoryService.cs
+++ b/API/Services/DirectoryService.cs
@@ -123,23 +123,23 @@ namespace API.Services
                     Name = seriesName,
                     OriginalName = seriesName,
                     SortName = seriesName,
-                    Summary = "" // TODO: Check if comicInfo.xml in file
+                    Summary = "" // TODO: Check if comicInfo.xml in file and parse metadata out.
                 };
             }
 
             var volumes = UpdateVolumes(series, infos, forceUpdate);
             series.Volumes = volumes;
             series.CoverImage = volumes.OrderBy(x => x.Number).FirstOrDefault()?.CoverImage;
-
+            //GetFiles()
+
             return series;
         }
 
         private MangaFile CreateMangaFile(ParserInfo info)
         {
             _logger.LogDebug($"Creating File Entry for {info.FullFilePath}");
-            int chapter;
-            int.TryParse(info.Chapters, out chapter);
-            _logger.LogDebug($"Chapter? {chapter}");
+            int.TryParse(info.Chapters, out var chapter);
+            _logger.LogDebug($"Found Chapter: {chapter}");
             return new MangaFile()
             {
                 FilePath = info.FullFilePath,
@@ -176,11 +176,7 @@ namespace API.Services
                 {
                     existingVolume.Files.Add(CreateMangaFile(info));
                 }
-
-                if (forceUpdate || existingVolume.CoverImage == null || existingVolumes.Count == 0)
-                {
-                    existingVolume.CoverImage = ImageProvider.GetCoverImage(info.FullFilePath, true);
-                }
+
                 volumes.Add(existingVolume);
             }
             else
@@ -189,7 +185,6 @@ namespace API.Services
                 if (existingVolume != null)
                 {
                     existingVolume.Files.Add(CreateMangaFile(info));
-                    existingVolume.CoverImage = ImageProvider.GetCoverImage(info.FullFilePath, true);
                 }
                 else
                 {
@@ -197,7 +192,6 @@ namespace API.Services
                     {
                         Name = info.Volumes,
                         Number = Int32.Parse(info.Volumes),
-                        CoverImage = ImageProvider.GetCoverImage(info.FullFilePath, true),
                         Files = new List<MangaFile>()
                         {
                             CreateMangaFile(info)
                         }
@@ -210,20 +204,41 @@ namespace API.Services
                 Console.WriteLine($"Adding volume {volumes.Last().Number} with File: {info.Filename}");
             }
 
+            foreach (var volume in volumes)
+            {
+                if (forceUpdate || volume.CoverImage == null || !volume.Files.Any())
+                {
+                    var firstFile = volume.Files.OrderBy(x => x.Chapter).FirstOrDefault()?.FilePath;
+                    volume.CoverImage = ImageProvider.GetCoverImage(firstFile, true);
+                }
+            }
+
             return volumes;
         }
 
         public void ScanLibrary(int libraryId, bool forceUpdate)
         {
             var sw = Stopwatch.StartNew();
-            var library = Task.Run(() => _libraryRepository.GetLibraryForIdAsync(libraryId)).Result;
+            Library library;
+            try
+            {
+                library = Task.Run(() => _libraryRepository.GetLibraryForIdAsync(libraryId)).Result;
+            }
+            catch (Exception ex)
+            {
+                // This usually only fails if user is not authenticated.
+                _logger.LogError($"There was an issue fetching Library {libraryId}.", ex);
+                return;
+            }
+
             _scannedSeries = new ConcurrentDictionary<string, ConcurrentBag<ParserInfo>>();
             _logger.LogInformation($"Beginning scan on {library.Name}");
-
+
+            var totalFiles = 0;
             foreach (var folderPath in library.Folders)
             {
                 try
                 {
-                    TraverseTreeParallelForEach(folderPath.Path, (f) =>
+                    totalFiles = TraverseTreeParallelForEach(folderPath.Path, (f) =>
                     {
                         try
                         {
@@ -266,7 +281,7 @@ namespace API.Services
             }
 
             _scannedSeries = null;
-            Console.WriteLine("Processed {0} files in {1} milliseconds", library.Name, sw.ElapsedMilliseconds);
+            _logger.LogInformation("Processed {0} files in {1} milliseconds for {2}", totalFiles, sw.ElapsedMilliseconds, library.Name);
         }
 
         public string GetExtractPath(int volumeId)
@@ -359,7 +374,7 @@ namespace API.Services
         /// <param name="root">Directory to scan</param>
         /// <param name="action">Action to apply on file path</param>
         ///
-        private static void TraverseTreeParallelForEach(string root, Action<string> action)
+        private static int TraverseTreeParallelForEach(string root, Action<string> action)
         {
             //Count of files traversed and timer for diagnostic output
             int fileCount = 0;
@@ -449,6 +464,8 @@ namespace API.Services
                 foreach (string str in subDirs)
                     dirs.Push(str);
             }
+
+            return fileCount;
         }
     }
diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs
index d5dcdcb5c..00fd5597e 100644
--- a/API/Services/TaskScheduler.cs
+++ b/API/Services/TaskScheduler.cs
@@ -1,15 +1,21 @@
 using API.Interfaces;
 using Hangfire;
+using Microsoft.Extensions.Logging;
 
 namespace API.Services
 {
     public class TaskScheduler : ITaskScheduler
     {
+        private readonly ILogger<TaskScheduler> _logger;
        private readonly BackgroundJobServer _client;
 
-        public TaskScheduler()
+        public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger)
         {
+            _logger = logger;
             _client = new BackgroundJobServer();
+
+            _logger.LogInformation("Scheduling/Updating cache cleanup on a daily basis.");
+            RecurringJob.AddOrUpdate(() => cacheService.Cleanup(), Cron.Daily);
         }
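
Not part of the diff itself: a minimal, standalone sketch for anyone who wants to try the new cleanup semantics locally. It copies the DirectoryInfoExtensions.Empty() helper added above and runs it against a throwaway temp folder laid out the way CleanupLibrary expects (one subfolder per volume id). The kavita-cache-demo folder name and the fake page file are invented for illustration only.

using System;
using System.IO;

// Copy of the extension added in this diff: delete every file and subdirectory
// beneath the directory, but leave the directory itself in place.
public static class DirectoryInfoExtensions
{
    public static void Empty(this DirectoryInfo directory)
    {
        foreach (FileInfo file in directory.EnumerateFiles()) file.Delete();
        foreach (DirectoryInfo subDirectory in directory.EnumerateDirectories()) subDirectory.Delete(true);
    }
}

public static class Demo
{
    public static void Main()
    {
        // Assumed layout: <cache root>/<volumeId>/<page files>, mirroring
        // CleanupLibrary's Path.Join(_cacheDirectory, volume + "").
        var cacheRoot = Directory.CreateDirectory(Path.Join(Path.GetTempPath(), "kavita-cache-demo"));
        Directory.CreateDirectory(Path.Join(cacheRoot.FullName, "1"));
        File.WriteAllText(Path.Join(cacheRoot.FullName, "1", "0001.png"), "fake page");

        cacheRoot.Empty();

        // The cache root survives; its contents do not.
        Console.WriteLine($"{cacheRoot.FullName} children: {cacheRoot.GetFileSystemInfos().Length}");
    }
}

The apparent point of Empty() over Directory.Delete(path, true) is that the cache root itself is never removed, so the daily Cleanup() job presumably never has to recreate it before the next volume extraction writes into it.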