Moved some files around, and fixed log file fetching to zip up the logs and work more reliably. Refactored some code in DirectoryService.

Joseph Milazzo 2021-02-24 15:58:17 -06:00
parent bbb4240e20
commit ecfb40cb2d
15 changed files with 128 additions and 47 deletions

View File

@@ -1,5 +1,6 @@
 using API.Interfaces;
 using API.Services;
+using API.Services.Tasks;
 using Microsoft.Extensions.Configuration;
 using Microsoft.Extensions.Logging;
 using NSubstitute;

View File

@@ -4,6 +4,7 @@ using API.Entities;
 using API.Interfaces;
 using API.Interfaces.Services;
 using API.Services;
+using API.Services.Tasks;
 using Microsoft.Extensions.Logging;
 using NSubstitute;
 using Xunit;

View File

@@ -1,7 +1,9 @@
 using System;
 using System.IO;
+using System.IO.Compression;
 using System.Threading.Tasks;
 using API.Extensions;
+using API.Interfaces;
 using API.Interfaces.Services;
 using Microsoft.AspNetCore.Authorization;
 using Microsoft.AspNetCore.Mvc;
@@ -18,14 +20,18 @@ namespace API.Controllers
         private readonly ILogger<ServerController> _logger;
         private readonly IConfiguration _config;
         private readonly IDirectoryService _directoryService;
+        private readonly IBackupService _backupService;
+        private readonly ITaskScheduler _taskScheduler;

         public ServerController(IHostApplicationLifetime applicationLifetime, ILogger<ServerController> logger, IConfiguration config,
-            IDirectoryService directoryService)
+            IDirectoryService directoryService, IBackupService backupService, ITaskScheduler taskScheduler)
         {
             _applicationLifetime = applicationLifetime;
             _logger = logger;
             _config = config;
             _directoryService = directoryService;
+            _backupService = backupService;
+            _taskScheduler = taskScheduler;
         }

         [HttpPost("restart")]
@@ -40,40 +46,34 @@ namespace API.Controllers
         [HttpGet("logs")]
         public async Task<ActionResult> GetLogs()
         {
-            // TODO: Zip up the log files
-            var maxRollingFiles = int.Parse(_config.GetSection("Logging").GetSection("File").GetSection("MaxRollingFiles").Value);
-            var loggingSection = _config.GetSection("Logging").GetSection("File").GetSection("Path").Value;
-            var multipleFileRegex = maxRollingFiles > 0 ? @"\d*" : string.Empty;
-            FileInfo fi = new FileInfo(loggingSection);
-            var files = _directoryService.GetFilesWithExtension(Directory.GetCurrentDirectory(), $@"{fi.Name}{multipleFileRegex}\.log");
-            Console.WriteLine(files);
-            var logFile = Path.Join(Directory.GetCurrentDirectory(), loggingSection);
-            _logger.LogInformation("Fetching download of logs: {LogFile}", logFile);
-
-            // First, copy the file to temp
-            var originalFile = new FileInfo(logFile);
+            var files = _backupService.LogFiles(_config.GetMaxRollingFiles(), _config.GetLoggingFileName());
             var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
-            _directoryService.ExistOrCreate(tempDirectory);
-            var tempLocation = Path.Join(tempDirectory, originalFile.Name);
-            originalFile.CopyTo(tempLocation);
-
-            // Read into memory
-            await using var memory = new MemoryStream();
-            // We need to copy it else it will throw an exception
-            await using (var stream = new FileStream(tempLocation, FileMode.Open, FileAccess.Read))
-            {
-                await stream.CopyToAsync(memory);
-            }
-            memory.Position = 0;
-
-            // Delete temp
-            (new FileInfo(tempLocation)).Delete();
-
-            return File(memory, "text/plain", Path.GetFileName(logFile));
+            var dateString = DateTime.Now.ToShortDateString().Replace("/", "_");
+            // TODO: Make this unique based on date
+            var tempLocation = Path.Join(tempDirectory, "logs_" + dateString);
+            _directoryService.ExistOrCreate(tempLocation);
+            if (!_directoryService.CopyFilesToDirectory(files, tempLocation))
+            {
+                return BadRequest("Unable to copy files to temp directory for log download.");
+            }
+
+            var zipPath = Path.Join(tempDirectory, $"kavita_logs_{dateString}.zip");
+            try
+            {
+                ZipFile.CreateFromDirectory(tempLocation, zipPath);
+            }
+            catch (AggregateException ex)
+            {
+                _logger.LogError(ex, "There was an issue when archiving library backup");
+                return BadRequest("There was an issue when archiving library backup");
+            }
+            var fileBytes = await _directoryService.ReadFileAsync(zipPath);
+
+            _directoryService.ClearAndDeleteDirectory(tempLocation);
+            (new FileInfo(zipPath)).Delete();
+
+            return File(fileBytes, "application/zip", Path.GetFileName(zipPath));
         }
     }
 }
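For context, a minimal client-side sketch of consuming the reworked endpoint follows. The base address, the api/server route prefix, and the bearer-token handling are assumptions for illustration; only [HttpGet("logs")] appears in this diff.

using System;
using System.IO;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;

public static class LogDownloadSketch
{
    public static async Task Main()
    {
        // Hypothetical caller of the logs endpoint; assumes the controller is routed at api/server
        // and that an admin JWT is available in the KAVITA_TOKEN environment variable.
        using var client = new HttpClient { BaseAddress = new Uri("http://localhost:5000/") };
        client.DefaultRequestHeaders.Authorization =
            new AuthenticationHeaderValue("Bearer", Environment.GetEnvironmentVariable("KAVITA_TOKEN"));

        // The response is now application/zip containing kavita.log plus any rolled files,
        // rather than a single text/plain log file.
        using var response = await client.GetAsync("api/server/logs");
        response.EnsureSuccessStatusCode();

        await using var output = File.Create("kavita_logs.zip");
        await response.Content.CopyToAsync(output);
    }
}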

View File

@@ -3,6 +3,7 @@ using API.Helpers;
 using API.Interfaces;
 using API.Interfaces.Services;
 using API.Services;
+using API.Services.Tasks;
 using AutoMapper;
 using Hangfire;
 using Hangfire.LiteDB;
@@ -27,6 +28,7 @@ namespace API.Extensions
             services.AddScoped<IArchiveService, ArchiveService>();
             services.AddScoped<IMetadataService, MetadataService>();
             services.AddScoped<IBackupService, BackupService>();
+            services.AddScoped<ICleanupService, CleanupService>();

View File

@@ -9,5 +9,6 @@
         void ScanLibrary(int libraryId, bool forceUpdate = false);
         void CleanupChapters(int[] chapterIds);
         void RefreshMetadata(int libraryId, bool forceUpdate = true);
+        void CleanupTemp();
     }
 }

View File

@@ -11,5 +11,6 @@ namespace API.Interfaces.Services
         byte[] GetCoverImage(string filepath, bool createThumbnail = false);
         bool IsValidArchive(string archivePath);
         string GetSummaryInfo(string archivePath);
     }
 }

View File

@ -1,7 +1,17 @@
namespace API.Interfaces.Services using System.Collections.Generic;
using Microsoft.Extensions.Configuration;
namespace API.Interfaces.Services
{ {
public interface IBackupService public interface IBackupService
{ {
void BackupDatabase(); void BackupDatabase();
/// <summary>
/// Returns a list of full paths of the logs files detailed in <see cref="IConfiguration"/>.
/// </summary>
/// <param name="maxRollingFiles"></param>
/// <param name="logFileName"></param>
/// <returns></returns>
IEnumerable<string> LogFiles(int maxRollingFiles, string logFileName);
} }
} }

View File

@@ -0,0 +1,7 @@
+namespace API.Interfaces.Services
+{
+    public interface ICleanupService
+    {
+        void Cleanup();
+    }
+}

View File

@@ -29,6 +29,8 @@ namespace API.Interfaces.Services
         /// <returns></returns>
         bool ExistOrCreate(string directoryPath);
+
+        Task<byte[]> ReadFileAsync(string path);

         /// <summary>
         /// Deletes all files within the directory, then the directory itself.
         /// </summary>

View File

@@ -6,6 +6,7 @@ using System.Linq;
 using System.Xml.Serialization;
 using API.Extensions;
 using API.Interfaces.Services;
+using API.Services.Tasks;
 using Microsoft.Extensions.Logging;
 using NetVips;

View File

@@ -92,7 +92,8 @@ namespace API.Services
         public void ClearDirectory(string directoryPath)
         {
-            DirectoryInfo di = new DirectoryInfo(directoryPath);
+            var di = new DirectoryInfo(directoryPath);
+            if (!di.Exists) return;

             foreach (var file in di.EnumerateFiles())
             {
@@ -156,7 +157,7 @@
             return new ImageDto
             {
-                Content = await File.ReadAllBytesAsync(imagePath),
+                Content = await ReadFileAsync(imagePath),
                 Filename = Path.GetFileNameWithoutExtension(imagePath),
                 FullPath = Path.GetFullPath(imagePath),
                 Width = image.Width,
@@ -165,6 +166,12 @@
             };
         }

+        public async Task<byte[]> ReadFileAsync(string path)
+        {
+            if (!File.Exists(path)) return Array.Empty<byte>();
+            return await File.ReadAllBytesAsync(path);
+        }
+
         /// <summary>
         /// Recursively scans files and applies an action on them. This uses as many cores the underlying PC has to speed

View File

@ -1,4 +1,5 @@
using System.Threading.Tasks; using System.IO;
using System.Threading.Tasks;
using API.Entities.Enums; using API.Entities.Enums;
using API.Helpers.Converters; using API.Helpers.Converters;
using API.Interfaces; using API.Interfaces;
@ -16,6 +17,8 @@ namespace API.Services
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
private readonly IMetadataService _metadataService; private readonly IMetadataService _metadataService;
private readonly IBackupService _backupService; private readonly IBackupService _backupService;
private readonly ICleanupService _cleanupService;
private readonly IDirectoryService _directoryService;
public BackgroundJobServer Client => new BackgroundJobServer(); public BackgroundJobServer Client => new BackgroundJobServer();
// new BackgroundJobServerOptions() // new BackgroundJobServerOptions()
@ -24,7 +27,8 @@ namespace API.Services
// } // }
public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService, public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService,
IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService) IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService,
IDirectoryService directoryService)
{ {
_cacheService = cacheService; _cacheService = cacheService;
_logger = logger; _logger = logger;
@ -32,6 +36,8 @@ namespace API.Services
_unitOfWork = unitOfWork; _unitOfWork = unitOfWork;
_metadataService = metadataService; _metadataService = metadataService;
_backupService = backupService; _backupService = backupService;
_cleanupService = cleanupService;
_directoryService = directoryService;
ScheduleTasks(); ScheduleTasks();
@ -65,7 +71,7 @@ namespace API.Services
RecurringJob.AddOrUpdate(() => _backupService.BackupDatabase(), Cron.Weekly); RecurringJob.AddOrUpdate(() => _backupService.BackupDatabase(), Cron.Weekly);
} }
RecurringJob.AddOrUpdate(() => _cacheService.Cleanup(), Cron.Daily); RecurringJob.AddOrUpdate(() => _cleanupService.Cleanup(), Cron.Daily);
} }
public void ScanLibrary(int libraryId, bool forceUpdate = false) public void ScanLibrary(int libraryId, bool forceUpdate = false)
@ -85,6 +91,12 @@ namespace API.Services
BackgroundJob.Enqueue((() => _metadataService.RefreshMetadata(libraryId, forceUpdate))); BackgroundJob.Enqueue((() => _metadataService.RefreshMetadata(libraryId, forceUpdate)));
} }
public void CleanupTemp()
{
var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
BackgroundJob.Enqueue((() => _directoryService.ClearDirectory(tempDirectory)));
}
public void BackupDatabase() public void BackupDatabase()
{ {
BackgroundJob.Enqueue(() => _backupService.BackupDatabase()); BackgroundJob.Enqueue(() => _backupService.BackupDatabase());
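None of the hunks shown here actually call the new CleanupTemp hook (ServerController now injects ITaskScheduler, but GetLogs deletes its temp files inline). A purely hypothetical consumer, assuming ITaskScheduler lives in API.Interfaces as the new using in ServerController suggests:

using API.Interfaces;

public class DownloadCleanupExample
{
    private readonly ITaskScheduler _taskScheduler;

    public DownloadCleanupExample(ITaskScheduler taskScheduler)
    {
        _taskScheduler = taskScheduler;
    }

    public void AfterDownloadServed()
    {
        // Enqueues a Hangfire background job that clears <current working directory>/temp
        // via IDirectoryService.ClearDirectory, per the CleanupTemp implementation above.
        _taskScheduler.CleanupTemp();
    }
}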

View File

@@ -11,7 +11,7 @@ using API.Interfaces.Services;
 using Microsoft.Extensions.Configuration;
 using Microsoft.Extensions.Logging;

-namespace API.Services
+namespace API.Services.Tasks
 {
     public class BackupService : IBackupService
     {
@@ -27,16 +27,10 @@ namespace API.Services
             _unitOfWork = unitOfWork;
             _logger = logger;
             _directoryService = directoryService;

             var maxRollingFiles = config.GetMaxRollingFiles();
             var loggingSection = config.GetLoggingFileName();
-
-            var multipleFileRegex = maxRollingFiles > 0 ? @"\d*" : string.Empty;
-            var fi = new FileInfo(loggingSection);
-
-            var files = maxRollingFiles > 0
-                ? _directoryService.GetFiles(Directory.GetCurrentDirectory(), $@"{fi.Name}{multipleFileRegex}\.log")
-                : new string[] {"kavita.log"};
+            var files = LogFiles(maxRollingFiles, loggingSection);
             _backupFiles = new List<string>()
             {
                 "appsettings.json",
@@ -52,6 +46,17 @@ namespace API.Services
             }
         }

+        public IEnumerable<string> LogFiles(int maxRollingFiles, string logFileName)
+        {
+            var multipleFileRegex = maxRollingFiles > 0 ? @"\d*" : string.Empty;
+            var fi = new FileInfo(logFileName);
+
+            var files = maxRollingFiles > 0
+                ? _directoryService.GetFiles(Directory.GetCurrentDirectory(), $@"{fi.Name}{multipleFileRegex}\.log")
+                : new string[] {"kavita.log"};
+            return files;
+        }
+
         public void BackupDatabase()
         {
             _logger.LogInformation("Beginning backup of Database at {BackupTime}", DateTime.Now);

View File

@@ -0,0 +1,33 @@
+using System.IO;
+using API.Interfaces.Services;
+using Microsoft.Extensions.Logging;
+
+namespace API.Services.Tasks
+{
+    /// <summary>
+    /// Cleans up after operations on reoccurring basis
+    /// </summary>
+    public class CleanupService : ICleanupService
+    {
+        private readonly ICacheService _cacheService;
+        private readonly IDirectoryService _directoryService;
+        private readonly ILogger<CleanupService> _logger;
+
+        public CleanupService(ICacheService cacheService, IDirectoryService directoryService, ILogger<CleanupService> logger)
+        {
+            _cacheService = cacheService;
+            _directoryService = directoryService;
+            _logger = logger;
+        }
+
+        public void Cleanup()
+        {
+            _logger.LogInformation("Cleaning temp directory");
+            var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
+            _directoryService.ClearDirectory(tempDirectory);
+
+            _logger.LogInformation("Cleaning cache directory");
+            _cacheService.Cleanup();
+        }
+    }
+}

View File

@@ -4,7 +4,6 @@ using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
 using System.Linq;
-using System.Runtime.CompilerServices;
 using System.Threading.Tasks;
 using API.Entities;
 using API.Entities.Enums;
@@ -14,8 +13,7 @@ using API.Parser;
 using Hangfire;
 using Microsoft.Extensions.Logging;

-[assembly: InternalsVisibleTo("API.Tests")]
-namespace API.Services
+namespace API.Services.Tasks
 {
     public class ScannerService : IScannerService
     {