Moved some files around; fixed log file fetching to zip the logs and work more reliably; refactored some code in DirectoryService.

This commit is contained in:
Joseph Milazzo 2021-02-24 15:58:17 -06:00
parent bbb4240e20
commit ecfb40cb2d
15 changed files with 128 additions and 47 deletions

View File

@ -1,5 +1,6 @@
using API.Interfaces;
using API.Services;
using API.Services.Tasks;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using NSubstitute;

View File

@ -4,6 +4,7 @@ using API.Entities;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using API.Services.Tasks;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;

View File

@ -1,7 +1,9 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Threading.Tasks;
using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
@ -18,14 +20,18 @@ namespace API.Controllers
private readonly ILogger<ServerController> _logger;
private readonly IConfiguration _config;
private readonly IDirectoryService _directoryService;
private readonly IBackupService _backupService;
private readonly ITaskScheduler _taskScheduler;
public ServerController(IHostApplicationLifetime applicationLifetime, ILogger<ServerController> logger, IConfiguration config,
IDirectoryService directoryService)
IDirectoryService directoryService, IBackupService backupService, ITaskScheduler taskScheduler)
{
_applicationLifetime = applicationLifetime;
_logger = logger;
_config = config;
_directoryService = directoryService;
_backupService = backupService;
_taskScheduler = taskScheduler;
}
[HttpPost("restart")]
@ -40,40 +46,34 @@ namespace API.Controllers
[HttpGet("logs")]
public async Task<ActionResult> GetLogs()
{
// TODO: Zip up the log files
var maxRollingFiles = int.Parse(_config.GetSection("Logging").GetSection("File").GetSection("MaxRollingFiles").Value);
var loggingSection = _config.GetSection("Logging").GetSection("File").GetSection("Path").Value;
var multipleFileRegex = maxRollingFiles > 0 ? @"\d*" : string.Empty;
FileInfo fi = new FileInfo(loggingSection);
var files = _directoryService.GetFilesWithExtension(Directory.GetCurrentDirectory(), $@"{fi.Name}{multipleFileRegex}\.log");
Console.WriteLine(files);
var files = _backupService.LogFiles(_config.GetMaxRollingFiles(), _config.GetLoggingFileName());
var logFile = Path.Join(Directory.GetCurrentDirectory(), loggingSection);
_logger.LogInformation("Fetching download of logs: {LogFile}", logFile);
// First, copy the file to temp
var originalFile = new FileInfo(logFile);
var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
_directoryService.ExistOrCreate(tempDirectory);
var tempLocation = Path.Join(tempDirectory, originalFile.Name);
originalFile.CopyTo(tempLocation); // TODO: Make this unique based on date
var dateString = DateTime.Now.ToShortDateString().Replace("/", "_");
// Read into memory
await using var memory = new MemoryStream();
// We need to copy it else it will throw an exception
await using (var stream = new FileStream(tempLocation, FileMode.Open, FileAccess.Read))
{
await stream.CopyToAsync(memory);
var tempLocation = Path.Join(tempDirectory, "logs_" + dateString);
_directoryService.ExistOrCreate(tempLocation);
if (!_directoryService.CopyFilesToDirectory(files, tempLocation))
{
return BadRequest("Unable to copy files to temp directory for log download.");
}
memory.Position = 0;
// Delete temp
(new FileInfo(tempLocation)).Delete();
var zipPath = Path.Join(tempDirectory, $"kavita_logs_{dateString}.zip");
try
{
ZipFile.CreateFromDirectory(tempLocation, zipPath);
}
catch (AggregateException ex)
{
_logger.LogError(ex, "There was an issue when archiving library backup");
return BadRequest("There was an issue when archiving library backup");
}
var fileBytes = await _directoryService.ReadFileAsync(zipPath);
return File(memory, "text/plain", Path.GetFileName(logFile));
_directoryService.ClearAndDeleteDirectory(tempLocation);
(new FileInfo(zipPath)).Delete();
return File(fileBytes, "application/zip", Path.GetFileName(zipPath));
}
}
}

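For context, here is a minimal standalone sketch of the same zip-and-download flow the controller now implements. File names and paths are hypothetical stand-ins for what `BackupService.LogFiles(...)` and the configuration would resolve at runtime:

```csharp
using System;
using System.IO;
using System.IO.Compression;

// Stand-ins for what BackupService.LogFiles(...) would return.
var files = new[] { "kavita.log", "kavita1.log" };
var dateString = DateTime.Now.ToShortDateString().Replace("/", "_");

// Copy the logs into a dated temp folder so the zip has a stable source.
var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
var tempLocation = Path.Join(tempDirectory, "logs_" + dateString);
Directory.CreateDirectory(tempLocation); // plays the role of ExistOrCreate

foreach (var file in files)
{
    if (File.Exists(file))
        File.Copy(file, Path.Join(tempLocation, Path.GetFileName(file)), overwrite: true);
}

// CreateFromDirectory creates a new archive and throws if zipPath already exists.
var zipPath = Path.Join(tempDirectory, $"kavita_logs_{dateString}.zip");
ZipFile.CreateFromDirectory(tempLocation, zipPath);

// Read the archive into memory, then clean up both the folder and the zip.
var fileBytes = File.ReadAllBytes(zipPath);
Directory.Delete(tempLocation, recursive: true);
File.Delete(zipPath);
Console.WriteLine($"Would return {fileBytes.Length} bytes as application/zip");
```

Compared with the old approach of streaming a single raw `.log` file as `text/plain`, this picks up all rolled log files and avoids holding an open handle on the live log while it is being written.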
View File

@ -3,6 +3,7 @@ using API.Helpers;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using API.Services.Tasks;
using AutoMapper;
using Hangfire;
using Hangfire.LiteDB;
@ -27,6 +28,7 @@ namespace API.Extensions
services.AddScoped<IArchiveService, ArchiveService>();
services.AddScoped<IMetadataService, MetadataService>();
services.AddScoped<IBackupService, BackupService>();
services.AddScoped<ICleanupService, CleanupService>();

View File

@ -9,5 +9,6 @@
void ScanLibrary(int libraryId, bool forceUpdate = false);
void CleanupChapters(int[] chapterIds);
void RefreshMetadata(int libraryId, bool forceUpdate = true);
void CleanupTemp();
}
}

View File

@ -11,5 +11,6 @@ namespace API.Interfaces.Services
byte[] GetCoverImage(string filepath, bool createThumbnail = false);
bool IsValidArchive(string archivePath);
string GetSummaryInfo(string archivePath);
}
}

View File

@ -1,7 +1,17 @@
namespace API.Interfaces.Services
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;
namespace API.Interfaces.Services
{
public interface IBackupService
{
void BackupDatabase();
/// <summary>
/// Returns a list of full paths of the log files configured in <see cref="IConfiguration"/>.
/// </summary>
/// <param name="maxRollingFiles"></param>
/// <param name="logFileName"></param>
/// <returns></returns>
IEnumerable<string> LogFiles(int maxRollingFiles, string logFileName);
}
}

View File

@ -0,0 +1,7 @@
namespace API.Interfaces.Services
{
public interface ICleanupService
{
void Cleanup();
}
}

View File

@ -29,6 +29,8 @@ namespace API.Interfaces.Services
/// <returns></returns>
bool ExistOrCreate(string directoryPath);
Task<byte[]> ReadFileAsync(string path);
/// <summary>
/// Deletes all files within the directory, then the directory itself.
/// </summary>

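The diff adds only the doc comment here; the method body lives in DirectoryService and is not shown. A minimal sketch of an implementation matching that summary (not the actual Kavita code) could be:

```csharp
public void ClearAndDeleteDirectory(string directoryPath)
{
    var di = new DirectoryInfo(directoryPath);
    if (!di.Exists) return;

    // Empty the directory first, then remove the (now empty) directory itself.
    ClearDirectory(directoryPath);
    di.Delete(recursive: true);
}
```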
View File

@ -6,6 +6,7 @@ using System.Linq;
using System.Xml.Serialization;
using API.Extensions;
using API.Interfaces.Services;
using API.Services.Tasks;
using Microsoft.Extensions.Logging;
using NetVips;

View File

@ -92,7 +92,8 @@ namespace API.Services
public void ClearDirectory(string directoryPath)
{
DirectoryInfo di = new DirectoryInfo(directoryPath);
var di = new DirectoryInfo(directoryPath);
if (!di.Exists) return;
foreach (var file in di.EnumerateFiles())
{
@ -156,7 +157,7 @@ namespace API.Services
return new ImageDto
{
Content = await File.ReadAllBytesAsync(imagePath),
Content = await ReadFileAsync(imagePath),
Filename = Path.GetFileNameWithoutExtension(imagePath),
FullPath = Path.GetFullPath(imagePath),
Width = image.Width,
@ -165,6 +166,12 @@ namespace API.Services
};
}
public async Task<byte[]> ReadFileAsync(string path)
{
if (!File.Exists(path)) return Array.Empty<byte>();
return await File.ReadAllBytesAsync(path);
}
/// <summary>
/// Recursively scans files and applies an action on them. This uses as many cores as the underlying PC has to speed

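Note that `ReadFileAsync` swallows a missing file by returning an empty array instead of throwing. A hypothetical caller-side fragment (the path and injected fields are illustrative):

```csharp
var bytes = await _directoryService.ReadFileAsync("config/logs/kavita.log");
if (bytes.Length == 0)
{
    // Either the file does not exist or it is genuinely empty.
    _logger.LogWarning("Log file missing or empty; nothing to return");
}
```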
View File

@ -1,4 +1,5 @@
using System.Threading.Tasks;
using System.IO;
using System.Threading.Tasks;
using API.Entities.Enums;
using API.Helpers.Converters;
using API.Interfaces;
@ -16,6 +17,8 @@ namespace API.Services
private readonly IUnitOfWork _unitOfWork;
private readonly IMetadataService _metadataService;
private readonly IBackupService _backupService;
private readonly ICleanupService _cleanupService;
private readonly IDirectoryService _directoryService;
public BackgroundJobServer Client => new BackgroundJobServer();
// new BackgroundJobServerOptions()
@ -24,7 +27,8 @@ namespace API.Services
// }
public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService,
IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService)
IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService,
IDirectoryService directoryService)
{
_cacheService = cacheService;
_logger = logger;
@ -32,6 +36,8 @@ namespace API.Services
_unitOfWork = unitOfWork;
_metadataService = metadataService;
_backupService = backupService;
_cleanupService = cleanupService;
_directoryService = directoryService;
ScheduleTasks();
@ -65,7 +71,7 @@ namespace API.Services
RecurringJob.AddOrUpdate(() => _backupService.BackupDatabase(), Cron.Weekly);
}
RecurringJob.AddOrUpdate(() => _cacheService.Cleanup(), Cron.Daily);
RecurringJob.AddOrUpdate(() => _cleanupService.Cleanup(), Cron.Daily);
}
public void ScanLibrary(int libraryId, bool forceUpdate = false)
@ -85,6 +91,12 @@ namespace API.Services
BackgroundJob.Enqueue((() => _metadataService.RefreshMetadata(libraryId, forceUpdate)));
}
public void CleanupTemp()
{
var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
BackgroundJob.Enqueue((() => _directoryService.ClearDirectory(tempDirectory)));
}
public void BackupDatabase()
{
BackgroundJob.Enqueue(() => _backupService.BackupDatabase());

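One detail worth noting in `CleanupTemp()`: Hangfire serializes the lambda and its captured arguments, so `tempDirectory` is resolved at enqueue time, not when the job runs. A self-contained sketch of both registration patterns used above (Hangfire.MemoryStorage is an assumed package for a local demo; Kavita itself uses LiteDB storage per the usings in this commit):

```csharp
using System;
using System.IO;
using Hangfire;
using Hangfire.MemoryStorage;

public static class SchedulerSketch
{
    public static void Main()
    {
        GlobalConfiguration.Configuration.UseMemoryStorage();
        using var server = new BackgroundJobServer();

        // Recurring job, as in ScheduleTasks(): runs Cleanup() once a day.
        RecurringJob.AddOrUpdate("cleanup", () => Cleanup(), Cron.Daily());

        // Fire-and-forget, as in CleanupTemp(): the path is captured now,
        // serialized, and replayed when a worker picks the job up.
        var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
        BackgroundJob.Enqueue(() => ClearDirectory(tempDirectory));

        Console.ReadLine(); // keep the server alive long enough to process jobs
    }

    public static void Cleanup() => Console.WriteLine("daily cleanup ran");
    public static void ClearDirectory(string path) => Console.WriteLine($"clearing {path}");
}
```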
View File

@ -11,7 +11,7 @@ using API.Interfaces.Services;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
namespace API.Services
namespace API.Services.Tasks
{
public class BackupService : IBackupService
{
@ -27,16 +27,10 @@ namespace API.Services
_unitOfWork = unitOfWork;
_logger = logger;
_directoryService = directoryService;
var maxRollingFiles = config.GetMaxRollingFiles();
var loggingSection = config.GetLoggingFileName();
var multipleFileRegex = maxRollingFiles > 0 ? @"\d*" : string.Empty;
var fi = new FileInfo(loggingSection);
var files = maxRollingFiles > 0
? _directoryService.GetFiles(Directory.GetCurrentDirectory(), $@"{fi.Name}{multipleFileRegex}\.log")
: new string[] {"kavita.log"};
var files = LogFiles(maxRollingFiles, loggingSection);
_backupFiles = new List<string>()
{
"appsettings.json",
@ -52,6 +46,17 @@ namespace API.Services
}
}
public IEnumerable<string> LogFiles(int maxRollingFiles, string logFileName)
{
var multipleFileRegex = maxRollingFiles > 0 ? @"\d*" : string.Empty;
var fi = new FileInfo(logFileName);
var files = maxRollingFiles > 0
? _directoryService.GetFiles(Directory.GetCurrentDirectory(), $@"{fi.Name}{multipleFileRegex}\.log")
: new string[] {"kavita.log"};
return files;
}
public void BackupDatabase()
{
_logger.LogInformation("Beginning backup of Database at {BackupTime}", DateTime.Now);

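A hypothetical call-site fragment (assumes an injected `IBackupService`): with rolling enabled, `LogFiles` regex-matches rolled files in the working directory and returns full paths; with rolling disabled, it falls back to the bare `"kavita.log"` name rather than a full path.

```csharp
// maxRollingFiles > 0: full paths matched by _directoryService.GetFiles(...)
foreach (var file in _backupService.LogFiles(maxRollingFiles: 1, logFileName: "kavita.log"))
{
    Console.WriteLine(file);
}

// maxRollingFiles == 0: the hardcoded fallback {"kavita.log"}
foreach (var file in _backupService.LogFiles(maxRollingFiles: 0, logFileName: "kavita.log"))
{
    Console.WriteLine(file); // prints "kavita.log"
}
```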
View File

@ -0,0 +1,33 @@
using System.IO;
using API.Interfaces.Services;
using Microsoft.Extensions.Logging;
namespace API.Services.Tasks
{
/// <summary>
/// Cleans up after operations on a recurring basis
/// </summary>
public class CleanupService : ICleanupService
{
private readonly ICacheService _cacheService;
private readonly IDirectoryService _directoryService;
private readonly ILogger<CleanupService> _logger;
public CleanupService(ICacheService cacheService, IDirectoryService directoryService, ILogger<CleanupService> logger)
{
_cacheService = cacheService;
_directoryService = directoryService;
_logger = logger;
}
public void Cleanup()
{
_logger.LogInformation("Cleaning temp directory");
var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
_directoryService.ClearDirectory(tempDirectory);
_logger.LogInformation("Cleaning cache directory");
_cacheService.Cleanup();
}
}
}

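Since the test project already references NSubstitute and xUnit (see the using changes at the top of this commit), a hypothetical unit test for the new service might look like the following. The namespaces are taken from the usings shown in this diff; the test itself is illustrative, not part of the commit:

```csharp
using API.Interfaces.Services;
using API.Services.Tasks;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;

public class CleanupServiceTests
{
    [Fact]
    public void Cleanup_ClearsTempDirectoryAndCache()
    {
        var cacheService = Substitute.For<ICacheService>();
        var directoryService = Substitute.For<IDirectoryService>();
        var logger = Substitute.For<ILogger<CleanupService>>();
        var service = new CleanupService(cacheService, directoryService, logger);

        service.Cleanup();

        // The temp folder under the working directory is cleared...
        directoryService.Received(1).ClearDirectory(Arg.Is<string>(p => p.EndsWith("temp")));
        // ...and cache cleanup is delegated to the cache service.
        cacheService.Received(1).Cleanup();
    }
}
```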
View File

@ -4,7 +4,6 @@ using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.Enums;
@ -14,8 +13,7 @@ using API.Parser;
using Hangfire;
using Microsoft.Extensions.Logging;
[assembly: InternalsVisibleTo("API.Tests")]
namespace API.Services
namespace API.Services.Tasks
{
public class ScannerService : IScannerService
{