Code cleanup. Implemented ability to schedule Library Backups.

This commit is contained in:
Joseph Milazzo 2021-02-17 08:58:36 -06:00
parent 83b9394b17
commit b4ee16d8d1
35 changed files with 217 additions and 91 deletions
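In short, this commit wires database backups into the same Hangfire-based scheduling already used for library scans: a new IBackupService zips the application files into a configurable BackupDirectory, and TaskScheduler registers it from the seeded TaskBackup setting. A minimal sketch of that scheduling step, assuming _unitOfWork and _backupService are injected as in the TaskScheduler changes further down:

var backupSetting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Result.Value;
if (backupSetting != null)
{
    // Convert "daily"/"weekly" etc. into a cron expression and register the recurring job
    RecurringJob.AddOrUpdate(() => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(backupSetting));
}
else
{
    // No setting seeded yet: default to a weekly backup
    RecurringJob.AddOrUpdate(() => _backupService.BackupDatabase(), Cron.Weekly);
}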

View File

@ -1,4 +1,3 @@
using System;
using System.Collections.Generic;
using API.Entities.Enums;
using API.Parser;

View File

@ -1,6 +1,6 @@
using System.IO;
using System.IO.Compression;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;

View File

@ -10,10 +10,10 @@ namespace API.Tests.Services
// private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
// private readonly IDirectoryService _directoryService = Substitute.For<DirectoryService>();
public CacheServiceTests()
{
//_cacheService = new CacheService(_logger, _unitOfWork, _archiveService, _directoryService);
}
// public CacheServiceTests()
// {
// //_cacheService = new CacheService(_logger, _unitOfWork, _archiveService, _directoryService);
// }
//string GetCachedPagePath(Volume volume, int page)
[Fact]

View File

@ -1,5 +1,4 @@
using API.Interfaces;
using API.Services;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;

View File

@ -1,15 +1,10 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using API.Entities;
using API.Entities.Enums;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
using API.Services;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;

View File

@ -7,6 +7,7 @@ using API.DTOs;
using API.Entities;
using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using AutoMapper;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Identity;
@ -43,7 +44,7 @@ namespace API.Controllers
[HttpPost("reset-password")]
public async Task<ActionResult> UpdatePassword(ResetPasswordDto resetPasswordDto)
{
_logger.LogInformation($"{User.GetUsername()} is changing {resetPasswordDto.UserName}'s password.");
_logger.LogInformation("{UserName} is changing {ResetUser}'s password", User.GetUsername(), resetPasswordDto.UserName);
var user = await _userManager.Users.SingleAsync(x => x.UserName == resetPasswordDto.UserName);
var result = await _userManager.RemovePasswordAsync(user);
if (!result.Succeeded) return BadRequest("Unable to update password");
@ -77,14 +78,14 @@ namespace API.Controllers
// When we register an admin, we need to grant them access to all Libraries.
if (registerDto.IsAdmin)
{
_logger.LogInformation($"{user.UserName} is being registered as admin. Granting access to all libraries.");
_logger.LogInformation("{UserName} is being registered as admin. Granting access to all libraries", user.UserName);
var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesAsync()).ToList();
foreach (var lib in libraries)
{
lib.AppUsers ??= new List<AppUser>();
lib.AppUsers.Add(user);
}
if (libraries.Any() && !await _unitOfWork.Complete()) _logger.LogError("There was an issue granting library access. Please do this manually.");
if (libraries.Any() && !await _unitOfWork.Complete()) _logger.LogError("There was an issue granting library access. Please do this manually");
}
return new UserDto
@ -116,7 +117,7 @@ namespace API.Controllers
_unitOfWork.UserRepository.Update(user);
await _unitOfWork.Complete();
_logger.LogInformation($"{user.UserName} logged in at {user.LastActive}");
_logger.LogInformation("{UserName} logged in at {Time}", user.UserName, user.LastActive);
return new UserDto
{

View File

@ -1,10 +1,19 @@
using System.IO;
using API.Interfaces;
using Microsoft.AspNetCore.Mvc;
namespace API.Controllers
{
public class FallbackController : Controller
{
private readonly ITaskScheduler _taskScheduler;
public FallbackController(ITaskScheduler taskScheduler)
{
// This is used to load TaskScheduler on startup without having to navigate to a Controller that uses it.
_taskScheduler = taskScheduler;
}
public ActionResult Index()
{
return PhysicalFile(Path.Combine(Directory.GetCurrentDirectory(), "wwwroot", "index.html"), "text/HTML");

View File

@ -2,18 +2,15 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using API.Data;
using API.DTOs;
using API.Entities;
using API.Extensions;
using API.Helpers;
using API.Interfaces;
using API.Interfaces.Services;
using AutoMapper;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Controllers
@ -26,18 +23,16 @@ namespace API.Controllers
private readonly IMapper _mapper;
private readonly ITaskScheduler _taskScheduler;
private readonly IUnitOfWork _unitOfWork;
private readonly DataContext _dataContext; // TODO: Remove, only for FTS prototyping
public LibraryController(IDirectoryService directoryService,
ILogger<LibraryController> logger, IMapper mapper, ITaskScheduler taskScheduler,
IUnitOfWork unitOfWork, DataContext dataContext)
IUnitOfWork unitOfWork)
{
_directoryService = directoryService;
_logger = logger;
_mapper = mapper;
_taskScheduler = taskScheduler;
_unitOfWork = unitOfWork;
_dataContext = dataContext;
}
/// <summary>
@ -182,7 +177,7 @@ namespace API.Controllers
public async Task<ActionResult<bool>> DeleteLibrary(int libraryId)
{
var username = User.GetUsername();
_logger.LogInformation($"Library {libraryId} is being deleted by {username}.");
_logger.LogInformation("Library {LibraryId} is being deleted by {UserName}", libraryId, username);
var series = await _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId);
var chapterIds =
await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(series.Select(x => x.Id).ToArray());
@ -226,6 +221,7 @@ namespace API.Controllers
//NOTE: What about normalizing search query and only searching against normalizedname in Series?
// So One Punch would match One-Punch
// This also means fewer indexes are needed.
// TODO: Add indexes of what we are searching on
queryString = queryString.Replace(@"%", "");
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());

View File

@ -6,6 +6,7 @@ using API.DTOs;
using API.Entities;
using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
@ -34,15 +35,6 @@ namespace API.Controllers
var chapter = await _cacheService.Ensure(chapterId);
if (chapter == null) return BadRequest("There was an issue finding image file for reading");
// TODO: This code works, but might need bounds checking. UI can send bad data
// if (page >= chapter.Pages)
// {
// page = chapter.Pages - 1;
// } else if (page < 0)
// {
// page = 0;
// }
var (path, mangaFile) = await _cacheService.GetCachedPagePath(chapter, page);
if (string.IsNullOrEmpty(path)) return BadRequest($"No such image for page {page}");
@ -68,7 +60,7 @@ namespace API.Controllers
public async Task<ActionResult> MarkRead(MarkReadDto markReadDto)
{
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
var volumes = await _unitOfWork.SeriesRepository.GetVolumes(markReadDto.SeriesId); // TODO: Make this async
var volumes = await _unitOfWork.SeriesRepository.GetVolumes(markReadDto.SeriesId);
user.Progresses ??= new List<AppUserProgress>();
foreach (var volume in volumes)
{

View File

@ -36,7 +36,7 @@ namespace API.Controllers
{
var username = User.GetUsername();
var chapterIds = (await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new []{seriesId}));
_logger.LogInformation($"Series {seriesId} is being deleted by {username}.");
_logger.LogInformation("Series {SeriesId} is being deleted by {UserName}", seriesId, username);
var result = await _unitOfWork.SeriesRepository.DeleteSeriesAsync(seriesId);
if (result)

View File

@ -21,7 +21,7 @@ namespace API.Controllers
[HttpPost("restart")]
public ActionResult RestartServer()
{
_logger.LogInformation($"{User.GetUsername()} is restarting server from admin dashboard.");
_logger.LogInformation("{UserName} is restarting server from admin dashboard", User.GetUsername());
_applicationLifetime.StopApplication();
return Ok();

View File

@ -35,7 +35,7 @@ namespace API.Controllers
[HttpPost("")]
public async Task<ActionResult<ServerSettingDto>> UpdateSettings(ServerSettingDto updateSettingsDto)
{
_logger.LogInformation($"{User.GetUsername()} is updating Server Settings");
_logger.LogInformation("{UserName} is updating Server Settings", User.GetUsername());
if (updateSettingsDto.CacheDirectory.Equals(string.Empty))
{
@ -72,9 +72,11 @@ namespace API.Controllers
}
}
if (!_unitOfWork.HasChanges()) return Ok("Nothing was updated");
if (_unitOfWork.HasChanges() && await _unitOfWork.Complete())
{
_logger.LogInformation("Server Settings updated.");
_logger.LogInformation("Server Settings updated");
return Ok(updateSettingsDto);
}

View File

@ -9,7 +9,6 @@
public int Height { get; init; }
public string Format { get; init; }
public byte[] Content { get; init; }
//public int Chapter { get; set; }
public string MangaFileName { get; set; }
public bool NeedsSplitting { get; set; }
}

View File

@ -1,4 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Constants;
@ -38,7 +39,8 @@ namespace API.Data
new() {Key = ServerSettingKey.CacheDirectory, Value = CacheService.CacheDirectory},
new () {Key = ServerSettingKey.TaskScan, Value = "daily"},
//new () {Key = ServerSettingKey.LoggingLevel, Value = "Information"},
//new () {Key = ServerSettingKey.TaskBackup, Value = "daily"},
new () {Key = ServerSettingKey.TaskBackup, Value = "weekly"},
new () {Key = ServerSettingKey.BackupDirectory, Value = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "backups/"))},
new () {Key = ServerSettingKey.Port, Value = "5000"},
};

View File

@ -1,5 +1,4 @@
using System;
using System.Collections.Generic;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;

View File

@ -1,11 +1,20 @@
namespace API.Entities.Enums
using System.ComponentModel;
namespace API.Entities.Enums
{
public enum ServerSettingKey
{
[Description("TaskScan")]
TaskScan = 0,
[Description("CacheDirectory")]
CacheDirectory = 1,
[Description("TaskBackup")]
TaskBackup = 2,
[Description("LoggingLevel")]
LoggingLevel = 3,
Port = 4
[Description("Port")]
Port = 4,
[Description("BackupDirectory")]
BackupDirectory = 5
}
}
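The enum members now carry [Description] attributes, but this commit does not show where they are read. One common pattern (an assumption here, not code from this commit) is a small reflection helper that maps an enum member back to its description string:

using System;
using System.ComponentModel;
using System.Linq;
using System.Reflection;

public static class EnumExtensions
{
    // Hypothetical helper: returns the [Description] value of an enum member,
    // or the member name when no attribute is present.
    public static string GetDescription(this Enum value)
    {
        var member = value.GetType().GetMember(value.ToString()).FirstOrDefault();
        return member?.GetCustomAttribute<DescriptionAttribute>()?.Description ?? value.ToString();
    }
}

// Usage: ServerSettingKey.BackupDirectory.GetDescription() would return "BackupDirectory".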

View File

@ -26,6 +26,7 @@ namespace API.Extensions
services.AddScoped<IScannerService, ScannerService>();
services.AddScoped<IArchiveService, ArchiveService>();
services.AddScoped<IMetadataService, MetadataService>();
services.AddScoped<IBackupService, BackupService>();

View File

@ -5,6 +5,7 @@ namespace API.Helpers.Converters
{
public static class CronConverter
{
// TODO: this isn't used. Replace strings with Enums?
public static readonly IEnumerable<string> Options = new []
{
"disabled",

View File

@ -18,10 +18,12 @@ namespace API.Interfaces
/// <param name="userId"></param>
/// <returns></returns>
Task<IEnumerable<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId);
/// <summary>
/// Does not add user information like progress, ratings, etc.
/// </summary>
/// <param name="libraryIds"></param>
/// <param name="searchQuery">Series name to search for</param>
/// <returns></returns>
Task<IEnumerable<SearchResultDto>> SearchSeries(int[] libraryIds, string searchQuery);
Task<IEnumerable<Series>> GetSeriesForLibraryIdAsync(int libraryId);

View File

@ -2,6 +2,10 @@
{
public interface ITaskScheduler
{
/// <summary>
/// For use on Server startup
/// </summary>
void ScheduleTasks();
void ScanLibrary(int libraryId, bool forceUpdate = false);
void CleanupChapters(int[] chapterIds);
void RefreshMetadata(int libraryId, bool forceUpdate = true);

View File

@ -1,6 +1,6 @@
using System.IO.Compression;
namespace API.Interfaces
namespace API.Interfaces.Services
{
public interface IArchiveService
{

View File

@ -0,0 +1,7 @@
namespace API.Interfaces.Services
{
public interface IBackupService
{
void BackupDatabase();
}
}

View File

@ -1,7 +1,7 @@
using System.Threading.Tasks;
using API.Entities;
namespace API.Interfaces
namespace API.Interfaces.Services
{
public interface ICacheService
{

View File

@ -2,7 +2,7 @@
using System.Threading.Tasks;
using API.DTOs;
namespace API.Interfaces
namespace API.Interfaces.Services
{
public interface IDirectoryService
{
@ -21,5 +21,11 @@ namespace API.Interfaces
/// <param name="searchPatternExpression"></param>
/// <returns></returns>
string[] GetFiles(string path, string searchPatternExpression = "");
/// <summary>
/// Returns true if the path exists and is a directory. If the path does not exist, this will create it. Returns false in all failure cases.
/// </summary>
/// <param name="directoryPath"></param>
/// <returns></returns>
bool ExistOrCreate(string directoryPath);
}
}
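This is the contract BackupService relies on further down, and the TODO added to CacheService.EnsureCacheDirectory points at the same method. A rough sketch of what that replacement could look like, assuming CacheService has an IDirectoryService injected (the commented-out test constructor earlier in this diff suggests it does):

public void EnsureCacheDirectory()
{
    _logger.LogDebug("Checking if valid Cache directory: {CacheDirectory}", CacheDirectory);
    // Hypothetical rewrite on top of ExistOrCreate: create the directory if missing, log on failure
    if (!_directoryService.ExistOrCreate(CacheDirectory))
    {
        _logger.LogError("Cache directory {CacheDirectory} is not accessible and could not be created", CacheDirectory);
    }
}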

View File

@ -1,4 +1,4 @@
namespace API.Interfaces
namespace API.Interfaces.Services
{
public interface IScannerService
{

View File

@ -1,7 +1,7 @@
using System.Threading.Tasks;
using API.Entities;
namespace API.Interfaces
namespace API.Interfaces.Services
{
public interface ITokenService
{

View File

@ -2,6 +2,7 @@ using System;
using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Interfaces;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Identity;
using Microsoft.EntityFrameworkCore;
@ -40,6 +41,7 @@ namespace API
logger.LogError(ex, "An error occurred during migration");
}
await host.RunAsync();
}

View File

@ -5,6 +5,7 @@ using System.IO.Compression;
using System.Linq;
using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using Microsoft.Extensions.Logging;
using NetVips;

View File

@ -0,0 +1,64 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Threading.Tasks;
using API.Entities.Enums;
using API.Interfaces;
using API.Interfaces.Services;
using Microsoft.Extensions.Logging;
namespace API.Services
{
public class BackupService : IBackupService
{
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<BackupService> _logger;
private readonly IDirectoryService _directoryService;
private readonly IList<string> _backupFiles = new List<string>()
{
"appsettings.json",
"Hangfire.db",
"Hangfire-log.db",
"kavita.db",
"kavita.db-shm",
"kavita.db-wal",
"kavita.log",
};
public BackupService(IUnitOfWork unitOfWork, ILogger<BackupService> logger, IDirectoryService directoryService)
{
_unitOfWork = unitOfWork;
_logger = logger;
_directoryService = directoryService;
}
public void BackupDatabase()
{
_logger.LogInformation("Beginning backup of Database at {BackupTime}", DateTime.Now);
var backupDirectory = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Result.Value;
_logger.LogDebug("Backing up to {BackupDirectory}", backupDirectory);
if (!_directoryService.ExistOrCreate(backupDirectory))
{
_logger.LogError("Could not write to {BackupDirectory}; aborting backup", backupDirectory);
return;
}
var fileInfos = _backupFiles.Select(file => new FileInfo(Path.Join(Directory.GetCurrentDirectory(), file))).ToList();
var zipPath = Path.Join(backupDirectory, $"kavita_backup_{DateTime.Now:yyyy-MM-dd_HH-mm-ss}.zip"); // timestamp formatted so the file name contains no invalid characters
using (var zipArchive = ZipFile.Open(zipPath, ZipArchiveMode.Create))
{
foreach (var fileInfo in fileInfos)
{
zipArchive.CreateEntryFromFile(fileInfo.FullName, fileInfo.Name);
}
}
_logger.LogInformation("Database backup completed");
}
}
}

View File

@ -6,6 +6,7 @@ using API.Comparators;
using API.Entities;
using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using Microsoft.Extensions.Logging;
namespace API.Services
@ -30,11 +31,12 @@ namespace API.Services
public void EnsureCacheDirectory()
{
_logger.LogDebug($"Checking if valid Cache directory: {CacheDirectory}");
// TODO: Replace with DirectoryService.ExistOrCreate()
_logger.LogDebug("Checking if valid Cache directory: {CacheDirectory}", CacheDirectory);
var di = new DirectoryInfo(CacheDirectory);
if (!di.Exists)
{
_logger.LogError($"Cache directory {CacheDirectory} is not accessible or does not exist. Creating...");
_logger.LogError("Cache directory {CacheDirectory} is not accessible or does not exist. Creating...", CacheDirectory);
Directory.CreateDirectory(CacheDirectory);
}
}
@ -66,15 +68,15 @@ namespace API.Services
}
catch (Exception ex)
{
_logger.LogError("There was an issue deleting one or more folders/files during cleanup.", ex);
_logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup");
}
_logger.LogInformation("Cache directory purged.");
_logger.LogInformation("Cache directory purged");
}
public void CleanupChapters(int[] chapterIds)
{
_logger.LogInformation($"Running Cache cleanup on Volumes");
_logger.LogInformation("Running Cache cleanup on Volumes");
foreach (var chapter in chapterIds)
{

View File

@ -7,7 +7,7 @@ using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using API.DTOs;
using API.Interfaces;
using API.Interfaces.Services;
using Microsoft.Extensions.Logging;
using NetVips;
@ -49,7 +49,23 @@ namespace API.Services
return !Directory.Exists(path) ? Array.Empty<string>() : Directory.GetFiles(path);
}
public bool ExistOrCreate(string directoryPath)
{
var di = new DirectoryInfo(directoryPath);
if (di.Exists) return true;
try
{
Directory.CreateDirectory(directoryPath);
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an issue creating directory: {Directory}", directoryPath);
return false;
}
return true;
}
public IEnumerable<string> ListDirectory(string rootPath)
{
if (!Directory.Exists(rootPath)) return ImmutableList<string>.Empty;
@ -66,7 +82,7 @@ namespace API.Services
{
if (!File.Exists(imagePath))
{
_logger.LogError("Image does not exist on disk.");
_logger.LogError("Image does not exist on disk");
return null;
}
using var image = Image.NewFromFile(imagePath);
@ -82,16 +98,16 @@ namespace API.Services
};
}
/// <summary>
/// Recursively scans files and applies an action on them. This uses as many cores the underlying PC has to speed
/// up processing.
/// </summary>
/// <param name="root">Directory to scan</param>
/// <param name="action">Action to apply on file path</param>
/// <exception cref="ArgumentException"></exception>
public static int TraverseTreeParallelForEach(string root, Action<string> action, string searchPattern)
/// <summary>
/// Recursively scans files and applies an action on them. This uses as many cores the underlying PC has to speed
/// up processing.
/// </summary>
/// <param name="root">Directory to scan</param>
/// <param name="action">Action to apply on file path</param>
/// <param name="searchPattern">Regex pattern to search against</param>
/// <exception cref="ArgumentException"></exception>
public static int TraverseTreeParallelForEach(string root, Action<string> action, string searchPattern)
{
//Count of files traversed and timer for diagnostic output
var fileCount = 0;
@ -127,9 +143,6 @@ namespace API.Services
}
try {
// TODO: In future, we need to take LibraryType into consideration for what extensions to allow (RAW should allow images)
// or we need to move this filtering to another area (Process)
// or we can get all files and put a check in place during Process to abandon files
files = GetFilesWithCertainExtensions(currentDir, searchPattern)
.ToArray();
}

View File

@ -2,11 +2,9 @@
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using API.Entities;
using API.Entities.Enums;

View File

@ -13,60 +13,81 @@ namespace API.Services
private readonly ICacheService _cacheService;
private readonly ILogger<TaskScheduler> _logger;
private readonly IScannerService _scannerService;
private readonly IUnitOfWork _unitOfWork;
private readonly IMetadataService _metadataService;
private readonly IBackupService _backupService;
public BackgroundJobServer Client => new BackgroundJobServer(new BackgroundJobServerOptions()
{
WorkerCount = 1
});
public BackgroundJobServer Client => new BackgroundJobServer();
// new BackgroundJobServerOptions()
// {
// WorkerCount = 1
// }
public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService,
IUnitOfWork unitOfWork, IMetadataService metadataService)
IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService)
{
_cacheService = cacheService;
_logger = logger;
_scannerService = scannerService;
_unitOfWork = unitOfWork;
_metadataService = metadataService;
_backupService = backupService;
_logger.LogInformation("Scheduling/Updating cache cleanup on a daily basis.");
var setting = Task.Run(() => unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).Result;
ScheduleTasks();
//JobStorage.Current.GetMonitoringApi().
}
public void ScheduleTasks()
{
_logger.LogInformation("Scheduling reoccurring tasks");
string setting = null;
setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).Result.Value;
if (setting != null)
{
RecurringJob.AddOrUpdate(() => _scannerService.ScanLibraries(), () => CronConverter.ConvertToCronNotation(setting.Value));
_logger.LogDebug("Scheduling Scan Library Task for {Cron}", setting);
RecurringJob.AddOrUpdate(() => _scannerService.ScanLibraries(), () => CronConverter.ConvertToCronNotation(setting));
}
else
{
RecurringJob.AddOrUpdate(() => _cacheService.Cleanup(), Cron.Daily);
RecurringJob.AddOrUpdate(() => _scannerService.ScanLibraries(), Cron.Daily);
}
//JobStorage.Current.GetMonitoringApi().
setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Result.Value;
if (setting != null)
{
_logger.LogDebug("Scheduling Backup Task for {Cron}", setting);
RecurringJob.AddOrUpdate(() => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting));
}
else
{
RecurringJob.AddOrUpdate(() => _backupService.BackupDatabase(), Cron.Weekly);
}
RecurringJob.AddOrUpdate(() => _cacheService.Cleanup(), Cron.Daily);
}
public void ScanLibrary(int libraryId, bool forceUpdate = false)
{
_logger.LogInformation($"Enqueuing library scan for: {libraryId}");
_logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate));
}
public void CleanupChapters(int[] chapterIds)
{
BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds));
}
public void RefreshMetadata(int libraryId, bool forceUpdate = true)
{
_logger.LogInformation($"Enqueuing library metadata refresh for: {libraryId}");
_logger.LogInformation("Enqueuing library metadata refresh for: {LibraryId}", libraryId);
BackgroundJob.Enqueue((() => _metadataService.RefreshMetadata(libraryId, forceUpdate)));
}
public void ScanLibraryInternal(int libraryId, bool forceUpdate)
public void BackupDatabase()
{
_scannerService.ScanLibrary(libraryId, forceUpdate);
_metadataService.RefreshMetadata(libraryId, forceUpdate);
}
BackgroundJob.Enqueue(() => _backupService.BackupDatabase());
}
}
}

View File

@ -6,7 +6,7 @@ using System.Security.Claims;
using System.Text;
using System.Threading.Tasks;
using API.Entities;
using API.Interfaces;
using API.Interfaces.Services;
using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.Configuration;
using Microsoft.IdentityModel.Tokens;

View File

@ -38,6 +38,8 @@ namespace API
{
c.SwaggerDoc("v1", new OpenApiInfo { Title = "API", Version = "v1" });
});
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.