Merge pull request #88 from Kareadita/feature/database-task

Clear Database entries after 30 days
This commit is contained in:
Joseph Milazzo 2021-03-20 12:55:10 -05:00 committed by GitHub
commit 16bc83b3c3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 166 additions and 49 deletions

View File

@@ -14,7 +14,6 @@
<PackageReference Include="AutoMapper.Extensions.Microsoft.DependencyInjection" Version="8.1.0" /> <PackageReference Include="AutoMapper.Extensions.Microsoft.DependencyInjection" Version="8.1.0" />
<PackageReference Include="Hangfire" Version="1.7.18" /> <PackageReference Include="Hangfire" Version="1.7.18" />
<PackageReference Include="Hangfire.AspNetCore" Version="1.7.18" /> <PackageReference Include="Hangfire.AspNetCore" Version="1.7.18" />
<PackageReference Include="Hangfire.LiteDB" Version="0.4.0" />
<PackageReference Include="Hangfire.MaximumConcurrentExecutions" Version="1.1.0" /> <PackageReference Include="Hangfire.MaximumConcurrentExecutions" Version="1.1.0" />
<PackageReference Include="Hangfire.MemoryStorage.Core" Version="1.4.0" /> <PackageReference Include="Hangfire.MemoryStorage.Core" Version="1.4.0" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="5.0.1" NoWarn="NU1605" /> <PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="5.0.1" NoWarn="NU1605" />

View File

@@ -18,6 +18,7 @@
public int SeriesId { get; init; } public int SeriesId { get; init; }
public int LibraryId { get; init; } public int LibraryId { get; init; }
public string SeriesName { get; init; } public string SeriesName { get; init; }
public int VolumeId { get; init; }
} }
} }

View File

@ -6,6 +6,7 @@ using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.DTOs; using API.DTOs;
using API.Entities; using API.Entities;
using API.Extensions;
using API.Helpers; using API.Helpers;
using API.Interfaces; using API.Interfaces;
using AutoMapper; using AutoMapper;
@ -297,7 +298,7 @@ namespace API.Data
} }
/// <summary> /// <summary>
/// /// Returns Series that the user has started reading but has not finished
/// </summary> /// </summary>
/// <param name="userId"></param> /// <param name="userId"></param>
/// <param name="libraryId"></param> /// <param name="libraryId"></param>
@ -305,12 +306,12 @@ namespace API.Data
/// <returns></returns> /// <returns></returns>
public async Task<IEnumerable<SeriesDto>> GetInProgress(int userId, int libraryId, int limit) public async Task<IEnumerable<SeriesDto>> GetInProgress(int userId, int libraryId, int limit)
{ {
//var twoWeeksAgo = DateTime.Today.Subtract(TimeSpan.FromDays(14)); // TODO: Think about moving this to a setting // TODO: Idea: Put Total PagesRead and as return so that we can show a progress bar for full series read progress
var series = await _context.Series var series = await _context.Series
.Join(_context.AppUserProgresses, s => s.Id, progress => progress.SeriesId, (s, progress) => new .Join(_context.AppUserProgresses, s => s.Id, progress => progress.SeriesId, (s, progress) => new
{ {
Series = s, Series = s,
progress.PagesRead, PagesRead = _context.AppUserProgresses.Where(s1 => s1.SeriesId == s.Id).Sum(s1 => s1.PagesRead),
progress.AppUserId, progress.AppUserId,
progress.LastModified progress.LastModified
}) })
@ -320,12 +321,11 @@ namespace API.Data
&& (libraryId <= 0 || s.Series.LibraryId == libraryId) ) && (libraryId <= 0 || s.Series.LibraryId == libraryId) )
.Take(limit) .Take(limit)
.OrderByDescending(s => s.LastModified) .OrderByDescending(s => s.LastModified)
.AsNoTracking()
.Select(s => s.Series) .Select(s => s.Series)
.Distinct()
.ProjectTo<SeriesDto>(_mapper.ConfigurationProvider) .ProjectTo<SeriesDto>(_mapper.ConfigurationProvider)
.AsNoTracking()
.ToListAsync(); .ToListAsync();
return series; return series.DistinctBy(s => s.Name);
} }
} }
} }

View File

@ -100,42 +100,112 @@ namespace API.Data
/// <returns></returns> /// <returns></returns>
public async Task<IEnumerable<InProgressChapterDto>> GetContinueReading(int userId, int libraryId, int limit) public async Task<IEnumerable<InProgressChapterDto>> GetContinueReading(int userId, int libraryId, int limit)
{ {
/** TODO: Fix this SQL
* SELECT * FROM
(
SELECT * FROM Chapter C WHERE C.VolumeId IN (SELECT Id from Volume where SeriesId = 1912)
) C INNER JOIN AppUserProgresses AUP ON AUP.ChapterId = C.Id
INNER JOIN Series S ON AUP.SeriesId = S.Id
WHERE AUP.AppUserId = 1 AND AUP.PagesRead < C.Pages
*/
_logger.LogInformation("Get Continue Reading"); _logger.LogInformation("Get Continue Reading");
var chapters = await _context.Chapter var volumeQuery = _context.Volume
.Join(_context.AppUserProgresses, c => c.Id, p => p.ChapterId, .Join(_context.AppUserProgresses, v => v.Id, aup => aup.VolumeId, (volume, progress) => new
(chapter, progress) => {
new volume,
{ progress
Chapter = chapter, })
Progress = progress .Where(arg => arg.volume.SeriesId == arg.progress.SeriesId && arg.progress.AppUserId == userId)
}) .AsNoTracking()
.Join(_context.Series, arg => arg.Progress.SeriesId, series => series.Id, (arg, series) => .Select(arg => new
{
VolumeId = arg.volume.Id,
VolumeNumber = arg.volume.Number
}); // I think doing a join on this would be better
var volumeIds = (await volumeQuery.ToListAsync()).Select(s => s.VolumeId);
var chapters2 = await _context.Chapter.Where(c => volumeIds.Contains(c.VolumeId))
.Join(_context.AppUserProgresses, chapter => chapter.Id, aup => aup.ChapterId, (chapter, progress) =>
new new
{ {
arg.Chapter, chapter,
arg.Progress, progress
Series = series
}) })
.AsNoTracking() .Join(_context.Series, arg => arg.progress.SeriesId, s => s.Id, (arg, series) => new
.Where(arg => arg.Progress.AppUserId == userId {
&& arg.Progress.PagesRead < arg.Chapter.Pages) Chapter = arg.chapter,
Progress = arg.progress,
Series = series
})
.Where(o => o.Progress.AppUserId == userId && o.Progress.PagesRead < o.Series.Pages)
.Select(arg => new
{
Chapter = arg.Chapter,
Progress = arg.Progress,
SeriesId = arg.Series.Id,
SeriesName = arg.Series.Name,
LibraryId = arg.Series.LibraryId,
TotalPages = arg.Series.Pages
})
.OrderByDescending(d => d.Progress.LastModified) .OrderByDescending(d => d.Progress.LastModified)
.Take(limit) .Take(limit)
.ToListAsync(); .ToListAsync();
return chapters return chapters2
.OrderBy(c => float.Parse(c.Chapter.Number), new ChapterSortComparer()) .OrderBy(c => float.Parse(c.Chapter.Number), new ChapterSortComparer())
.DistinctBy(p => p.Series.Id) .DistinctBy(p => p.SeriesId)
.Select(arg => new InProgressChapterDto() .Select(arg => new InProgressChapterDto()
{ {
Id = arg.Chapter.Id, Id = arg.Chapter.Id,
Number = arg.Chapter.Number, Number = arg.Chapter.Number,
Range = arg.Chapter.Range, Range = arg.Chapter.Range,
SeriesId = arg.Progress.SeriesId, SeriesId = arg.Progress.SeriesId,
SeriesName = arg.Series.Name, SeriesName = arg.SeriesName,
LibraryId = arg.Series.LibraryId, LibraryId = arg.LibraryId,
Pages = arg.Chapter.Pages, Pages = arg.Chapter.Pages,
VolumeId = arg.Chapter.VolumeId
}); });
// var chapters = await _context.Chapter
// .Join(_context.AppUserProgresses, c => c.Id, p => p.ChapterId,
// (chapter, progress) =>
// new
// {
// Chapter = chapter,
// Progress = progress
// })
// .Join(_context.Series, arg => arg.Progress.SeriesId, series => series.Id, (arg, series) =>
// new
// {
// arg.Chapter,
// arg.Progress,
// Series = series,
// VolumeIds = _context.Volume.Where(v => v.SeriesId == series.Id).Select(s => s.Id).ToList()
// })
// .AsNoTracking()
// .Where(arg => arg.Progress.AppUserId == userId
// && arg.Progress.PagesRead < arg.Chapter.Pages
// && arg.VolumeIds.Contains(arg.Progress.VolumeId))
// .OrderByDescending(d => d.Progress.LastModified)
// .Take(limit)
// .ToListAsync();
// return chapters
// .OrderBy(c => float.Parse(c.Chapter.Number), new ChapterSortComparer())
// .DistinctBy(p => p.Series.Id)
// .Select(arg => new InProgressChapterDto()
// {
// Id = arg.Chapter.Id,
// Number = arg.Chapter.Number,
// Range = arg.Chapter.Range,
// SeriesId = arg.Progress.SeriesId,
// SeriesName = arg.Series.Name,
// LibraryId = arg.Series.LibraryId,
// Pages = arg.Chapter.Pages,
// });
} }
} }
} }

View File

@ -5,9 +5,6 @@ using API.Interfaces.Services;
using API.Services; using API.Services;
using API.Services.Tasks; using API.Services.Tasks;
using AutoMapper; using AutoMapper;
using Hangfire;
using Hangfire.LiteDB;
using Hangfire.MemoryStorage;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection;

View File

@ -13,5 +13,7 @@ namespace API.Interfaces.Services
/// <param name="logFileName"></param> /// <param name="logFileName"></param>
/// <returns></returns> /// <returns></returns>
IEnumerable<string> LogFiles(int maxRollingFiles, string logFileName); IEnumerable<string> LogFiles(int maxRollingFiles, string logFileName);
void CleanupBackups();
} }
} }

View File

@ -42,6 +42,7 @@ namespace API.Interfaces.Services
void ClearDirectory(string directoryPath); void ClearDirectory(string directoryPath);
bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath); bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath);
bool Exists(string directory);
IEnumerable<string> GetFiles(string path, string searchPatternExpression = "", IEnumerable<string> GetFiles(string path, string searchPatternExpression = "",
SearchOption searchOption = SearchOption.TopDirectoryOnly); SearchOption searchOption = SearchOption.TopDirectoryOnly);

View File

@ -32,7 +32,7 @@ namespace API.Services
public void EnsureCacheDirectory() public void EnsureCacheDirectory()
{ {
_logger.LogDebug("Checking if valid Cache directory: {CacheDirectory}", CacheDirectory); _logger.LogDebug("Checking if valid Cache directory: {CacheDirectory}", CacheDirectory);
if (_directoryService.ExistOrCreate(CacheDirectory)) if (!_directoryService.ExistOrCreate(CacheDirectory))
{ {
_logger.LogError("Cache directory {CacheDirectory} is not accessible or does not exist. Creating...", CacheDirectory); _logger.LogError("Cache directory {CacheDirectory} is not accessible or does not exist. Creating...", CacheDirectory);
} }

View File

@ -39,7 +39,13 @@ namespace API.Services
.Where(file => .Where(file =>
reSearchPattern.IsMatch(Path.GetExtension(file))); reSearchPattern.IsMatch(Path.GetExtension(file)));
} }
/// <summary>
/// Checks whether the given directory exists on disk.
/// </summary>
/// <param name="directory">Path of the directory to test.</param>
/// <returns>True if the directory exists; false otherwise, including for null, empty or malformed paths.</returns>
public bool Exists(string directory)
{
    // Directory.Exists never throws — it returns false for null/empty/invalid
    // paths, whereas new DirectoryInfo(directory) throws ArgumentNullException/
    // ArgumentException for those inputs. Callers (e.g. backup cleanup) may pass
    // an unset setting value, so the non-throwing form is safer.
    return Directory.Exists(directory);
}
public IEnumerable<string> GetFiles(string path, string searchPatternExpression = "", public IEnumerable<string> GetFiles(string path, string searchPatternExpression = "",
SearchOption searchOption = SearchOption.TopDirectoryOnly) SearchOption searchOption = SearchOption.TopDirectoryOnly)
{ {

View File

@@ -99,6 +99,54 @@ namespace API.Services.Tasks
_directoryService.ClearAndDeleteDirectory(tempDirectory); _directoryService.ClearAndDeleteDirectory(tempDirectory);
_logger.LogInformation("Database backup completed"); _logger.LogInformation("Database backup completed");
} }
/// <summary>
/// Removes database backups older than 30 days from the configured backup directory.
/// If every backup is older than 30 days, the most recent one is kept and the rest are deleted.
/// </summary>
public void CleanupBackups()
{
    const int dayThreshold = 30;
    _logger.LogInformation("Beginning cleanup of Database backups at {Time}", DateTime.Now);
    // NOTE(review): the IBackupService contract is synchronous, hence the blocking
    // Task.Run(...).Result bridge to the async settings repository.
    var backupDirectory = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Result.Value;
    if (!_directoryService.Exists(backupDirectory)) return;

    var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(dayThreshold));
    var allBackups = _directoryService.GetFiles(backupDirectory).ToList();
    // BUG FIX: an expired backup is one created BEFORE the threshold date.
    // The original comparison (CreationTime > deltaTime) selected the *recent*
    // backups instead, deleting fresh backups while keeping stale ones.
    var expiredBackups = allBackups.Select(filename => new FileInfo(filename))
        .Where(f => f.CreationTime < deltaTime)
        .ToList();

    if (expiredBackups.Count == allBackups.Count)
    {
        _logger.LogInformation("All backups are older than {Threshold} days. Removing all but the latest backup", dayThreshold);
        // Order newest-first and keep index 0 (the most recent backup).
        var toDelete = expiredBackups.OrderByDescending(f => f.CreationTime).ToList();
        for (var i = 1; i < toDelete.Count; i++)
        {
            try
            {
                toDelete[i].Delete();
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "There was an issue deleting {FileName}", toDelete[i].Name);
            }
        }
    }
    else
    {
        foreach (var file in expiredBackups)
        {
            try
            {
                file.Delete();
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "There was an issue deleting {FileName}", file.Name);
            }
        }
    }
    _logger.LogInformation("Finished cleanup of Database backups at {Time}", DateTime.Now);
}
} }
} }

View File

@ -13,12 +13,14 @@ namespace API.Services.Tasks
private readonly ICacheService _cacheService; private readonly ICacheService _cacheService;
private readonly IDirectoryService _directoryService; private readonly IDirectoryService _directoryService;
private readonly ILogger<CleanupService> _logger; private readonly ILogger<CleanupService> _logger;
private readonly IBackupService _backupService;
public CleanupService(ICacheService cacheService, IDirectoryService directoryService, ILogger<CleanupService> logger) public CleanupService(ICacheService cacheService, IDirectoryService directoryService, ILogger<CleanupService> logger, IBackupService backupService)
{ {
_cacheService = cacheService; _cacheService = cacheService;
_directoryService = directoryService; _directoryService = directoryService;
_logger = logger; _logger = logger;
_backupService = backupService;
} }
[AutomaticRetry(Attempts = 3, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Fail)] [AutomaticRetry(Attempts = 3, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Fail)]
@ -29,7 +31,9 @@ namespace API.Services.Tasks
_directoryService.ClearDirectory(tempDirectory); _directoryService.ClearDirectory(tempDirectory);
_logger.LogInformation("Cleaning cache directory"); _logger.LogInformation("Cleaning cache directory");
_cacheService.Cleanup(); _cacheService.Cleanup();
_logger.LogInformation("Cleaning old database backups");
_backupService.CleanupBackups();
} }
} }
} }

View File

@ -6,7 +6,6 @@ using API.Interfaces.Services;
using API.Middleware; using API.Middleware;
using API.Services; using API.Services;
using Hangfire; using Hangfire;
using Hangfire.LiteDB;
using Hangfire.MemoryStorage; using Hangfire.MemoryStorage;
using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Hosting;
@ -65,20 +64,10 @@ namespace API
services.AddResponseCaching(); services.AddResponseCaching();
if (_env.IsDevelopment()) services.AddHangfire(configuration => configuration
{ .UseSimpleAssemblyNameTypeSerializer()
services.AddHangfire(configuration => configuration .UseRecommendedSerializerSettings()
.UseSimpleAssemblyNameTypeSerializer() .UseMemoryStorage());
.UseRecommendedSerializerSettings()
.UseMemoryStorage());
}
else
{
services.AddHangfire(configuration => configuration
.UseSimpleAssemblyNameTypeSerializer()
.UseRecommendedSerializerSettings()
.UseLiteDbStorage());
}
// Add the processing server as IHostedService // Add the processing server as IHostedService
services.AddHangfireServer(); services.AddHangfireServer();
@ -132,7 +121,7 @@ namespace API
MaxAge = TimeSpan.FromSeconds(10) MaxAge = TimeSpan.FromSeconds(10)
}; };
context.Response.Headers[Microsoft.Net.Http.Headers.HeaderNames.Vary] = context.Response.Headers[Microsoft.Net.Http.Headers.HeaderNames.Vary] =
new string[] { "Accept-Encoding" }; new[] { "Accept-Encoding" };
await next(); await next();
}); });