Fixed GetInProgress API - the PagesRead used in the in-progress condition needed to be the sum of progress across all progress rows for a series, not the value of each individual row.

Joseph Milazzo 2021-03-18 14:18:56 -05:00
parent b8cd04e1de
commit a125b2ac0b
7 changed files with 69 additions and 12 deletions
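
In short, the projected PagesRead moves from the value on each joined progress row to the total across all progress rows for the series. A minimal LINQ-to-objects sketch of the difference, using hypothetical stand-in types rather than the Kavita entities, and assuming the in-progress check compares PagesRead against the series' total page count (that comparison lives in the Where clause, which the hunk below only partially shows):

using System;
using System.Collections.Generic;
using System.Linq;

public class ProgressRow { public int SeriesId; public int PagesRead; }

public static class InProgressSketch
{
    public static void Main()
    {
        // Two progress rows for the same series (e.g. two volumes read partway).
        var progresses = new List<ProgressRow>
        {
            new ProgressRow { SeriesId = 1, PagesRead = 40 },
            new ProgressRow { SeriesId = 1, PagesRead = 60 },
        };
        const int seriesTotalPages = 120;

        // Per-row (old shape): each row is judged on its own partial page count,
        // and the joined series shows up once per matching row.
        var perRowMatches = progresses.Count(p => p.PagesRead > 0 && p.PagesRead < seriesTotalPages);

        // Summed (new shape): one total per series, compared against the series'
        // page count a single time.
        var totalRead = progresses.Where(p => p.SeriesId == 1).Sum(p => p.PagesRead);
        var inProgress = totalRead > 0 && totalRead < seriesTotalPages;

        Console.WriteLine($"per-row matches: {perRowMatches}, summed: {totalRead}, in progress: {inProgress}");
    }
}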

View File

@@ -6,6 +6,7 @@ using System.Linq;
using System.Threading.Tasks;
using API.DTOs;
using API.Entities;
using API.Extensions;
using API.Helpers;
using API.Interfaces;
using AutoMapper;
@@ -297,7 +298,7 @@ namespace API.Data
}
/// <summary>
///
/// Returns Series that the user has reading progress on (in-progress series).
/// </summary>
/// <param name="userId"></param>
/// <param name="libraryId"></param>
@@ -305,12 +306,11 @@ namespace API.Data
/// <returns></returns>
public async Task<IEnumerable<SeriesDto>> GetInProgress(int userId, int libraryId, int limit)
{
//var twoWeeksAgo = DateTime.Today.Subtract(TimeSpan.FromDays(14)); // TODO: Think about moving this to a setting
var series = await _context.Series
.Join(_context.AppUserProgresses, s => s.Id, progress => progress.SeriesId, (s, progress) => new
{
Series = s,
progress.PagesRead,
PagesRead = _context.AppUserProgresses.Where(s1 => s1.SeriesId == s.Id).Sum(s1 => s1.PagesRead),
progress.AppUserId,
progress.LastModified
})
@@ -320,12 +320,11 @@ namespace API.Data
&& (libraryId <= 0 || s.Series.LibraryId == libraryId) )
.Take(limit)
.OrderByDescending(s => s.LastModified)
.AsNoTracking()
.Select(s => s.Series)
.Distinct()
.ProjectTo<SeriesDto>(_mapper.ConfigurationProvider)
.AsNoTracking()
.ToListAsync();
return series;
return series.DistinctBy(s => s.Name);
}
}
}
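
The using API.Extensions; added at the top of this file is presumably what brings DistinctBy into scope for the new return statement. The extension itself is not part of this commit; a minimal sketch of such a helper, keyed by an arbitrary selector (hypothetical implementation, not necessarily Kavita's), could be:

using System;
using System.Collections.Generic;

public static class EnumerableExtensions
{
    // Yields the first element seen for each key, preserving input order.
    public static IEnumerable<TSource> DistinctBy<TSource, TKey>(
        this IEnumerable<TSource> source, Func<TSource, TKey> keySelector)
    {
        var seenKeys = new HashSet<TKey>();
        foreach (var element in source)
        {
            if (seenKeys.Add(keySelector(element)))
            {
                yield return element;
            }
        }
    }
}

With that shape, series.DistinctBy(s => s.Name) keeps the first SeriesDto per name after the join has produced one row per progress entry.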

View File

@@ -5,9 +5,6 @@ using API.Interfaces.Services;
using API.Services;
using API.Services.Tasks;
using AutoMapper;
using Hangfire;
using Hangfire.LiteDB;
using Hangfire.MemoryStorage;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

View File

@@ -13,5 +13,7 @@ namespace API.Interfaces.Services
/// <param name="logFileName"></param>
/// <returns></returns>
IEnumerable<string> LogFiles(int maxRollingFiles, string logFileName);
void CleanupBackups();
}
}

View File

@@ -42,6 +42,7 @@ namespace API.Interfaces.Services
void ClearDirectory(string directoryPath);
bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath);
bool Exists(string directory);
IEnumerable<string> GetFiles(string path, string searchPatternExpression = "",
SearchOption searchOption = SearchOption.TopDirectoryOnly);

View File

@@ -40,6 +40,12 @@ namespace API.Services
reSearchPattern.IsMatch(Path.GetExtension(file)));
}
public bool Exists(string directory)
{
var di = new DirectoryInfo(directory);
return di.Exists;
}
public IEnumerable<string> GetFiles(string path, string searchPatternExpression = "",
SearchOption searchOption = SearchOption.TopDirectoryOnly)
{

View File

@@ -100,5 +100,53 @@ namespace API.Services.Tasks
_logger.LogInformation("Database backup completed");
}
/// <summary>
/// Removes Database backups older than 30 days. If all backups are older than 30 days, the latest is kept.
/// </summary>
public void CleanupBackups()
{
const int dayThreshold = 30;
_logger.LogInformation("Beginning cleanup of Database backups at {Time}", DateTime.Now);
var backupDirectory = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Result.Value;
if (!_directoryService.Exists(backupDirectory)) return;
var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(dayThreshold));
var allBackups = _directoryService.GetFiles(backupDirectory).ToList();
var expiredBackups = allBackups.Select(filename => new FileInfo(filename))
.Where(f => f.CreationTime < deltaTime) // created before the cutoff, i.e. older than the threshold
.ToList();
if (expiredBackups.Count == allBackups.Count)
{
_logger.LogInformation("All expired backups are older than {Threshold} days. Removing all but last backup", dayThreshold);
var toDelete = expiredBackups.OrderByDescending(f => f.CreationTime).ToList();
for (var i = 1; i < toDelete.Count; i++)
{
try
{
toDelete[i].Delete();
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an issue deleting {FileName}", toDelete[i].Name);
}
}
}
else
{
foreach (var file in expiredBackups)
{
try
{
file.Delete();
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an issue deleting {FileName}", file.Name);
}
}
}
_logger.LogInformation("Finished cleanup of Database backups at {Time}", DateTime.Now);
}
}
}
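
Separated from the file-system and logging details, the retention rule above amounts to: delete backups older than the 30-day threshold, except that when every backup is past the threshold the most recent one is kept. A small standalone sketch of that decision (hypothetical helper operating on creation times, not part of this commit):

using System;
using System.Collections.Generic;
using System.Linq;

public static class BackupRetention
{
    // Returns the creation times of backups that should be deleted under the
    // "older than thresholdDays, but always keep the newest backup" rule.
    public static List<DateTime> SelectForDeletion(IReadOnlyList<DateTime> creationTimes, int thresholdDays = 30)
    {
        var cutoff = DateTime.Today.Subtract(TimeSpan.FromDays(thresholdDays));
        var expired = creationTimes.Where(t => t < cutoff).ToList();

        if (expired.Count == creationTimes.Count && expired.Count > 0)
        {
            // Everything is expired: keep the most recent backup, delete the rest.
            return expired.OrderByDescending(t => t).Skip(1).ToList();
        }

        // Otherwise only the expired backups go.
        return expired;
    }
}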

View File

@@ -13,12 +13,14 @@ namespace API.Services.Tasks
private readonly ICacheService _cacheService;
private readonly IDirectoryService _directoryService;
private readonly ILogger<CleanupService> _logger;
private readonly IBackupService _backupService;
public CleanupService(ICacheService cacheService, IDirectoryService directoryService, ILogger<CleanupService> logger)
public CleanupService(ICacheService cacheService, IDirectoryService directoryService, ILogger<CleanupService> logger, IBackupService backupService)
{
_cacheService = cacheService;
_directoryService = directoryService;
_logger = logger;
_backupService = backupService;
}
[AutomaticRetry(Attempts = 3, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Fail)]
@@ -29,6 +31,8 @@ namespace API.Services.Tasks
_directoryService.ClearDirectory(tempDirectory);
_logger.LogInformation("Cleaning cache directory");
_cacheService.Cleanup();
_logger.LogInformation("Cleaning old database backups");
_backupService.CleanupBackups();
}
}
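
The [AutomaticRetry] attribute indicates Cleanup runs as a Hangfire job, but its scheduling is outside this diff. Purely as an illustration, a recurring registration with Hangfire's standard API might look like the following; the "cleanup" job id, the daily cron, and the ICleanupService stand-in are assumptions, not taken from this commit:

using Hangfire;

// Stand-in for the service interface Hangfire would resolve from the DI container.
public interface ICleanupService
{
    void Cleanup();
}

public static class TaskSchedulerSketch
{
    public static void ScheduleCleanup()
    {
        // Run the cleanup job (temp dir, cache dir, old database backups) once a day.
        RecurringJob.AddOrUpdate<ICleanupService>("cleanup", service => service.Cleanup(), Cron.Daily());
    }
}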