diff --git a/API/API.csproj b/API/API.csproj
index 0f6d90fb6..51bb3becf 100644
--- a/API/API.csproj
+++ b/API/API.csproj
@@ -14,7 +14,6 @@
-
diff --git a/API/DTOs/InProgressChapterDto.cs b/API/DTOs/InProgressChapterDto.cs
index f0a0096ef..08bce3fc6 100644
--- a/API/DTOs/InProgressChapterDto.cs
+++ b/API/DTOs/InProgressChapterDto.cs
@@ -18,6 +18,7 @@
public int SeriesId { get; init; }
public int LibraryId { get; init; }
public string SeriesName { get; init; }
+ public int VolumeId { get; init; }
}
}
\ No newline at end of file
diff --git a/API/Data/SeriesRepository.cs b/API/Data/SeriesRepository.cs
index 8c1949edc..52be7dac7 100644
--- a/API/Data/SeriesRepository.cs
+++ b/API/Data/SeriesRepository.cs
@@ -6,6 +6,7 @@ using System.Linq;
using System.Threading.Tasks;
using API.DTOs;
using API.Entities;
+using API.Extensions;
using API.Helpers;
using API.Interfaces;
using AutoMapper;
@@ -297,7 +298,7 @@ namespace API.Data
}
/// <summary>
- ///
+ /// Returns Series that the user has progress on
/// </summary>
///
///
@@ -305,12 +306,12 @@ namespace API.Data
///
public async Task<IEnumerable<SeriesDto>> GetInProgress(int userId, int libraryId, int limit)
{
- //var twoWeeksAgo = DateTime.Today.Subtract(TimeSpan.FromDays(14)); // TODO: Think about moving this to a setting
+ // TODO: Idea: Put total PagesRead in the return so that we can show a progress bar for full series read progress
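+ // Note: the PagesRead projection below sums every AppUserProgresses row for the series (all chapters and all users)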
var series = await _context.Series
.Join(_context.AppUserProgresses, s => s.Id, progress => progress.SeriesId, (s, progress) => new
{
Series = s,
- progress.PagesRead,
+ PagesRead = _context.AppUserProgresses.Where(s1 => s1.SeriesId == s.Id).Sum(s1 => s1.PagesRead),
progress.AppUserId,
progress.LastModified
})
@@ -320,12 +321,11 @@ namespace API.Data
&& (libraryId <= 0 || s.Series.LibraryId == libraryId) )
.Take(limit)
.OrderByDescending(s => s.LastModified)
- .AsNoTracking()
.Select(s => s.Series)
- .Distinct()
.ProjectTo<SeriesDto>(_mapper.ConfigurationProvider)
+ .AsNoTracking()
.ToListAsync();
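+ // DistinctBy runs in memory on the materialized list; it is not translatable by EF Core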
- return series;
+ return series.DistinctBy(s => s.Name);
}
}
}
\ No newline at end of file
diff --git a/API/Data/VolumeRepository.cs b/API/Data/VolumeRepository.cs
index 35119efa8..f0e183805 100644
--- a/API/Data/VolumeRepository.cs
+++ b/API/Data/VolumeRepository.cs
@@ -100,42 +100,112 @@ namespace API.Data
///
public async Task<IEnumerable<InProgressChapterDto>> GetContinueReading(int userId, int libraryId, int limit)
{
+ /** TODO: Fix this SQL
+ * SELECT * FROM
+ (
+ SELECT * FROM Chapter C WHERE C.VolumeId IN (SELECT Id from Volume where SeriesId = 1912)
+ ) C INNER JOIN AppUserProgresses AUP ON AUP.ChapterId = C.Id
+ INNER JOIN Series S ON AUP.SeriesId = S.Id
+ WHERE AUP.AppUserId = 1 AND AUP.PagesRead < C.Pages
+ */
_logger.LogInformation("Get Continue Reading");
- var chapters = await _context.Chapter
- .Join(_context.AppUserProgresses, c => c.Id, p => p.ChapterId,
- (chapter, progress) =>
- new
- {
- Chapter = chapter,
- Progress = progress
- })
- .Join(_context.Series, arg => arg.Progress.SeriesId, series => series.Id, (arg, series) =>
+ var volumeQuery = _context.Volume
+ .Join(_context.AppUserProgresses, v => v.Id, aup => aup.VolumeId, (volume, progress) => new
+ {
+ volume,
+ progress
+ })
+ .Where(arg => arg.volume.SeriesId == arg.progress.SeriesId && arg.progress.AppUserId == userId)
+ .AsNoTracking()
+ .Select(arg => new
+ {
+ VolumeId = arg.volume.Id,
+ VolumeNumber = arg.volume.Number
+ }); // I think doing a join on this would be better
+
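+ // Materialize the matching volume ids up front; they feed the Contains filter on chapters below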
+ var volumeIds = (await volumeQuery.ToListAsync()).Select(s => s.VolumeId);
+
+ var chapters2 = await _context.Chapter.Where(c => volumeIds.Contains(c.VolumeId))
+ .Join(_context.AppUserProgresses, chapter => chapter.Id, aup => aup.ChapterId, (chapter, progress) =>
new
{
- arg.Chapter,
- arg.Progress,
- Series = series
+ chapter,
+ progress
})
- .AsNoTracking()
- .Where(arg => arg.Progress.AppUserId == userId
- && arg.Progress.PagesRead < arg.Chapter.Pages)
+ .Join(_context.Series, arg => arg.progress.SeriesId, s => s.Id, (arg, series) => new
+ {
+ Chapter = arg.chapter,
+ Progress = arg.progress,
+ Series = series
+ })
+ .Where(o => o.Progress.AppUserId == userId && o.Progress.PagesRead < o.Series.Pages)
+ .Select(arg => new
+ {
+ Chapter = arg.Chapter,
+ Progress = arg.Progress,
+ SeriesId = arg.Series.Id,
+ SeriesName = arg.Series.Name,
+ LibraryId = arg.Series.LibraryId,
+ TotalPages = arg.Series.Pages
+ })
.OrderByDescending(d => d.Progress.LastModified)
.Take(limit)
.ToListAsync();
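+ // Ordering by chapter number and DistinctBy per series are applied in memory on the materialized results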
- return chapters
+ return chapters2
.OrderBy(c => float.Parse(c.Chapter.Number), new ChapterSortComparer())
- .DistinctBy(p => p.Series.Id)
+ .DistinctBy(p => p.SeriesId)
.Select(arg => new InProgressChapterDto()
{
Id = arg.Chapter.Id,
Number = arg.Chapter.Number,
Range = arg.Chapter.Range,
SeriesId = arg.Progress.SeriesId,
- SeriesName = arg.Series.Name,
- LibraryId = arg.Series.LibraryId,
+ SeriesName = arg.SeriesName,
+ LibraryId = arg.LibraryId,
Pages = arg.Chapter.Pages,
+ VolumeId = arg.Chapter.VolumeId
});
+
+
+
+ // var chapters = await _context.Chapter
+ // .Join(_context.AppUserProgresses, c => c.Id, p => p.ChapterId,
+ // (chapter, progress) =>
+ // new
+ // {
+ // Chapter = chapter,
+ // Progress = progress
+ // })
+ // .Join(_context.Series, arg => arg.Progress.SeriesId, series => series.Id, (arg, series) =>
+ // new
+ // {
+ // arg.Chapter,
+ // arg.Progress,
+ // Series = series,
+ // VolumeIds = _context.Volume.Where(v => v.SeriesId == series.Id).Select(s => s.Id).ToList()
+ // })
+ // .AsNoTracking()
+ // .Where(arg => arg.Progress.AppUserId == userId
+ // && arg.Progress.PagesRead < arg.Chapter.Pages
+ // && arg.VolumeIds.Contains(arg.Progress.VolumeId))
+ // .OrderByDescending(d => d.Progress.LastModified)
+ // .Take(limit)
+ // .ToListAsync();
+
+ // return chapters
+ // .OrderBy(c => float.Parse(c.Chapter.Number), new ChapterSortComparer())
+ // .DistinctBy(p => p.Series.Id)
+ // .Select(arg => new InProgressChapterDto()
+ // {
+ // Id = arg.Chapter.Id,
+ // Number = arg.Chapter.Number,
+ // Range = arg.Chapter.Range,
+ // SeriesId = arg.Progress.SeriesId,
+ // SeriesName = arg.Series.Name,
+ // LibraryId = arg.Series.LibraryId,
+ // Pages = arg.Chapter.Pages,
+ // });
}
}
}
\ No newline at end of file
diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs
index 6c5d4ec34..a9be9e0cb 100644
--- a/API/Extensions/ApplicationServiceExtensions.cs
+++ b/API/Extensions/ApplicationServiceExtensions.cs
@@ -5,9 +5,6 @@ using API.Interfaces.Services;
using API.Services;
using API.Services.Tasks;
using AutoMapper;
-using Hangfire;
-using Hangfire.LiteDB;
-using Hangfire.MemoryStorage;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
diff --git a/API/Interfaces/Services/IBackupService.cs b/API/Interfaces/Services/IBackupService.cs
index 0f46a77c9..eaa140e46 100644
--- a/API/Interfaces/Services/IBackupService.cs
+++ b/API/Interfaces/Services/IBackupService.cs
@@ -13,5 +13,7 @@ namespace API.Interfaces.Services
///
///
IEnumerable<string> LogFiles(int maxRollingFiles, string logFileName);
+
+ void CleanupBackups();
}
}
\ No newline at end of file
diff --git a/API/Interfaces/Services/IDirectoryService.cs b/API/Interfaces/Services/IDirectoryService.cs
index c4b15b94b..1437df69b 100644
--- a/API/Interfaces/Services/IDirectoryService.cs
+++ b/API/Interfaces/Services/IDirectoryService.cs
@@ -42,6 +42,7 @@ namespace API.Interfaces.Services
void ClearDirectory(string directoryPath);
bool CopyFilesToDirectory(IEnumerable<string> filePaths, string directoryPath);
+ bool Exists(string directory);
IEnumerable<string> GetFiles(string path, string searchPatternExpression = "",
SearchOption searchOption = SearchOption.TopDirectoryOnly);
diff --git a/API/Services/CacheService.cs b/API/Services/CacheService.cs
index 8e6ede340..549139fe4 100644
--- a/API/Services/CacheService.cs
+++ b/API/Services/CacheService.cs
@@ -32,7 +32,7 @@ namespace API.Services
public void EnsureCacheDirectory()
{
_logger.LogDebug("Checking if valid Cache directory: {CacheDirectory}", CacheDirectory);
- if (_directoryService.ExistOrCreate(CacheDirectory))
+ if (!_directoryService.ExistOrCreate(CacheDirectory))
{
_logger.LogError("Cache directory {CacheDirectory} is not accessible or does not exist. Creating...", CacheDirectory);
}
diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs
index f1f4f5eb1..6ae953802 100644
--- a/API/Services/DirectoryService.cs
+++ b/API/Services/DirectoryService.cs
@@ -39,7 +39,13 @@ namespace API.Services
.Where(file =>
reSearchPattern.IsMatch(Path.GetExtension(file)));
}
-
+
+ public bool Exists(string directory)
+ {
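+ // DirectoryInfo.Exists is false when the path is missing or inaccessible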
+ var di = new DirectoryInfo(directory);
+ return di.Exists;
+ }
+
public IEnumerable<string> GetFiles(string path, string searchPatternExpression = "",
SearchOption searchOption = SearchOption.TopDirectoryOnly)
{
diff --git a/API/Services/Tasks/BackupService.cs b/API/Services/Tasks/BackupService.cs
index 9642a0faa..ee8b3cf89 100644
--- a/API/Services/Tasks/BackupService.cs
+++ b/API/Services/Tasks/BackupService.cs
@@ -99,6 +99,54 @@ namespace API.Services.Tasks
_directoryService.ClearAndDeleteDirectory(tempDirectory);
_logger.LogInformation("Database backup completed");
}
+
+ /// <summary>
+ /// Removes Database backups older than 30 days. If all backups are older than 30 days, the latest is kept.
+ /// </summary>
+ public void CleanupBackups()
+ {
+ const int dayThreshold = 30;
+ _logger.LogInformation("Beginning cleanup of Database backups at {Time}", DateTime.Now);
+ var backupDirectory = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Result.Value;
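+ // Called from the synchronous cleanup task, so block on the async settings lookup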
+ if (!_directoryService.Exists(backupDirectory)) return;
+ var deltaTime = DateTime.Today.Subtract(TimeSpan.FromDays(dayThreshold));
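+ // Backups created before this cut-off are considered expired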
+ var allBackups = _directoryService.GetFiles(backupDirectory).ToList();
+ var expiredBackups = allBackups.Select(filename => new FileInfo(filename))
+ .Where(f => f.CreationTime < deltaTime)
+ .ToList();
+ if (expiredBackups.Count == allBackups.Count)
+ {
+ _logger.LogInformation("All expired backups are older than {Threshold} days. Removing all but last backup", dayThreshold);
+ var toDelete = expiredBackups.OrderByDescending(f => f.CreationTime).ToList();
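+ // Start at index 1 so the most recent backup is kept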
+ for (var i = 1; i < toDelete.Count; i++)
+ {
+ try
+ {
+ toDelete[i].Delete();
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "There was an issue deleting {FileName}", toDelete[i].Name);
+ }
+ }
+ }
+ else
+ {
+ foreach (var file in expiredBackups)
+ {
+ try
+ {
+ file.Delete();
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "There was an issue deleting {FileName}", file.Name);
+ }
+ }
+
+ }
+ _logger.LogInformation("Finished cleanup of Database backups at {Time}", DateTime.Now);
+ }
}
}
\ No newline at end of file
diff --git a/API/Services/Tasks/CleanupService.cs b/API/Services/Tasks/CleanupService.cs
index a33cf746f..a98e33cbf 100644
--- a/API/Services/Tasks/CleanupService.cs
+++ b/API/Services/Tasks/CleanupService.cs
@@ -13,12 +13,14 @@ namespace API.Services.Tasks
private readonly ICacheService _cacheService;
private readonly IDirectoryService _directoryService;
private readonly ILogger<CleanupService> _logger;
+ private readonly IBackupService _backupService;
- public CleanupService(ICacheService cacheService, IDirectoryService directoryService, ILogger<CleanupService> logger)
+ public CleanupService(ICacheService cacheService, IDirectoryService directoryService, ILogger<CleanupService> logger, IBackupService backupService)
{
_cacheService = cacheService;
_directoryService = directoryService;
_logger = logger;
+ _backupService = backupService;
}
[AutomaticRetry(Attempts = 3, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Fail)]
@@ -29,7 +31,9 @@ namespace API.Services.Tasks
_directoryService.ClearDirectory(tempDirectory);
_logger.LogInformation("Cleaning cache directory");
_cacheService.Cleanup();
-
+ _logger.LogInformation("Cleaning old database backups");
+ _backupService.CleanupBackups();
+
}
}
}
\ No newline at end of file
diff --git a/API/Startup.cs b/API/Startup.cs
index c9a6a8eca..997954016 100644
--- a/API/Startup.cs
+++ b/API/Startup.cs
@@ -6,7 +6,6 @@ using API.Interfaces.Services;
using API.Middleware;
using API.Services;
using Hangfire;
-using Hangfire.LiteDB;
using Hangfire.MemoryStorage;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
@@ -65,20 +64,10 @@ namespace API
services.AddResponseCaching();
- if (_env.IsDevelopment())
- {
- services.AddHangfire(configuration => configuration
- .UseSimpleAssemblyNameTypeSerializer()
- .UseRecommendedSerializerSettings()
- .UseMemoryStorage());
- }
- else
- {
- services.AddHangfire(configuration => configuration
- .UseSimpleAssemblyNameTypeSerializer()
- .UseRecommendedSerializerSettings()
- .UseLiteDbStorage());
- }
+ services.AddHangfire(configuration => configuration
+ .UseSimpleAssemblyNameTypeSerializer()
+ .UseRecommendedSerializerSettings()
+ .UseMemoryStorage());
// Add the processing server as IHostedService
services.AddHangfireServer();
@@ -132,7 +121,7 @@ namespace API
MaxAge = TimeSpan.FromSeconds(10)
};
context.Response.Headers[Microsoft.Net.Http.Headers.HeaderNames.Vary] =
- new string[] { "Accept-Encoding" };
+ new[] { "Accept-Encoding" };
await next();
});