diff --git a/API/Controllers/MetadataController.cs b/API/Controllers/MetadataController.cs
index c1372419d..076f7d4d9 100644
--- a/API/Controllers/MetadataController.cs
+++ b/API/Controllers/MetadataController.cs
@@ -207,7 +207,7 @@ public class MetadataController(IUnitOfWork unitOfWork, ILocalizationService loc
         if (user == null) return Unauthorized();
 
         var userReviews = (await unitOfWork.UserRepository.GetUserRatingDtosForSeriesAsync(seriesId, user.Id))
-            .Where(r => !string.IsNullOrEmpty(r.BodyJustText))
+            .Where(r => !string.IsNullOrEmpty(r.Body))
             .OrderByDescending(review => review.Username.Equals(user.UserName) ? 1 : 0)
             .ToList();
 
@@ -221,12 +221,22 @@ public class MetadataController(IUnitOfWork unitOfWork, ILocalizationService loc
         }
 
         var ret = await metadataService.GetSeriesDetail(user.Id, seriesId);
-        if (ret == null) return Ok(new SeriesDetailPlusDto()
+        if (ret == null)
         {
-            Reviews = userReviews,
-            Recommendations = null,
-            Ratings = null
-        });
+            // Cache an empty result, so we don't constantly hit K+ when we know nothing is going to resolve
+            ret = new SeriesDetailPlusDto()
+            {
+                Reviews = new List<UserReviewDto>(),
+                Recommendations = null,
+                Ratings = null
+            };
+            await _cacheProvider.SetAsync(cacheKey, ret, TimeSpan.FromHours(48));
+
+            var newCacheResult2 = (await _cacheProvider.GetAsync<SeriesDetailPlusDto>(cacheKey)).Value;
+            await PrepareSeriesDetail(userReviews, newCacheResult2, user);
+
+            return Ok(newCacheResult2);
+        }
 
         await _cacheProvider.SetAsync(cacheKey, ret, TimeSpan.FromHours(48));
 
@@ -244,7 +254,7 @@ public class MetadataController(IUnitOfWork unitOfWork, ILocalizationService loc
         userReviews.AddRange(ReviewService.SelectSpectrumOfReviews(ret.Reviews.ToList()));
         ret.Reviews = userReviews;
 
-        if (!isAdmin)
+        if (!isAdmin && ret.Recommendations != null)
         {
             // Re-obtain owned series and take into account age restriction
             ret.Recommendations.OwnedSeries =
diff --git a/API/Data/ManualMigrations/MigrateClearNightlyExternalSeriesRecords.cs b/API/Data/ManualMigrations/MigrateClearNightlyExternalSeriesRecords.cs
new file mode 100644
index 000000000..9eff55bc1
--- /dev/null
+++ b/API/Data/ManualMigrations/MigrateClearNightlyExternalSeriesRecords.cs
@@ -0,0 +1,42 @@
+using System;
+using System.Threading.Tasks;
+using API.Entities;
+using Kavita.Common.EnvironmentInfo;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.Logging;
+
+namespace API.Data.ManualMigrations;
+
+/// <summary>
+/// For the v0.7.14 release, one of the nightlies had bad data that would cause issues. This drops those records
+/// </summary>
+public static class MigrateClearNightlyExternalSeriesRecords
+{
+    public static async Task Migrate(DataContext dataContext, ILogger<Program> logger)
+    {
+        if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateClearNightlyExternalSeriesRecords"))
+        {
+            return;
+        }
+
+        logger.LogCritical(
+            "Running MigrateClearNightlyExternalSeriesRecords migration - Please be patient, this may take some time. This is not an error");
+
+        dataContext.ExternalSeriesMetadata.RemoveRange(dataContext.ExternalSeriesMetadata);
+        dataContext.ExternalRating.RemoveRange(dataContext.ExternalRating);
+        dataContext.ExternalRecommendation.RemoveRange(dataContext.ExternalRecommendation);
+        dataContext.ExternalReview.RemoveRange(dataContext.ExternalReview);
+
+        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
+        {
+            Name = "MigrateClearNightlyExternalSeriesRecords",
+            ProductVersion = BuildInfo.Version.ToString(),
+            RanAt = DateTime.UtcNow
+        });
+
+        await dataContext.SaveChangesAsync();
+
+        logger.LogCritical(
+            "Running MigrateClearNightlyExternalSeriesRecords migration - Completed. This is not an error");
+    }
+}
diff --git a/API/Data/ManualMigrations/MigrateWantToReadExport.cs b/API/Data/ManualMigrations/MigrateWantToReadExport.cs
index 94067191d..1797a3b1d 100644
--- a/API/Data/ManualMigrations/MigrateWantToReadExport.cs
+++ b/API/Data/ManualMigrations/MigrateWantToReadExport.cs
@@ -21,37 +21,19 @@ public static class MigrateWantToReadExport
         logger.LogCritical(
             "Running MigrateWantToReadExport migration - Please be patient, this may take some time. This is not an error");
 
-        var columnExists = false;
-        await using var command = dataContext.Database.GetDbConnection().CreateCommand();
-        command.CommandText = "PRAGMA table_info('Series')";
-
-        await dataContext.Database.OpenConnectionAsync();
-        await using var result = await command.ExecuteReaderAsync();
-        while (await result.ReadAsync())
-        {
-            var columnName = result["name"].ToString();
-            if (columnName != "AppUserId") continue;
-
-            logger.LogInformation("Column 'AppUserId' exists in the 'Series' table. Running migration...");
-            // Your migration logic here
-            columnExists = true;
-            break;
-        }
-
-        await result.CloseAsync();
-
-        if (!columnExists)
+        var importFile = Path.Join(directoryService.ConfigDirectory, "want-to-read-migration.csv");
+        if (File.Exists(importFile))
         {
             logger.LogCritical(
                 "Running MigrateWantToReadExport migration - Completed. This is not an error");
             return;
         }
 
-        await using var command2 = dataContext.Database.GetDbConnection().CreateCommand();
+        await using var command = dataContext.Database.GetDbConnection().CreateCommand();
         command.CommandText = "Select AppUserId, Id from Series WHERE AppUserId IS NOT NULL ORDER BY AppUserId;";
 
         await dataContext.Database.OpenConnectionAsync();
-        await using var result2 = await command.ExecuteReaderAsync();
+        await using var result = await command.ExecuteReaderAsync();
 
         await using var writer = new StreamWriter(Path.Join(directoryService.ConfigDirectory, "want-to-read-migration.csv"));
         await using var csvWriter = new CsvWriter(writer, CultureInfo.InvariantCulture);
@@ -62,10 +44,10 @@ public static class MigrateWantToReadExport
         await csvWriter.NextRecordAsync();
 
         // Write data
-        while (await result2.ReadAsync())
+        while (await result.ReadAsync())
         {
-            var appUserId = result2["AppUserId"].ToString();
-            var id = result2["Id"].ToString();
+            var appUserId = result["AppUserId"].ToString();
+            var id = result["Id"].ToString();
 
             csvWriter.WriteField(appUserId);
             csvWriter.WriteField(id);
@@ -75,7 +57,7 @@ public static class MigrateWantToReadExport
 
         try
         {
-            await result2.CloseAsync();
+            await dataContext.Database.CloseConnectionAsync();
             writer.Close();
         }
         catch (Exception) {/* Swallow */}
diff --git a/API/Data/Repositories/VolumeRepository.cs b/API/Data/Repositories/VolumeRepository.cs
index f6bb9ff1a..53a45a946 100644
--- a/API/Data/Repositories/VolumeRepository.cs
+++ b/API/Data/Repositories/VolumeRepository.cs
@@ -241,7 +241,9 @@ public class VolumeRepository : IVolumeRepository
                 c.LastReadingProgress = progresses.Max(p => p.LastModified);
             }
 
-            v.PagesRead = userProgress.Where(p => p.VolumeId == v.Id).Sum(p => p.PagesRead);
+            v.PagesRead = userProgress
+                .Where(p => p.VolumeId == v.Id)
+                .Sum(p => p.PagesRead);
         }
     }
 }
diff --git a/API/Startup.cs b/API/Startup.cs
index f7b37f0c3..86bee1bca 100644
--- a/API/Startup.cs
+++ b/API/Startup.cs
@@ -250,6 +250,7 @@ public class Startup
             await MigrateVolumeNumber.Migrate(unitOfWork, dataContext, logger);
             await MigrateWantToReadImport.Migrate(unitOfWork, directoryService, logger);
             await MigrateManualHistory.Migrate(dataContext, logger);
+            await MigrateClearNightlyExternalSeriesRecords.Migrate(dataContext, logger);
 
             // Update the version in the DB after all migrations are run
             var installVersion = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallVersion);
diff --git a/openapi.json b/openapi.json
index 9a9b31319..b15b30d03 100644
--- a/openapi.json
+++ b/openapi.json
@@ -7,7 +7,7 @@
       "name": "GPL-3.0",
       "url": "https://github.com/Kareadita/Kavita/blob/develop/LICENSE"
     },
-    "version": "0.7.13.13"
+    "version": "0.7.13.15"
   },
   "servers": [
     {