diff --git a/API/DTOs/Update/UpdateNotificationDto.cs b/API/DTOs/Update/UpdateNotificationDto.cs index 535e1f896..449adf131 100644 --- a/API/DTOs/Update/UpdateNotificationDto.cs +++ b/API/DTOs/Update/UpdateNotificationDto.cs @@ -63,6 +63,7 @@ public class UpdateNotificationDto public IList Developer { get; set; } public IList Api { get; set; } public IList FeatureRequests { get; set; } + public IList KnownIssues { get; set; } /// /// The part above the changelog part /// diff --git a/API/Data/ManualMigrations/v0.8.5/ManualMigrateInvalidBlacklistSeries.cs b/API/Data/ManualMigrations/v0.8.5/ManualMigrateInvalidBlacklistSeries.cs index c6516acc9..14bc57cb1 100644 --- a/API/Data/ManualMigrations/v0.8.5/ManualMigrateInvalidBlacklistSeries.cs +++ b/API/Data/ManualMigrations/v0.8.5/ManualMigrateInvalidBlacklistSeries.cs @@ -28,6 +28,7 @@ public static class ManualMigrateInvalidBlacklistSeries .Include(s => s.ExternalSeriesMetadata) .Where(s => s.IsBlacklisted && s.ExternalSeriesMetadata.ValidUntilUtc > DateTime.MinValue) .ToListAsync(); + foreach (var series in blacklistedSeries) { series.IsBlacklisted = false; diff --git a/API/Data/ManualMigrations/v0.8.5/ManualMigrateScrobbleErrors.cs b/API/Data/ManualMigrations/v0.8.5/ManualMigrateScrobbleErrors.cs new file mode 100644 index 000000000..2bc22ca11 --- /dev/null +++ b/API/Data/ManualMigrations/v0.8.5/ManualMigrateScrobbleErrors.cs @@ -0,0 +1,49 @@ +using System; +using System.Linq; +using System.Threading.Tasks; +using API.Entities.History; +using Kavita.Common.EnvironmentInfo; +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Logging; + +namespace API.Data.ManualMigrations; + +/// +/// v0.8.5 - There seem to be some scrobble events that predate the scrobble error table and can be processed over and over. +/// This clears any scrobble event that is more than a year old, along with anything that is errored. +/// +public static class ManualMigrateScrobbleErrors +{ + public static async Task Migrate(DataContext context, ILogger logger) + { + if (await context.ManualMigrationHistory.AnyAsync(m => m.Name == "ManualMigrateScrobbleErrors")) + { + return; + } + + logger.LogCritical("Running ManualMigrateScrobbleErrors migration - Please be patient, this may take some time. This is not an error"); + + // Get all scrobble events that are over a year old or errored so they can be removed + var events = await context.ScrobbleEvent + .Where(se => se.LastModifiedUtc <= DateTime.UtcNow.AddYears(-1) || se.IsErrored) + .ToListAsync(); + + context.ScrobbleEvent.RemoveRange(events); + + if (context.ChangeTracker.HasChanges()) + { + await context.SaveChangesAsync(); + logger.LogInformation("Removed {Count} old scrobble events", events.Count); + } + + await context.ManualMigrationHistory.AddAsync(new ManualMigrationHistory() + { + Name = "ManualMigrateScrobbleErrors", + ProductVersion = BuildInfo.Version.ToString(), + RanAt = DateTime.UtcNow + }); + await context.SaveChangesAsync(); + + logger.LogCritical("Running ManualMigrateScrobbleErrors migration - Completed. 
This is not an error"); + } +} diff --git a/API/Extensions/FlurlExtensions.cs b/API/Extensions/FlurlExtensions.cs index 67d201afb..efd805045 100644 --- a/API/Extensions/FlurlExtensions.cs +++ b/API/Extensions/FlurlExtensions.cs @@ -7,13 +7,14 @@ namespace API.Extensions; public static class FlurlExtensions { - public static IFlurlRequest WithKavitaPlusHeaders(this string request, string license) + public static IFlurlRequest WithKavitaPlusHeaders(this string request, string license, string? anilistToken = null) { return request .WithHeader("Accept", "application/json") .WithHeader("User-Agent", "Kavita") .WithHeader("x-license-key", license) .WithHeader("x-installId", HashUtil.ServerToken()) + .WithHeader("x-anilist-token", anilistToken ?? string.Empty) .WithHeader("x-kavita-version", BuildInfo.Version) .WithHeader("Content-Type", "application/json") .WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs)); diff --git a/API/Services/Plus/ExternalMetadataService.cs b/API/Services/Plus/ExternalMetadataService.cs index 76e18eb3c..ade35024a 100644 --- a/API/Services/Plus/ExternalMetadataService.cs +++ b/API/Services/Plus/ExternalMetadataService.cs @@ -68,9 +68,10 @@ public class ExternalMetadataService : IExternalMetadataService [LibraryType.Comic, LibraryType.Book, LibraryType.Image, LibraryType.ComicVine]; private readonly SeriesDetailPlusDto _defaultReturn = new() { + Series = null, Recommendations = null, - Ratings = ArraySegment.Empty, - Reviews = ArraySegment.Empty + Ratings = [], + Reviews = [] }; // Allow 50 requests per 24 hours private static readonly RateLimiter RateLimiter = new RateLimiter(50, TimeSpan.FromHours(24), false); @@ -122,7 +123,7 @@ public class ExternalMetadataService : IExternalMetadataService var libraryType = libTypes[seriesId]; var success = await FetchSeriesMetadata(seriesId, libraryType); if (success) count++; - await Task.Delay(1500); + await Task.Delay(6000); // Currently AL is degraded and has 30 requests/min, give a little padding since this is a background request } _logger.LogInformation("[Kavita+ Data Refresh] Finished Refreshing {Count} series data from Kavita+", count); } @@ -148,8 +149,6 @@ public class ExternalMetadataService : IExternalMetadataService return false; } - _logger.LogDebug("Prefetching Kavita+ data for Series {SeriesId}", seriesId); - // Prefetch SeriesDetail data return await GetSeriesDetailPlus(seriesId, libraryType) != null; } @@ -220,10 +219,12 @@ public class ExternalMetadataService : IExternalMetadataService MalId = potentialMalId ?? ScrobblingService.GetMalId(series), }; + var token = (await _unitOfWork.UserRepository.GetDefaultAdminUser()).AniListAccessToken; + try { var results = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/match-series") - .WithKavitaPlusHeaders(license) + .WithKavitaPlusHeaders(license, token) .PostJsonAsync(matchRequest) .ReceiveJson>(); @@ -412,10 +413,41 @@ public class ExternalMetadataService : IExternalMetadataService { _logger.LogDebug("Fetching Kavita+ Series Detail data for {SeriesName}", string.IsNullOrEmpty(data.SeriesName) ? 
data.AniListId : data.SeriesName); var license = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey)).Value; - var result = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/series-detail") - .WithKavitaPlusHeaders(license) - .PostJsonAsync(data) - .ReceiveJson(); // This returns an AniListSeries and Match returns ExternalSeriesDto + var token = (await _unitOfWork.UserRepository.GetDefaultAdminUser()).AniListAccessToken; + SeriesDetailPlusApiDto? result = null; + + try + { + result = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/series-detail") + .WithKavitaPlusHeaders(license, token) + .PostJsonAsync(data) + .ReceiveJson< + SeriesDetailPlusApiDto>(); // This returns an AniListSeries and Match returns ExternalSeriesDto + } + catch (FlurlHttpException ex) + { + var errorMessage = await ex.GetResponseStringAsync(); + // Trim quotes if the response is a JSON string + errorMessage = errorMessage.Trim('"'); + + if (ex.StatusCode == 400 && errorMessage.Contains("Too many Requests")) + { + _logger.LogInformation("Hit rate limit, will retry in 3 seconds"); + await Task.Delay(3000); + + result = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/series-detail") + .WithKavitaPlusHeaders(license, token) + .PostJsonAsync(data) + .ReceiveJson< + SeriesDetailPlusApiDto>(); + } + } + + if (result == null) + { + _logger.LogInformation("Hit rate limit twice, try again later"); + return _defaultReturn; + } // Clear out existing results @@ -1353,6 +1385,15 @@ public class ExternalMetadataService : IExternalMetadataService } + /// + /// This is to get series information for the recommendation drawer on Kavita + /// + /// This uses a different API than series detail + /// + /// + /// + /// + /// private async Task GetSeriesDetail(string license, int? aniListId, long? malId, int? 
seriesId) { var payload = new ExternalMetadataIdsDto() @@ -1385,8 +1426,9 @@ public class ExternalMetadataService : IExternalMetadataService } try { + var token = (await _unitOfWork.UserRepository.GetDefaultAdminUser()).AniListAccessToken; var ret = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/series-by-ids") - .WithKavitaPlusHeaders(license) + .WithKavitaPlusHeaders(license, token) .PostJsonAsync(payload) .ReceiveJson(); diff --git a/API/Services/Plus/ScrobblingService.cs b/API/Services/Plus/ScrobblingService.cs index 3ef5a7f82..c67f6c73f 100644 --- a/API/Services/Plus/ScrobblingService.cs +++ b/API/Services/Plus/ScrobblingService.cs @@ -223,7 +223,7 @@ public class ScrobblingService : IScrobblingService try { var response = await (Configuration.KavitaPlusApiUrl + "/api/scrobbling/valid-key?provider=" + provider + "&key=" + token) - .WithKavitaPlusHeaders(license.Value) + .WithKavitaPlusHeaders(license.Value, token) .GetStringAsync(); return bool.Parse(response); @@ -474,7 +474,7 @@ public class ScrobblingService : IScrobblingService try { var response = await (Configuration.KavitaPlusApiUrl + "/api/scrobbling/rate-limit?accessToken=" + aniListToken) - .WithKavitaPlusHeaders(license) + .WithKavitaPlusHeaders(license, aniListToken) .GetStringAsync(); return int.Parse(response); @@ -566,8 +566,19 @@ public class ScrobblingService : IScrobblingService return response.RateLeft; } - catch (FlurlHttpException ex) + catch (FlurlHttpException ex) { + var errorMessage = await ex.GetResponseStringAsync(); + // Trim quotes if the response is a JSON string + errorMessage = errorMessage.Trim('"'); + + if (errorMessage.Contains("Too Many Requests")) + { + _logger.LogInformation("Hit Too many requests, sleeping to regain requests and retrying"); + await Task.Delay(TimeSpan.FromMinutes(10)); + return await PostScrobbleUpdate(data, license, evt); + } + _logger.LogError(ex, "Scrobbling to Kavita+ API failed due to error: {ErrorMessage}", ex.Message); if (ex.Message.Contains("Call failed with status code 500 (Internal Server Error)")) { @@ -740,7 +751,6 @@ public class ScrobblingService : IScrobblingService public async Task ProcessUpdatesSinceLastSync() { // Check how many scrobble events we have available then only do those. 
- _logger.LogInformation("Starting Scrobble Processing"); var userRateLimits = new Dictionary(); var license = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey); @@ -788,38 +798,55 @@ public class ScrobblingService : IScrobblingService .Select(d => d.Event) .ToList(); - // For all userIds, ensure that we can connect and have access - var usersToScrobble = readEvents.Select(r => r.AppUser) - .Concat(addToWantToRead.Select(r => r.AppUser)) - .Concat(removeWantToRead.Select(r => r.AppUser)) - .Concat(ratingEvents.Select(r => r.AppUser)) - .Where(user => !string.IsNullOrEmpty(user.AniListAccessToken)) - .Where(user => user.UserPreferences.AniListScrobblingEnabled) // TODO: Add more as we add more support - .DistinctBy(u => u.Id) - .ToList(); - foreach (var user in usersToScrobble) + // Get all the applicable users to scrobble and set their rate limits + var usersToScrobble = await PrepareUsersToScrobble(readEvents, addToWantToRead, removeWantToRead, ratingEvents, userRateLimits, license); + + var totalEvents = readEvents.Count + decisions.Count + ratingEvents.Count; + + + + if (totalEvents == 0) { - await SetAndCheckRateLimit(userRateLimits, user, license.Value); + return; } - var totalProgress = readEvents.Count + decisions.Count + ratingEvents.Count + decisions.Count; + _logger.LogInformation("Scrobble Processing Details:" + + "\n Read Events: {ReadEventsCount}" + + "\n Want to Read Events: {WantToReadEventsCount}" + + "\n Rating Events: {RatingEventsCount}" + + "\n Users to Scrobble: {UsersToScrobbleCount}" + + "\n Total Events to Process: {TotalEvents}", + readEvents.Count, + decisions.Count, + ratingEvents.Count, + usersToScrobble.Count, + totalEvents); - _logger.LogInformation("Found {TotalEvents} Scrobble Events", totalProgress); try { - // Recalculate the highest volume/chapter - foreach (var readEvt in readEvents) - { - readEvt.VolumeNumber = - (int) await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadVolumeForSeries(readEvt.SeriesId, - readEvt.AppUser.Id); - readEvt.ChapterNumber = - await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadChapterForSeries(readEvt.SeriesId, - readEvt.AppUser.Id); - _unitOfWork.ScrobbleRepository.Update(readEvt); - } + progressCounter = await ProcessReadEvents(readEvents, userRateLimits, usersToScrobble, totalEvents, progressCounter); - progressCounter = await ProcessEvents(readEvents, userRateLimits, usersToScrobble.Count, progressCounter, totalProgress, async evt => new ScrobbleDto() + progressCounter = await ProcessRatingEvents(ratingEvents, userRateLimits, usersToScrobble, totalEvents, progressCounter); + + progressCounter = await ProcessRatingEvents(decisions, userRateLimits, usersToScrobble, totalEvents, addToWantToRead, removeWantToRead, progressCounter); + } + catch (FlurlHttpException ex) + { + _logger.LogError(ex, "Kavita+ API or a Scrobble service may be experiencing an outage. 
Stopping sending data"); + return; + } + + + await SaveToDb(progressCounter, true); + _logger.LogInformation("Scrobbling Events is complete"); + + } + + private async Task ProcessRatingEvents(List decisions, Dictionary userRateLimits, List usersToScrobble, int totalEvents, + List addToWantToRead, List removeWantToRead, int progressCounter) + { + progressCounter = await ProcessEvents(decisions, userRateLimits, usersToScrobble.Count, progressCounter, + totalEvents, evt => Task.FromResult(new ScrobbleDto() { Format = evt.Format, AniListId = evt.AniListId, @@ -830,14 +857,35 @@ public class ScrobblingService : IScrobblingService AniListToken = evt.AppUser.AniListAccessToken, SeriesName = evt.Series.Name, LocalizedSeriesName = evt.Series.LocalizedName, - ScrobbleDateUtc = evt.LastModifiedUtc, - Year = evt.Series.Metadata.ReleaseYear, - StartedReadingDateUtc = await _unitOfWork.AppUserProgressRepository.GetFirstProgressForSeries(evt.SeriesId, evt.AppUser.Id), - LatestReadingDateUtc = await _unitOfWork.AppUserProgressRepository.GetLatestProgressForSeries(evt.SeriesId, evt.AppUser.Id), - }); + Year = evt.Series.Metadata.ReleaseYear + })); - progressCounter = await ProcessEvents(ratingEvents, userRateLimits, usersToScrobble.Count, progressCounter, - totalProgress, evt => Task.FromResult(new ScrobbleDto() + // After decisions, we need to mark all the want to read and remove from want to read as completed + if (decisions.All(d => d.IsProcessed)) + { + foreach (var scrobbleEvent in addToWantToRead) + { + scrobbleEvent.IsProcessed = true; + scrobbleEvent.ProcessDateUtc = DateTime.UtcNow; + _unitOfWork.ScrobbleRepository.Update(scrobbleEvent); + } + foreach (var scrobbleEvent in removeWantToRead) + { + scrobbleEvent.IsProcessed = true; + scrobbleEvent.ProcessDateUtc = DateTime.UtcNow; + _unitOfWork.ScrobbleRepository.Update(scrobbleEvent); + } + await _unitOfWork.CommitAsync(); + } + + return progressCounter; + } + + private async Task ProcessRatingEvents(List ratingEvents, Dictionary userRateLimits, List usersToScrobble, + int totalEvents, int progressCounter) + { + return await ProcessEvents(ratingEvents, userRateLimits, usersToScrobble.Count, progressCounter, + totalEvents, evt => Task.FromResult(new ScrobbleDto() { Format = evt.Format, AniListId = evt.AniListId, @@ -849,53 +897,65 @@ public class ScrobblingService : IScrobblingService Rating = evt.Rating, Year = evt.Series.Metadata.ReleaseYear })); + } - progressCounter = await ProcessEvents(decisions, userRateLimits, usersToScrobble.Count, progressCounter, - totalProgress, evt => Task.FromResult(new ScrobbleDto() - { - Format = evt.Format, - AniListId = evt.AniListId, - MALId = (int?) evt.MalId, - ScrobbleEventType = evt.ScrobbleEventType, - ChapterNumber = evt.ChapterNumber, - VolumeNumber = (int?) 
evt.VolumeNumber, - AniListToken = evt.AppUser.AniListAccessToken, - SeriesName = evt.Series.Name, - LocalizedSeriesName = evt.Series.LocalizedName, - Year = evt.Series.Metadata.ReleaseYear - })); - - // After decisions, we need to mark all the want to read and remove from want to read as completed - if (decisions.All(d => d.IsProcessed)) - { - foreach (var scrobbleEvent in addToWantToRead) - { - scrobbleEvent.IsProcessed = true; - scrobbleEvent.ProcessDateUtc = DateTime.UtcNow; - _unitOfWork.ScrobbleRepository.Update(scrobbleEvent); - } - foreach (var scrobbleEvent in removeWantToRead) - { - scrobbleEvent.IsProcessed = true; - scrobbleEvent.ProcessDateUtc = DateTime.UtcNow; - _unitOfWork.ScrobbleRepository.Update(scrobbleEvent); - } - await _unitOfWork.CommitAsync(); - } - } - catch (FlurlHttpException ex) + private async Task ProcessReadEvents(List readEvents, Dictionary userRateLimits, List usersToScrobble, int totalEvents, + int progressCounter) + { + // Recalculate the highest volume/chapter + foreach (var readEvt in readEvents) { - _logger.LogError(ex, "Kavita+ API or a Scrobble service may be experiencing an outage. Stopping sending data"); - return; + readEvt.VolumeNumber = + (int) await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadVolumeForSeries(readEvt.SeriesId, + readEvt.AppUser.Id); + readEvt.ChapterNumber = + await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadChapterForSeries(readEvt.SeriesId, + readEvt.AppUser.Id); + _unitOfWork.ScrobbleRepository.Update(readEvt); } - - await SaveToDb(progressCounter, true); - _logger.LogInformation("Scrobbling Events is complete"); + return await ProcessEvents(readEvents, userRateLimits, usersToScrobble.Count, progressCounter, totalEvents, + async evt => new ScrobbleDto() + { + Format = evt.Format, + AniListId = evt.AniListId, + MALId = (int?) evt.MalId, + ScrobbleEventType = evt.ScrobbleEventType, + ChapterNumber = evt.ChapterNumber, + VolumeNumber = (int?) 
evt.VolumeNumber, + AniListToken = evt.AppUser.AniListAccessToken!, + SeriesName = evt.Series.Name, + LocalizedSeriesName = evt.Series.LocalizedName, + ScrobbleDateUtc = evt.LastModifiedUtc, + Year = evt.Series.Metadata.ReleaseYear, + StartedReadingDateUtc = await _unitOfWork.AppUserProgressRepository.GetFirstProgressForSeries(evt.SeriesId, evt.AppUser.Id), + LatestReadingDateUtc = await _unitOfWork.AppUserProgressRepository.GetLatestProgressForSeries(evt.SeriesId, evt.AppUser.Id), + }); } - private async Task ProcessEvents(IEnumerable events, IDictionary userRateLimits, + private async Task> PrepareUsersToScrobble(List readEvents, List addToWantToRead, List removeWantToRead, List ratingEvents, + Dictionary userRateLimits, ServerSetting license) + { + // For all userIds, ensure that we can connect and have access + var usersToScrobble = readEvents.Select(r => r.AppUser) + .Concat(addToWantToRead.Select(r => r.AppUser)) + .Concat(removeWantToRead.Select(r => r.AppUser)) + .Concat(ratingEvents.Select(r => r.AppUser)) + .Where(user => !string.IsNullOrEmpty(user.AniListAccessToken)) + .Where(user => user.UserPreferences.AniListScrobblingEnabled) + .DistinctBy(u => u.Id) + .ToList(); + foreach (var user in usersToScrobble) + { + await SetAndCheckRateLimit(userRateLimits, user, license.Value); + } + + return usersToScrobble; + } + + + private async Task ProcessEvents(IEnumerable events, Dictionary userRateLimits, int usersToScrobble, int progressCounter, int totalProgress, Func> createEvent) { var license = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey); @@ -914,7 +974,7 @@ public class ScrobblingService : IScrobblingService { _unitOfWork.ScrobbleRepository.Attach(new ScrobbleError() { - Comment = "AniList token has expired and needs rotating. Scrobbles wont work until then", + Comment = "AniList token has expired and needs rotating. 
Scrobbling won't work until then", Details = $"User: {evt.AppUser.UserName}", LibraryId = evt.LibraryId, SeriesId = evt.SeriesId @@ -923,7 +983,7 @@ public class ScrobblingService : IScrobblingService return 0; } - if (await _unitOfWork.ExternalSeriesMetadataRepository.IsBlacklistedSeries(evt.SeriesId)) + if (evt.Series.IsBlacklisted || evt.Series.DontMatch) { _logger.LogInformation("Series {SeriesName} ({SeriesId}) can't be matched and thus cannot scrobble this event", evt.Series.Name, evt.SeriesId); _unitOfWork.ScrobbleRepository.Attach(new ScrobbleError() @@ -955,14 +1015,11 @@ public class ScrobblingService : IScrobblingService var data = await createEvent(evt); // We need to handle the encoding and changing it to the old one until we can update the API layer to handle these // which could happen in v0.8.3 - if (data.VolumeNumber is Parser.SpecialVolumeNumber) - { - data.VolumeNumber = 0; - } - if (data.VolumeNumber is Parser.DefaultChapterNumber) + if (data.VolumeNumber is Parser.SpecialVolumeNumber or Parser.DefaultChapterNumber) { data.VolumeNumber = 0; } + if (data.ChapterNumber is Parser.DefaultChapterNumber) { data.ChapterNumber = 0; @@ -1006,8 +1063,11 @@ public class ScrobblingService : IScrobblingService { if (!force || progressCounter % 5 == 0) { - _logger.LogDebug("Saving Progress"); - await _unitOfWork.CommitAsync(); + if (_unitOfWork.HasChanges()) + { + _logger.LogDebug("Saving Progress"); + await _unitOfWork.CommitAsync(); + } } } diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs index 2344659ec..0aeb8c189 100644 --- a/API/Services/TaskScheduler.cs +++ b/API/Services/TaskScheduler.cs @@ -215,7 +215,7 @@ public class TaskScheduler : ITaskScheduler RecurringJob.AddOrUpdate(LicenseCheckId, () => _licenseService.GetLicenseInfo(false), LicenseService.Cron, RecurringJobOptions); - // KavitaPlus Scrobbling (every 4 hours) + // KavitaPlus Scrobbling (every hour) RecurringJob.AddOrUpdate(ProcessScrobblingEventsId, () => _scrobblingService.ProcessUpdatesSinceLastSync(), "0 */1 * * *", RecurringJobOptions); RecurringJob.AddOrUpdate(ProcessProcessedScrobblingEventsId, () => _scrobblingService.ClearProcessedEvents(), @@ -223,7 +223,7 @@ public class TaskScheduler : ITaskScheduler // Backfilling/Freshening Reviews/Rating/Recommendations RecurringJob.AddOrUpdate(KavitaPlusDataRefreshId, - () => _externalMetadataService.FetchExternalDataTask(), Cron.Daily(Rnd.Next(1, 4)), + () => _externalMetadataService.FetchExternalDataTask(), Cron.Daily(Rnd.Next(1, 5)), RecurringJobOptions); // This shouldn't be so close to fetching data due to Rate limit concerns diff --git a/API/Services/Tasks/VersionUpdaterService.cs b/API/Services/Tasks/VersionUpdaterService.cs index a52fec020..ce79b6f8a 100644 --- a/API/Services/Tasks/VersionUpdaterService.cs +++ b/API/Services/Tasks/VersionUpdaterService.cs @@ -130,6 +130,7 @@ public partial class VersionUpdaterService : IVersionUpdaterService Removed = sections.TryGetValue("Removed", out var removed) ? removed : [], Theme = sections.TryGetValue("Theme", out var theme) ? theme : [], Developer = sections.TryGetValue("Developer", out var developer) ? developer : [], + KnownIssues = sections.TryGetValue("KnownIssues", out var knownIssues) ? knownIssues : [], Api = sections.TryGetValue("Api", out var api) ? api : [], FeatureRequests = sections.TryGetValue("Feature Requests", out var frs) ? 
frs : [], BlogPart = _markdown.Transform(blogPart.Trim()), @@ -376,6 +377,7 @@ public partial class VersionUpdaterService : IVersionUpdaterService Fixed = parsedSections.TryGetValue("Fixed", out var fixes) ? fixes : [], Theme = parsedSections.TryGetValue("Theme", out var theme) ? theme : [], Developer = parsedSections.TryGetValue("Developer", out var developer) ? developer : [], + KnownIssues = parsedSections.TryGetValue("Known Issues", out var knownIssues) ? knownIssues : [], Api = parsedSections.TryGetValue("Api", out var api) ? api : [], FeatureRequests = parsedSections.TryGetValue("Feature Requests", out var frs) ? frs : [], BlogPart = blogPart diff --git a/API/Startup.cs b/API/Startup.cs index 5f8c0a30e..92280f9ca 100644 --- a/API/Startup.cs +++ b/API/Startup.cs @@ -280,6 +280,7 @@ public class Startup // v0.8.5 await ManualMigrateBlacklistTableToSeries.Migrate(dataContext, logger); await ManualMigrateInvalidBlacklistSeries.Migrate(dataContext, logger); + await ManualMigrateScrobbleErrors.Migrate(dataContext, logger); // Update the version in the DB after all migrations are run var installVersion = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallVersion); diff --git a/UI/Web/src/app/_models/events/update-version-event.ts b/UI/Web/src/app/_models/events/update-version-event.ts index 63661e5e5..4e7e82ce6 100644 --- a/UI/Web/src/app/_models/events/update-version-event.ts +++ b/UI/Web/src/app/_models/events/update-version-event.ts @@ -18,6 +18,7 @@ export interface UpdateVersionEvent { developer: Array; api: Array; featureRequests: Array; + knownIssues: Array; /** * The part above the changelog part */ diff --git a/UI/Web/src/app/announcements/_components/changelog-update-item/changelog-update-item.component.html b/UI/Web/src/app/announcements/_components/changelog-update-item/changelog-update-item.component.html index 8d95bc84b..6f8411870 100644 --- a/UI/Web/src/app/announcements/_components/changelog-update-item/changelog-update-item.component.html +++ b/UI/Web/src/app/announcements/_components/changelog-update-item/changelog-update-item.component.html @@ -16,6 +16,7 @@ + @if (showExtras) { diff --git a/UI/Web/src/app/shared/update-notification/update-notification-modal.component.html b/UI/Web/src/app/shared/update-notification/update-notification-modal.component.html index 8a1ccd412..b8d3c2b38 100644 --- a/UI/Web/src/app/shared/update-notification/update-notification-modal.component.html +++ b/UI/Web/src/app/shared/update-notification/update-notification-modal.component.html @@ -11,6 +11,8 @@ diff --git a/UI/Web/src/assets/langs/en.json b/UI/Web/src/assets/langs/en.json index 44bc66111..c2dcbe6c3 100644 --- a/UI/Web/src/assets/langs/en.json +++ b/UI/Web/src/assets/langs/en.json @@ -657,7 +657,8 @@ "api": "API", "published-label": "Published: ", "installed": "{{changelog.installed}}", - "feature-requests": "Feature Requests" + "feature-requests": "Feature Requests", + "known-issues": "Known Issues" }, "new-version-modal": { @@ -669,6 +670,7 @@ "developer": "{{changelog.developer}}", "theme": "{{changelog.theme}}", "removed": "{{changelog.removed}}", + "known-issues": "{{changelog.known-issues}}", "api": "{{changelog.api}}", "close": "{{common.close}}", "refresh": "Refresh"
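
Note: the same retry-on-"Too many Requests" handling now appears in both ExternalMetadataService.GetSeriesDetailPlus and ScrobblingService.PostScrobbleUpdate. Below is a minimal sketch of how that pattern could be shared; it relies only on the Flurl calls already used in this diff (FlurlHttpException, GetResponseStringAsync), while the helper name, signature, and single-retry policy are hypothetical and not part of this change.

using System;
using System.Threading.Tasks;
using Flurl.Http;
using Microsoft.Extensions.Logging;

namespace API.Extensions;

public static class RateLimitRetry
{
    // Hypothetical helper (not in this PR): runs a Kavita+ call once and retries a single
    // time after a delay when the response body indicates a rate limit was hit.
    public static async Task<T?> WithTooManyRequestsRetry<T>(
        Func<Task<T>> apiCall, TimeSpan retryDelay, ILogger logger) where T : class
    {
        try
        {
            return await apiCall();
        }
        catch (FlurlHttpException ex)
        {
            // Trim quotes if the response is a JSON string (same handling as above)
            var errorMessage = (await ex.GetResponseStringAsync() ?? string.Empty).Trim('"');
            if (!errorMessage.Contains("Too many Requests", StringComparison.OrdinalIgnoreCase))
            {
                throw;
            }

            logger.LogInformation("Hit rate limit, will retry in {Delay}", retryDelay);
            await Task.Delay(retryDelay);

            try
            {
                return await apiCall();
            }
            catch (FlurlHttpException)
            {
                // Second rate limit in a row: give up and let the caller fall back
                // (e.g. to _defaultReturn) instead of retrying indefinitely.
                return null;
            }
        }
    }
}

Both call sites could then wrap their PostJsonAsync/ReceiveJson chains in a lambda and pass their existing delays (3 seconds for series-detail, 10 minutes for scrobble updates).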