Polish 6 - The last Polish (#3591)

This commit is contained in:
Joe Milazzo 2025-03-05 17:23:19 -06:00 committed by GitHub
parent 4a4d59bc90
commit 9ad394c43a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
13 changed files with 264 additions and 101 deletions

View File

@ -63,6 +63,7 @@ public class UpdateNotificationDto
public IList<string> Developer { get; set; }
public IList<string> Api { get; set; }
public IList<string> FeatureRequests { get; set; }
public IList<string> KnownIssues { get; set; }
/// <summary>
/// The part above the changelog part
/// </summary>

View File

@ -28,6 +28,7 @@ public static class ManualMigrateInvalidBlacklistSeries
.Include(s => s.ExternalSeriesMetadata)
.Where(s => s.IsBlacklisted && s.ExternalSeriesMetadata.ValidUntilUtc > DateTime.MinValue)
.ToListAsync();
foreach (var series in blacklistedSeries)
{
series.IsBlacklisted = false;

View File

@ -0,0 +1,49 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using API.Entities.History;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Data.ManualMigrations;
/// <summary>
/// v0.8.5 - Some scrobble events that predate the scrobble error table can be processed over and over.
/// Deletes every scrobble event last modified more than one year ago, plus any event flagged as errored.
/// </summary>
public static class ManualMigrateScrobbleErrors
{
    public static async Task Migrate(DataContext context, ILogger<Program> logger)
    {
        // Idempotency guard: skip if this migration has already been recorded
        if (await context.ManualMigrationHistory.AnyAsync(m => m.Name == "ManualMigrateScrobbleErrors"))
        {
            return;
        }

        logger.LogCritical("Running ManualMigrateScrobbleErrors migration - Please be patient, this may take some time. This is not an error");

        // Collect all scrobble events older than a year, along with anything errored
        var events = await context.ScrobbleEvent
            .Where(se => se.LastModifiedUtc <= DateTime.UtcNow.AddYears(-1) || se.IsErrored)
            .ToListAsync();

        context.ScrobbleEvent.RemoveRange(events);
        if (context.ChangeTracker.HasChanges())
        {
            await context.SaveChangesAsync();
            logger.LogInformation("Removed {Count} old scrobble events", events.Count);
        }

        // BUGFIX: record this migration under its OWN name. The original wrote
        // "ManualMigrateInvalidBlacklistSeries" (copy-paste from the previous migration),
        // so the guard above never matched and this migration re-ran on every startup.
        await context.ManualMigrationHistory.AddAsync(new ManualMigrationHistory()
        {
            Name = "ManualMigrateScrobbleErrors",
            ProductVersion = BuildInfo.Version.ToString(),
            RanAt = DateTime.UtcNow
        });
        await context.SaveChangesAsync();

        logger.LogCritical("Running ManualMigrateScrobbleErrors migration - Completed. This is not an error");
    }
}

View File

@ -7,13 +7,14 @@ namespace API.Extensions;
public static class FlurlExtensions
{
public static IFlurlRequest WithKavitaPlusHeaders(this string request, string license)
public static IFlurlRequest WithKavitaPlusHeaders(this string request, string license, string? anilistToken = null)
{
return request
.WithHeader("Accept", "application/json")
.WithHeader("User-Agent", "Kavita")
.WithHeader("x-license-key", license)
.WithHeader("x-installId", HashUtil.ServerToken())
.WithHeader("x-anilist-token", anilistToken ?? string.Empty)
.WithHeader("x-kavita-version", BuildInfo.Version)
.WithHeader("Content-Type", "application/json")
.WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs));

View File

@ -68,9 +68,10 @@ public class ExternalMetadataService : IExternalMetadataService
[LibraryType.Comic, LibraryType.Book, LibraryType.Image, LibraryType.ComicVine];
private readonly SeriesDetailPlusDto _defaultReturn = new()
{
Series = null,
Recommendations = null,
Ratings = ArraySegment<RatingDto>.Empty,
Reviews = ArraySegment<UserReviewDto>.Empty
Ratings = [],
Reviews = []
};
// Allow 50 requests per 24 hours
private static readonly RateLimiter RateLimiter = new RateLimiter(50, TimeSpan.FromHours(24), false);
@ -122,7 +123,7 @@ public class ExternalMetadataService : IExternalMetadataService
var libraryType = libTypes[seriesId];
var success = await FetchSeriesMetadata(seriesId, libraryType);
if (success) count++;
await Task.Delay(1500);
await Task.Delay(6000); // Currently AL is degraded and has 30 requests/min, give a little padding since this is a background request
}
_logger.LogInformation("[Kavita+ Data Refresh] Finished Refreshing {Count} series data from Kavita+", count);
}
@ -148,8 +149,6 @@ public class ExternalMetadataService : IExternalMetadataService
return false;
}
_logger.LogDebug("Prefetching Kavita+ data for Series {SeriesId}", seriesId);
// Prefetch SeriesDetail data
return await GetSeriesDetailPlus(seriesId, libraryType) != null;
}
@ -220,10 +219,12 @@ public class ExternalMetadataService : IExternalMetadataService
MalId = potentialMalId ?? ScrobblingService.GetMalId(series),
};
var token = (await _unitOfWork.UserRepository.GetDefaultAdminUser()).AniListAccessToken;
try
{
var results = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/match-series")
.WithKavitaPlusHeaders(license)
.WithKavitaPlusHeaders(license, token)
.PostJsonAsync(matchRequest)
.ReceiveJson<IList<ExternalSeriesMatchDto>>();
@ -412,10 +413,41 @@ public class ExternalMetadataService : IExternalMetadataService
{
_logger.LogDebug("Fetching Kavita+ Series Detail data for {SeriesName}", string.IsNullOrEmpty(data.SeriesName) ? data.AniListId : data.SeriesName);
var license = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey)).Value;
var result = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/series-detail")
.WithKavitaPlusHeaders(license)
.PostJsonAsync(data)
.ReceiveJson<SeriesDetailPlusApiDto>(); // This returns an AniListSeries and Match returns ExternalSeriesDto
var token = (await _unitOfWork.UserRepository.GetDefaultAdminUser()).AniListAccessToken;
SeriesDetailPlusApiDto? result = null;
try
{
result = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/series-detail")
.WithKavitaPlusHeaders(license, token)
.PostJsonAsync(data)
.ReceiveJson<
SeriesDetailPlusApiDto>(); // This returns an AniListSeries and Match returns ExternalSeriesDto
}
catch (FlurlHttpException ex)
{
var errorMessage = await ex.GetResponseStringAsync();
// Trim quotes if the response is a JSON string
errorMessage = errorMessage.Trim('"');
if (ex.StatusCode == 400 && errorMessage.Contains("Too many Requests"))
{
_logger.LogInformation("Hit rate limit, will retry in 3 seconds");
await Task.Delay(3000);
result = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/series-detail")
.WithKavitaPlusHeaders(license, token)
.PostJsonAsync(data)
.ReceiveJson<
SeriesDetailPlusApiDto>();
}
}
if (result == null)
{
_logger.LogInformation("Hit rate limit twice, try again later");
return _defaultReturn;
}
// Clear out existing results
@ -1353,6 +1385,15 @@ public class ExternalMetadataService : IExternalMetadataService
}
/// <summary>
/// This is to get series information for the recommendation drawer on Kavita
/// </summary>
/// <remarks>This uses a different API than series detail</remarks>
/// <param name="license"></param>
/// <param name="aniListId"></param>
/// <param name="malId"></param>
/// <param name="seriesId"></param>
/// <returns></returns>
private async Task<ExternalSeriesDetailDto?> GetSeriesDetail(string license, int? aniListId, long? malId, int? seriesId)
{
var payload = new ExternalMetadataIdsDto()
@ -1385,8 +1426,9 @@ public class ExternalMetadataService : IExternalMetadataService
}
try
{
var token = (await _unitOfWork.UserRepository.GetDefaultAdminUser()).AniListAccessToken;
var ret = await (Configuration.KavitaPlusApiUrl + "/api/metadata/v2/series-by-ids")
.WithKavitaPlusHeaders(license)
.WithKavitaPlusHeaders(license, token)
.PostJsonAsync(payload)
.ReceiveJson<ExternalSeriesDetailDto>();

View File

@ -223,7 +223,7 @@ public class ScrobblingService : IScrobblingService
try
{
var response = await (Configuration.KavitaPlusApiUrl + "/api/scrobbling/valid-key?provider=" + provider + "&key=" + token)
.WithKavitaPlusHeaders(license.Value)
.WithKavitaPlusHeaders(license.Value, token)
.GetStringAsync();
return bool.Parse(response);
@ -474,7 +474,7 @@ public class ScrobblingService : IScrobblingService
try
{
var response = await (Configuration.KavitaPlusApiUrl + "/api/scrobbling/rate-limit?accessToken=" + aniListToken)
.WithKavitaPlusHeaders(license)
.WithKavitaPlusHeaders(license, aniListToken)
.GetStringAsync();
return int.Parse(response);
@ -566,8 +566,19 @@ public class ScrobblingService : IScrobblingService
return response.RateLeft;
}
catch (FlurlHttpException ex)
catch (FlurlHttpException ex)
{
var errorMessage = await ex.GetResponseStringAsync();
// Trim quotes if the response is a JSON string
errorMessage = errorMessage.Trim('"');
if (errorMessage.Contains("Too Many Requests"))
{
_logger.LogInformation("Hit Too many requests, sleeping to regain requests and retrying");
await Task.Delay(TimeSpan.FromMinutes(10));
return await PostScrobbleUpdate(data, license, evt);
}
_logger.LogError(ex, "Scrobbling to Kavita+ API failed due to error: {ErrorMessage}", ex.Message);
if (ex.Message.Contains("Call failed with status code 500 (Internal Server Error)"))
{
@ -740,7 +751,6 @@ public class ScrobblingService : IScrobblingService
public async Task ProcessUpdatesSinceLastSync()
{
// Check how many scrobble events we have available then only do those.
_logger.LogInformation("Starting Scrobble Processing");
var userRateLimits = new Dictionary<int, int>();
var license = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey);
@ -788,38 +798,55 @@ public class ScrobblingService : IScrobblingService
.Select(d => d.Event)
.ToList();
// For all userIds, ensure that we can connect and have access
var usersToScrobble = readEvents.Select(r => r.AppUser)
.Concat(addToWantToRead.Select(r => r.AppUser))
.Concat(removeWantToRead.Select(r => r.AppUser))
.Concat(ratingEvents.Select(r => r.AppUser))
.Where(user => !string.IsNullOrEmpty(user.AniListAccessToken))
.Where(user => user.UserPreferences.AniListScrobblingEnabled) // TODO: Add more as we add more support
.DistinctBy(u => u.Id)
.ToList();
foreach (var user in usersToScrobble)
// Get all the applicable users to scrobble and set their rate limits
var usersToScrobble = await PrepareUsersToScrobble(readEvents, addToWantToRead, removeWantToRead, ratingEvents, userRateLimits, license);
var totalEvents = readEvents.Count + decisions.Count + ratingEvents.Count;
if (totalEvents == 0)
{
await SetAndCheckRateLimit(userRateLimits, user, license.Value);
return;
}
var totalProgress = readEvents.Count + decisions.Count + ratingEvents.Count + decisions.Count;
_logger.LogInformation("Scrobble Processing Details:" +
"\n Read Events: {ReadEventsCount}" +
"\n Want to Read Events: {WantToReadEventsCount}" +
"\n Rating Events: {RatingEventsCount}" +
"\n Users to Scrobble: {UsersToScrobbleCount}" +
"\n Total Events to Process: {TotalEvents}",
readEvents.Count,
decisions.Count,
ratingEvents.Count,
usersToScrobble.Count,
totalEvents);
_logger.LogInformation("Found {TotalEvents} Scrobble Events", totalProgress);
try
{
// Recalculate the highest volume/chapter
foreach (var readEvt in readEvents)
{
readEvt.VolumeNumber =
(int) await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadVolumeForSeries(readEvt.SeriesId,
readEvt.AppUser.Id);
readEvt.ChapterNumber =
await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadChapterForSeries(readEvt.SeriesId,
readEvt.AppUser.Id);
_unitOfWork.ScrobbleRepository.Update(readEvt);
}
progressCounter = await ProcessReadEvents(readEvents, userRateLimits, usersToScrobble, totalEvents, progressCounter);
progressCounter = await ProcessEvents(readEvents, userRateLimits, usersToScrobble.Count, progressCounter, totalProgress, async evt => new ScrobbleDto()
progressCounter = await ProcessRatingEvents(ratingEvents, userRateLimits, usersToScrobble, totalEvents, progressCounter);
progressCounter = await ProcessRatingEvents(decisions, userRateLimits, usersToScrobble, totalEvents, addToWantToRead, removeWantToRead, progressCounter);
}
catch (FlurlHttpException ex)
{
_logger.LogError(ex, "Kavita+ API or a Scrobble service may be experiencing an outage. Stopping sending data");
return;
}
await SaveToDb(progressCounter, true);
_logger.LogInformation("Scrobbling Events is complete");
}
private async Task<int> ProcessRatingEvents(List<ScrobbleEvent> decisions, Dictionary<int, int> userRateLimits, List<AppUser> usersToScrobble, int totalEvents,
List<ScrobbleEvent> addToWantToRead, List<ScrobbleEvent> removeWantToRead, int progressCounter)
{
progressCounter = await ProcessEvents(decisions, userRateLimits, usersToScrobble.Count, progressCounter,
totalEvents, evt => Task.FromResult(new ScrobbleDto()
{
Format = evt.Format,
AniListId = evt.AniListId,
@ -830,14 +857,35 @@ public class ScrobblingService : IScrobblingService
AniListToken = evt.AppUser.AniListAccessToken,
SeriesName = evt.Series.Name,
LocalizedSeriesName = evt.Series.LocalizedName,
ScrobbleDateUtc = evt.LastModifiedUtc,
Year = evt.Series.Metadata.ReleaseYear,
StartedReadingDateUtc = await _unitOfWork.AppUserProgressRepository.GetFirstProgressForSeries(evt.SeriesId, evt.AppUser.Id),
LatestReadingDateUtc = await _unitOfWork.AppUserProgressRepository.GetLatestProgressForSeries(evt.SeriesId, evt.AppUser.Id),
});
Year = evt.Series.Metadata.ReleaseYear
}));
progressCounter = await ProcessEvents(ratingEvents, userRateLimits, usersToScrobble.Count, progressCounter,
totalProgress, evt => Task.FromResult(new ScrobbleDto()
// After decisions, we need to mark all the want to read and remove from want to read as completed
if (decisions.All(d => d.IsProcessed))
{
foreach (var scrobbleEvent in addToWantToRead)
{
scrobbleEvent.IsProcessed = true;
scrobbleEvent.ProcessDateUtc = DateTime.UtcNow;
_unitOfWork.ScrobbleRepository.Update(scrobbleEvent);
}
foreach (var scrobbleEvent in removeWantToRead)
{
scrobbleEvent.IsProcessed = true;
scrobbleEvent.ProcessDateUtc = DateTime.UtcNow;
_unitOfWork.ScrobbleRepository.Update(scrobbleEvent);
}
await _unitOfWork.CommitAsync();
}
return progressCounter;
}
private async Task<int> ProcessRatingEvents(List<ScrobbleEvent> ratingEvents, Dictionary<int, int> userRateLimits, List<AppUser> usersToScrobble,
int totalEvents, int progressCounter)
{
return await ProcessEvents(ratingEvents, userRateLimits, usersToScrobble.Count, progressCounter,
totalEvents, evt => Task.FromResult(new ScrobbleDto()
{
Format = evt.Format,
AniListId = evt.AniListId,
@ -849,53 +897,65 @@ public class ScrobblingService : IScrobblingService
Rating = evt.Rating,
Year = evt.Series.Metadata.ReleaseYear
}));
}
progressCounter = await ProcessEvents(decisions, userRateLimits, usersToScrobble.Count, progressCounter,
totalProgress, evt => Task.FromResult(new ScrobbleDto()
{
Format = evt.Format,
AniListId = evt.AniListId,
MALId = (int?) evt.MalId,
ScrobbleEventType = evt.ScrobbleEventType,
ChapterNumber = evt.ChapterNumber,
VolumeNumber = (int?) evt.VolumeNumber,
AniListToken = evt.AppUser.AniListAccessToken,
SeriesName = evt.Series.Name,
LocalizedSeriesName = evt.Series.LocalizedName,
Year = evt.Series.Metadata.ReleaseYear
}));
// After decisions, we need to mark all the want to read and remove from want to read as completed
if (decisions.All(d => d.IsProcessed))
{
foreach (var scrobbleEvent in addToWantToRead)
{
scrobbleEvent.IsProcessed = true;
scrobbleEvent.ProcessDateUtc = DateTime.UtcNow;
_unitOfWork.ScrobbleRepository.Update(scrobbleEvent);
}
foreach (var scrobbleEvent in removeWantToRead)
{
scrobbleEvent.IsProcessed = true;
scrobbleEvent.ProcessDateUtc = DateTime.UtcNow;
_unitOfWork.ScrobbleRepository.Update(scrobbleEvent);
}
await _unitOfWork.CommitAsync();
}
}
catch (FlurlHttpException ex)
private async Task<int> ProcessReadEvents(List<ScrobbleEvent> readEvents, Dictionary<int, int> userRateLimits, List<AppUser> usersToScrobble, int totalEvents,
int progressCounter)
{
// Recalculate the highest volume/chapter
foreach (var readEvt in readEvents)
{
_logger.LogError(ex, "Kavita+ API or a Scrobble service may be experiencing an outage. Stopping sending data");
return;
readEvt.VolumeNumber =
(int) await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadVolumeForSeries(readEvt.SeriesId,
readEvt.AppUser.Id);
readEvt.ChapterNumber =
await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadChapterForSeries(readEvt.SeriesId,
readEvt.AppUser.Id);
_unitOfWork.ScrobbleRepository.Update(readEvt);
}
await SaveToDb(progressCounter, true);
_logger.LogInformation("Scrobbling Events is complete");
return await ProcessEvents(readEvents, userRateLimits, usersToScrobble.Count, progressCounter, totalEvents,
async evt => new ScrobbleDto()
{
Format = evt.Format,
AniListId = evt.AniListId,
MALId = (int?) evt.MalId,
ScrobbleEventType = evt.ScrobbleEventType,
ChapterNumber = evt.ChapterNumber,
VolumeNumber = (int?) evt.VolumeNumber,
AniListToken = evt.AppUser.AniListAccessToken!,
SeriesName = evt.Series.Name,
LocalizedSeriesName = evt.Series.LocalizedName,
ScrobbleDateUtc = evt.LastModifiedUtc,
Year = evt.Series.Metadata.ReleaseYear,
StartedReadingDateUtc = await _unitOfWork.AppUserProgressRepository.GetFirstProgressForSeries(evt.SeriesId, evt.AppUser.Id),
LatestReadingDateUtc = await _unitOfWork.AppUserProgressRepository.GetLatestProgressForSeries(evt.SeriesId, evt.AppUser.Id),
});
}
private async Task<int> ProcessEvents(IEnumerable<ScrobbleEvent> events, IDictionary<int, int> userRateLimits,
private async Task<List<AppUser>> PrepareUsersToScrobble(List<ScrobbleEvent> readEvents, List<ScrobbleEvent> addToWantToRead, List<ScrobbleEvent> removeWantToRead, List<ScrobbleEvent> ratingEvents,
Dictionary<int, int> userRateLimits, ServerSetting license)
{
// For all userIds, ensure that we can connect and have access
var usersToScrobble = readEvents.Select(r => r.AppUser)
.Concat(addToWantToRead.Select(r => r.AppUser))
.Concat(removeWantToRead.Select(r => r.AppUser))
.Concat(ratingEvents.Select(r => r.AppUser))
.Where(user => !string.IsNullOrEmpty(user.AniListAccessToken))
.Where(user => user.UserPreferences.AniListScrobblingEnabled)
.DistinctBy(u => u.Id)
.ToList();
foreach (var user in usersToScrobble)
{
await SetAndCheckRateLimit(userRateLimits, user, license.Value);
}
return usersToScrobble;
}
private async Task<int> ProcessEvents(IEnumerable<ScrobbleEvent> events, Dictionary<int, int> userRateLimits,
int usersToScrobble, int progressCounter, int totalProgress, Func<ScrobbleEvent, Task<ScrobbleDto>> createEvent)
{
var license = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey);
@ -914,7 +974,7 @@ public class ScrobblingService : IScrobblingService
{
_unitOfWork.ScrobbleRepository.Attach(new ScrobbleError()
{
Comment = "AniList token has expired and needs rotating. Scrobbles wont work until then",
Comment = "AniList token has expired and needs rotating. Scrobbling wont work until then",
Details = $"User: {evt.AppUser.UserName}",
LibraryId = evt.LibraryId,
SeriesId = evt.SeriesId
@ -923,7 +983,7 @@ public class ScrobblingService : IScrobblingService
return 0;
}
if (await _unitOfWork.ExternalSeriesMetadataRepository.IsBlacklistedSeries(evt.SeriesId))
if (evt.Series.IsBlacklisted || evt.Series.DontMatch)
{
_logger.LogInformation("Series {SeriesName} ({SeriesId}) can't be matched and thus cannot scrobble this event", evt.Series.Name, evt.SeriesId);
_unitOfWork.ScrobbleRepository.Attach(new ScrobbleError()
@ -955,14 +1015,11 @@ public class ScrobblingService : IScrobblingService
var data = await createEvent(evt);
// We need to handle the encoding and changing it to the old one until we can update the API layer to handle these
// which could happen in v0.8.3
if (data.VolumeNumber is Parser.SpecialVolumeNumber)
{
data.VolumeNumber = 0;
}
if (data.VolumeNumber is Parser.DefaultChapterNumber)
if (data.VolumeNumber is Parser.SpecialVolumeNumber or Parser.DefaultChapterNumber)
{
data.VolumeNumber = 0;
}
if (data.ChapterNumber is Parser.DefaultChapterNumber)
{
data.ChapterNumber = 0;
@ -1006,8 +1063,11 @@ public class ScrobblingService : IScrobblingService
{
if (!force || progressCounter % 5 == 0)
{
_logger.LogDebug("Saving Progress");
await _unitOfWork.CommitAsync();
if (_unitOfWork.HasChanges())
{
_logger.LogDebug("Saving Progress");
await _unitOfWork.CommitAsync();
}
}
}

View File

@ -215,7 +215,7 @@ public class TaskScheduler : ITaskScheduler
RecurringJob.AddOrUpdate(LicenseCheckId, () => _licenseService.GetLicenseInfo(false),
LicenseService.Cron, RecurringJobOptions);
// KavitaPlus Scrobbling (every 4 hours)
// KavitaPlus Scrobbling (every hour)
RecurringJob.AddOrUpdate(ProcessScrobblingEventsId, () => _scrobblingService.ProcessUpdatesSinceLastSync(),
"0 */1 * * *", RecurringJobOptions);
RecurringJob.AddOrUpdate(ProcessProcessedScrobblingEventsId, () => _scrobblingService.ClearProcessedEvents(),
@ -223,7 +223,7 @@ public class TaskScheduler : ITaskScheduler
// Backfilling/Freshening Reviews/Rating/Recommendations
RecurringJob.AddOrUpdate(KavitaPlusDataRefreshId,
() => _externalMetadataService.FetchExternalDataTask(), Cron.Daily(Rnd.Next(1, 4)),
() => _externalMetadataService.FetchExternalDataTask(), Cron.Daily(Rnd.Next(1, 5)),
RecurringJobOptions);
// This shouldn't be so close to fetching data due to Rate limit concerns

View File

@ -130,6 +130,7 @@ public partial class VersionUpdaterService : IVersionUpdaterService
Removed = sections.TryGetValue("Removed", out var removed) ? removed : [],
Theme = sections.TryGetValue("Theme", out var theme) ? theme : [],
Developer = sections.TryGetValue("Developer", out var developer) ? developer : [],
KnownIssues = sections.TryGetValue("KnownIssues", out var knownIssues) ? knownIssues : [],
Api = sections.TryGetValue("Api", out var api) ? api : [],
FeatureRequests = sections.TryGetValue("Feature Requests", out var frs) ? frs : [],
BlogPart = _markdown.Transform(blogPart.Trim()),
@ -376,6 +377,7 @@ public partial class VersionUpdaterService : IVersionUpdaterService
Fixed = parsedSections.TryGetValue("Fixed", out var fixes) ? fixes : [],
Theme = parsedSections.TryGetValue("Theme", out var theme) ? theme : [],
Developer = parsedSections.TryGetValue("Developer", out var developer) ? developer : [],
KnownIssues = parsedSections.TryGetValue("Known Issues", out var knownIssues) ? knownIssues : [],
Api = parsedSections.TryGetValue("Api", out var api) ? api : [],
FeatureRequests = parsedSections.TryGetValue("Feature Requests", out var frs) ? frs : [],
BlogPart = blogPart

View File

@ -280,6 +280,7 @@ public class Startup
// v0.8.5
await ManualMigrateBlacklistTableToSeries.Migrate(dataContext, logger);
await ManualMigrateInvalidBlacklistSeries.Migrate(dataContext, logger);
await ManualMigrateScrobbleErrors.Migrate(dataContext, logger);
// Update the version in the DB after all migrations are run
var installVersion = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallVersion);

View File

@ -18,6 +18,7 @@ export interface UpdateVersionEvent {
developer: Array<string>;
api: Array<string>;
featureRequests: Array<string>;
knownIssues: Array<string>;
/**
* The part above the changelog part
*/

View File

@ -16,6 +16,7 @@
<app-update-section [items]="update.removed" [title]="t('removed')"></app-update-section>
<app-update-section [items]="update.api" [title]="t('api')"></app-update-section>
<app-update-section [items]="update.featureRequests" [title]="t('feature-requests')"></app-update-section>
<app-update-section [items]="update.knownIssues" [title]="t('known-issues')"></app-update-section>
</div>
@if (showExtras) {

View File

@ -11,6 +11,8 @@
<div class="modal-footer">
<a class="btn btn-icon" [href]="updateUrl" target="_blank" rel="noopener noreferrer">{{t('help')}}</a>
<button type="button" class="btn {{updateData.isDocker ? 'btn-primary' : 'btn-secondary'}}" (click)="close()">{{t('close')}}</button>
<a *ngIf="!updateData.isDocker" href="{{updateData.updateUrl}}" class="btn btn-primary" target="_blank" rel="noopener noreferrer" (click)="close()">{{t('download')}}</a>
@if(!updateData.isDocker) {
<a href="{{updateData.updateUrl}}" class="btn btn-primary" target="_blank" rel="noopener noreferrer" (click)="close()">{{t('download')}}</a>
}
</div>
</ng-container>

View File

@ -657,7 +657,8 @@
"api": "API",
"published-label": "Published: ",
"installed": "{{changelog.installed}}",
"feature-requests": "Feature Requests"
"feature-requests": "Feature Requests",
"known-issues": "Known Issues"
},
"new-version-modal": {
@ -669,6 +670,7 @@
"developer": "{{changelog.developer}}",
"theme": "{{changelog.theme}}",
"removed": "{{changelog.removed}}",
"known-issues": "{{changelog.known-issues}}",
"api": "{{changelog.api}}",
"close": "{{common.close}}",
"refresh": "Refresh"