mirror of
https://github.com/Kareadita/Kavita.git
synced 2025-06-02 21:24:18 -04:00
* Initial Canary Push (#2055) * Added AniList Token * Implemented the ability to set your AniList token. License check is not in place. * Added a check that validates AniList token is still valid. As I build out more support, I will add more checks. * Refactored the code to validate the license before allowing UI control to be edited. * Started license server stuff, but may need to change approach. Hooked up ability to scrobble rating events to KavitaPlus API. * Hooked in the ability to sync Mark Series as Read/Unread * Fixed up unit tests and only scrobble when a full chapter is read naturally. * Fixed up the Scrobbling service * Tweak one of the queries * Started an idea for Scrobble History, might rework into generic TaskHistory. * AniList Token now has a validation check. * Implemented a mechanism such that events are persisted to the database, processed every X hours to the API layer, then deleted from the database. * Hooked in code for want to read so we only send what's important. Will migrate these to bulk calls to lessen strain on API server. * Added some todos. Need to take a break. * Hooked up the ability to backfill scrobble events after turning it on. * Started on integrating license key into the server and ability to turn off scrobbling at the library level. Added sync history table for scrobbling and other API based information. * Started writing to sync table * Refactored the migrations to flatten them. Started working a basic license add flow and added in some of the cache. Lots to do. * Ensure that when we backfill scrobble events, we respect if a library has scrobbling turned on or not. * Hooked up the ability to send when the series was started to be read * Refactored the UI to streamline and group KavitaPlus Account Forms. * Aligning with API * Fixed bad merge * Fixed up inputting a user license. * Hooked up a cron task that validates licenses every 4 hours and on startup. 
* Reworked how the update license code works so that we always update the cache and we handle removing license from user. * Cleaned up some UI code * UserDto now has if there is a valid license or not. It's not exposed though as there is no need to expose the license key ever. * Fixed a strange encoding issue with extra ". Started working on having the UI aware of the license information. Refactored all code to properly pass the correct license to the API layer. * There is a circular dependency in the code. Fixed some theme code which wasn't checking the right variable. Reworked the JWT interceptor to be better at handling async code. Lots of misc code changes, DI circular issue is still present. * Fixed the DI issue and moved all things that need bootstrapping to app.component. * Hooked up the ability to not have a donation button show up if the server default user/admin has a valid KavitaPlus license. * Refactored how we extract out ids from weblinks * Ensure if API fails, we don't delete the record. * Refactored how rate checks occur for scrobbling processing. * Lots of testing and ensuring rate limit doesn't get destroyed. * Ensure the media item is valid for that user's providers set. * Refactored the loop code into one method to keep things much cleaner * Lots of code to get the scrobbling streamlined and foolproof. Unknown series are now reported on the UI. * Prevent duplicates for scrobble errors. * Ensure we are sending the correct type to the Scrobble Provider * Ensure we send the date of the scrobble event for upstream to use. * Replaced the dedicated run backfilling of scrobble events to just trigger when setting the anilist token for the first time. Streamlined a lot of the code for adding your license to ensure user understands how it works. * Fixed a bug where scan series wasn't triggering word count or cover generation. * Started the plumbing for recommendations * Merge conflicts * Recommendation plumbing is nearly complete. 
* Setup response caching and general cleanup * Fixed UI not showing the recommendation tab * Switched to prod url * Fixed broken unit tests due to Hangfire not being setup for unit tests * Fixed branch selection (#2056) * Damn you GA (#2058) * Bump versions by dotnet-bump-version. * Fixed GA not pulling the right branch and removed unneeded building from version job (#2060) * Bump versions by dotnet-bump-version. * Canary Second (#2071) * Just started * Started building the user review card. Fixed Recommendations not having user progress on them. * Fixed a bug where scrobbling ratings wasn't working. * Added a temp ability to trigger scrobbling processing for testing. * Cleaned up the design of review card. Added a temp way to trigger scrobbling. * Fixed clear scrobbling errors and refactored so reviews now load from DB and is streamlined. * Refactored so edit review is now a single module component and editable from the series detail page. * Removed SyncHistory table as it's no longer needed. Refactored read events to properly update to the latest progress information. Refactored to a new way of clearing events, so that user's can see their scrobble history. * Fixed a bug where Anilist token wouldn't show as set due to some state issue * Added the ability to see your own scrobble events * Avoid a potential collision with recommendations. * Fixed an issue where when checking for a license on UI, it wouldn't force the check (in case server was down on first check). * External reviews are implemented. * Fixed unit tests * Bump versions by dotnet-bump-version. * Made the api url dynamic based on dev mode or not. (#2072) * Bump versions by dotnet-bump-version. * Canary Build 3 (#2079) * Updated reviews to have tagline support to match how Anilist has them. Cleaned up the KavitaPlus documentation and added a feature list. Review cards look much better. * Fixed up a NPE in scrobble event creation * Removed the ability to have images leak in the read more review card. 
Review's now show the user if they are a local user, else External. * Added caching to the reviews and recommendations that come from an external source. Max of 50MB will be used across whole instance. Entries are cached for 1 hour. * Reviews are looking much better * Added the ability for users to share their series reviews with other users on the server via a new opt-in mechanism. Fixed up some cache busting mechanism for reviews. * More review polish to align with better matching * Added the extra information for Recommendation matching. * Preview of the review is much cleaner now and the full body is styled better. * More anilist specific syntax * Fixed bad regex * Added the ability to bust cache. Spoilers are now implemented for reviews. Introduces: --review-spoiler-bg-color --review-spoiler-text-color * Bump versions by dotnet-bump-version. * Canary Build 4 (#2086) * Updated Kavita Plus feature list. Added a hover-over to the progress bars in the app to know exact percentage of reading for a chapter or series. * Added a button to go to external review. Changed how enums show in the documentation so you can see their string value too. Limited reviews to top 10 with proper ordering. Drastically cleaned up how we handle preview summary generation * Cleaned up the margin below review section * Fixed an issue where a processed scrobble event would get updated instead of a new event created. * By default, there is now a prompt on series review to add your own, which fills up the space nicely. Added the backend for Series Holds. * Scrobble History is now ordered by recent -> latest. Some minor cleanup in other files. * Added a simple way to see and toggle scrobble service from the series. * Fixed a bug where updating the user's last active time wasn't writing to database and causing a logout event. * Tweaked the registration email wording to be more clear for email field. * Improved OPDS Url generation and included using host name if defined. 
* Fixed the issues with choosing the correct series cover image. Added many unit tests to cover the edge cases. * Small cleanup * Fixed an issue where urls with , in them would break weblinks. * Fixed a bug where we weren't trying a png before we hit fallback for favicon parsing. * Ensure scrobbling tab isn't active without a license. Changed how updating user last active worked to supress more concurrency issues. * Fixed an issue where duplicate series could appear on newly added during a scan. * Bump versions by dotnet-bump-version. * Fixed a bad dto (#2087) * Bump versions by dotnet-bump-version. * Canary Build 4 (#2089) * New server-based auth is in place with the ability to register the instance. * Refactored to single install bound licensing. * Made the Kavita+ tab gold. * Change the JWTs to last 10 days. This is a self-hosted software and the usage doesn't need the level of 2 days expiration * Bump versions by dotnet-bump-version. * Canary Build 4 (#2090) * By default, a new library will only have scrobbling on if it's of type book or manga given current scrobble providers. * Started building out external reviews. * Added the ability to re-enter your license information. * Fixed side nav not extending enough * Fixed a bug with info cards * Integrated rating support, fixed review cards without a tagline, and misc fixes. * Streamlined where ratings are located on series detail page. * Aligned with other series lookups * Bump versions by dotnet-bump-version. * Canary Build 6 (#2092) * Cleaned up some messaging * Fixed up series detail * Cleanup * Bump versions by dotnet-bump-version. * Canary Build 6 (#2093) * Fixed scrobble token not being visible by default. * Added a loader for external reviews * Added the ability to edit series details (weblinks) from Scrobble Issues page. * Slightly lessened the focus on buttons * Fixed review cards so whenever you click your own review, it will open the edit modal. 
* Need for speed - Updated Kavita log to be much smaller and replaced all code ones with a 32x version. * Optimized a ton of our images to be much smaller and faster to load. * Added more MIME types for response compression * Edit Series modal name field should be readonly as it is directly mapped to file metadata or filename parsed. It shouldn't be changeable via the UI. * Removed the ability to update the Series name via Kavita UI/API as it is no longer editable. * Moved Image component to be standalone * Moved ReadMore component to be standalone * Moved PersonBadge component to be standalone * Moved IconAndTitle component to be standalone * Fixed some bugs with standalone. * Hooked in the ability to scrobble series reviews. * Refactored everything to use HashUtil token rather than InstallId. * Swapped over to a generated machine token and fixed an issue where after registering, the license would not say valid. * Added the missing migration for review scrobble events. * Clean up some wording around busting cache. * Fixed a bug where chapters within a volume could be unordered in the UI info screen. * Refactored to prepare for external series rendering on series detail. * Implemented external recs * Bump versions by dotnet-bump-version. * Canary Build 7 (#2097) * Aligned ExtractId to extract a long, since MAL id can be just that. * Fixed external series card not clicking correctly. Fixed a bug when extracting a Mal link. Fixed cancel button on license component. * Renamed user-license to license component given new direction for licensing. * Implemented card layout for recommendations * Moved more components over to be standalone and removed pipes module. This is going to take some time for sure. * Removed Cards and SharedCardsSideNav and SideNav over to standalone. This has been shaken out. * Cleaned up a bunch of extra space on reading list detail page. * Fixed rating popover not having a black triangle. 
* When checking license, show a loading indicator for validity icon. * Cache size can now be changed by admins if they want to give more memory for better browsing. * Added LastReadTime * Cleanup the scrobbling control text for Library Settings. * Fixed yet another edge case for getting series cover image where first volume is higher than 1 and the rest is just loose leaf chapters. * Changed OPDS Content Type to be application/atom+xml to align better with the spec. * Fixed unit tests * Bump versions by dotnet-bump-version. * Canary Build 7 (#2098) * Fixed the percentage readout on card item progress bar * Ensure scrobble control is always visible * Review card could show person icon in tablet viewport. * Changed how the ServerToken for node locking works as docker was giving different results each time. * After we update series metadata, bust cache * License componet cleanup on the styles * Moved license to admin module and removed feature modal as wiki is much easier to maintain. * Bump versions by dotnet-bump-version. * Canary Build 8 (#2100) * Fixed a very slight amount of the active nav tag bleeding outside the border radius * Switched how we count words in epub to handle languages that don't have spaces. * Updated dependencies and fixed a series cover image on list item view for recs. * Fixed a bug where external recs werent showing summary of the series. * Rewrote the rec loop to be cleaner * Added the ability to see series summary on series detail page on list view. Changed Scrobble Event page to show in server time and not utc. * Added tons of output to identify why unraid generates a new fingerprint each time. * Refactored scrobble event table to have filtering and pagination support. Fixed a few bad template issues and fixed loading scrobbling tab on refresh of page. * Aligned a few apis to use a default pagination rather than a higher level one. * Undo OPDS change as Chunky/Panels break. 
* Moved the holds code around * Don't show an empty review for the user, it eats up uneeded space and is ugly. * Cleaned up the review code * Fixed a bug with arrow on sortable table header. * More scrobbling debug information to ensure events are being processed correctly. * Applied a ton of code cleanup build warnings * Enhanced rec matching by prioritizing matching on weblinks before falling back to name matching. * Fixed the calculation of word count for epubs. * Bump versions by dotnet-bump-version. * Canary Build 9 (#2104) * Added another unit test * Changed how we create cover images to force the aspect ratio, which allows for Kavita to do some extra work later down the line. Prevents skewing from comic sources. * Code cleanup * Updated signatures to explicitly indicate they return a physical file. * Refactored the GA to be a bit more streamlined. * Fixed up how after cover conversion, how we refresh volume and series image links. * Undid the PhysicalFileResult stuff. * Fixed an issue in the epub reader where html tags within an anchor could break the navigation code for inner-links. * Fixed a bug in GetContinueChapter where a special could appear ahead of a loose leaf chapter. * Optimized aspect ratios for custom library images to avoid shift layout. Moved the series detail page down a bit to be inline with first row of actionables. * Finally fixed the media conversion issue where volumes and series wouldn't get their file links updated. * Added some new layout for license to allow a user to buy a sub after their last sub expired. * Added more metrics for fingerprinting to test on docker. * Tried to fix a bug with getnextchapter looping incorrectly, but unable to solve. * Cleanup some UI stuff to reduce bad calls. * Suppress annoying issues with reaching K+ when it's down (only affects local builds) * Fixed an edge case bug for picking the correct cover image for a series. * Fixed a bug where typeahead x wouldn't clear out the input field. 
* Renamed Clear -> Reset for metadata filter to be more informative of its function. * Don't allow duplicates for reading list characters. * Fixed a bug where when calculating recently updated, series with the same name but different libraries could get grouped. * Fixed an issue with fit to height where there could still be a small amount of scroll due to a timing issue with the image loading. * Don't show a loading if the user doesn't have a license for external ratings * Fixed bad stat url * Fixed up licensing to make it so you have to email me to get a sub renewed. * Updated deps * When scrobbling reading events, recalculate the highest chapter/volume during processing. * Code cleanup * Disabled some old test code that is likely not needed as it breaks a lot on netvips updates * Bump versions by dotnet-bump-version. * Canary Build 10 (#2105) * Aligned fingerprint to be unique * Updated email button to have a template * Fixed inability to progress to next chapter when last page is a spread and user is using split rendering. * Attempted fix at the column reader cutting off parts of the words. Can't fully reproduce, but added a bit of padding to help. * Aligned AniList icon to match that of weblinks. * Bump versions by dotnet-bump-version. * Canary Build 11 (#2108) * Fixed an issue with continuous reader in manga reader. * Aligned KavitaPlus->Kavita+ * Updated the readme * Adjusted first time registration messaging. * Fixed a bug where having just one type of weblink could cause a bad recommendation lookup * Removed manual invocation of scrobbling as testing is over for that feature. * Fixed a bad observerable for downloading logs from browser. * Don't get reviews/recs for comic libraries. Override user selection for scrobbling on Comics since there are no places to scrobble to. * Added a migration so all existing comic libraries will have scrobbling turned off. * Don't allow the UI to toggle scrobbling on a library with no providers. 
* Refactored the code to not throw generic 500 toasts on the UI. Added the ability to clear your license on Kavita side. * Converted reader settings to new accordion format. * Converted user preferences to new accordion format. * I couldn't convert CBL Reading modal to new accordion directives due to some weird bug. * Migrated the whole application to standalone components. This fixes the download progress bar not showing up. * Hooked up the ability to have reading list generate random items. Removed the old code as it's no longer needed. * Added random covers for collection's as well. * Added a speed up to not regenerate merged covers if we've already created them. * Fixed an issue where tooltips weren't styled correctly after updating a library. Migrated Library access modal to OnPush. * Fixed broken table styling. Fixed grid breakpoint css variables not using the ones from variables due to a missing import. * Misc fixes around tables and some api doc cleanup * Fixed a bug where when switching from webtoon back to a non-webtoon reading mode, if the browser size isn't large enough for double, the reader wouldn't go to single mode. * When combining external recs, normalize names to filter out differences, like capitalization. * Finally get to update ExCSS to the latest version! This adds much more css properties for epubs. * Ensure rejected reviews are saved as errors * A crap ton of code cleanup * Cleaned up some equality code in GenreHelper.cs * Fixed up the table styling after the bootstrap update changed it. * Bump versions by dotnet-bump-version. * Canary Build 12 (#2111) * Aligned GA (#2059) * Fixed the code around merging images to resize them. This will only look correct if this release's cover generation runs. * Misc code cleanup * Fixed an issue with epub column layout cutting off text * Collection detail page will now default sort by sort name. * Explicitly lazy load library icon images. 
* Make sure the full error message can be passed to the license component/user. * Use WhereIf in some places * Changed the hash util code for unraid again * Fixed up an issue with split render mode where last page wouldn't move into the next chapter. * Bump versions by dotnet-bump-version. * Don't ask me how, but i think I fixed the epub cutoff issue (#2112) * Bump versions by dotnet-bump-version. * Canary 14 (#2113) * Switched how we build the unraid fingerprint. * Fixed a bit of space below the image on fit to height * Removed some bad code * Bump versions by dotnet-bump-version. * Canary Build 15 (#2114) * When performing a scan series, force a recount of words/pages to ensure read time gets updated. * Fixed broken download logs button (develop) * Sped up the query for getting libraries and added caching for that api, which is helpful for users with larger library counts. * Fixed an issue in directory picker where if you had two folders with the same name, the 2nd to last wouldn't be clickable. * Added more destroy ref stuff. * Switched the buy/manage links over to be environment specific. * Bump versions by dotnet-bump-version. * Canary Build 16 (#2115) * Added the promo code for K+ and version bump. * Don't show see more if there isn't more to see on series detail. * Bump versions by dotnet-bump-version. * Last Build (#2116) * Merge * Close the view after removing a license key from server. * Bump versions by dotnet-bump-version. * Reset version to v0.7.4 for merge.
814 lines
35 KiB
C#
814 lines
35 KiB
C#
using System;
|
|
using System.Collections.Generic;
|
|
using System.Collections.Immutable;
|
|
using System.Linq;
|
|
using System.Net.Http;
|
|
using System.Threading.Tasks;
|
|
using API.Data;
|
|
using API.Data.Repositories;
|
|
using API.DTOs.Filtering;
|
|
using API.DTOs.Scrobbling;
|
|
using API.Entities;
|
|
using API.Entities.Enums;
|
|
using API.Entities.Scrobble;
|
|
using API.Helpers;
|
|
using API.SignalR;
|
|
using Flurl.Http;
|
|
using Hangfire;
|
|
using Kavita.Common;
|
|
using Kavita.Common.EnvironmentInfo;
|
|
using Kavita.Common.Helpers;
|
|
using Microsoft.Extensions.Logging;
|
|
|
|
namespace API.Services.Plus;
|
|
|
|
/// <summary>
/// An external service that Kavita can scrobble (push reading/rating/review events) to.
/// </summary>
public enum ScrobbleProvider
{
    AniList = 1
}
|
|
|
|
public interface IScrobblingService
{
    /// <summary>
    /// Validates every user's external provider access token (currently AniList) and notifies
    /// users whose token has expired via a SignalR event.
    /// </summary>
    Task CheckExternalAccessTokens();
    /// <summary>
    /// Returns true if the user's token for <paramref name="provider"/> is missing/expired.
    /// Also emits a ScrobblingKeyExpired event to that user when expired.
    /// </summary>
    Task<bool> HasTokenExpired(int userId, ScrobbleProvider provider);
    /// <summary>
    /// Queues a ScoreUpdated scrobble event for the series (requires an active Kavita+ license).
    /// </summary>
    Task ScrobbleRatingUpdate(int userId, int seriesId, int rating);
    /// <summary>
    /// Queues a Review scrobble event for the series (requires an active Kavita+ license).
    /// </summary>
    Task ScrobbleReviewUpdate(int userId, int seriesId, string reviewTitle, string reviewBody);
    /// <summary>
    /// Queues a ChapterRead scrobble event reflecting the user's highest fully-read volume/chapter.
    /// </summary>
    Task ScrobbleReadingUpdate(int userId, int seriesId);
    /// <summary>
    /// Queues a Want-To-Read add/remove scrobble event.
    /// </summary>
    Task ScrobbleWantToReadUpdate(int userId, int seriesId, bool onWantToRead);

    // Hangfire-scheduled maintenance tasks. NOTE(review): 60 * 60 * 60 = 216,000 — presumably
    // seconds (60 hours); confirm the intended lock duration for DisableConcurrentExecution.
    [DisableConcurrentExecution(60 * 60 * 60)]
    [AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
    Task ClearProcessedEvents();

    [DisableConcurrentExecution(60 * 60 * 60)]
    [AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
    Task ProcessUpdatesSinceLastSync();

    /// <summary>
    /// Backfills scrobble events from the user's existing reading history (0 = all users).
    /// </summary>
    Task CreateEventsFromExistingHistory(int userId = 0);
}
|
|
|
|
public class ScrobblingService : IScrobblingService
|
|
{
|
|
private readonly IUnitOfWork _unitOfWork;
private readonly ITokenService _tokenService;
private readonly IEventHub _eventHub;
private readonly ILogger<ScrobblingService> _logger;
private readonly ILicenseService _licenseService;

// Weblink prefixes used to extract provider-specific series ids from Series.Metadata.WebLinks.
public const string AniListWeblinkWebsite = "https://anilist.co/manga/";
public const string MalWeblinkWebsite = "https://myanimelist.net/manga/";

// Maps a weblink prefix to an extraction offset. Not referenced in this chunk;
// presumably consumed by ExtractId elsewhere in this service — verify.
private static readonly IDictionary<string, int> WeblinkExtractionMap = new Dictionary<string, int>()
{
    {AniListWeblinkWebsite, 0},
    {MalWeblinkWebsite, 0},
};

// Delay (ms) between outbound scrobble calls.
private const int ScrobbleSleepTime = 700; // We can likely tie this to AniList's 90 rate / min ((60 * 1000) / 90)

// Providers eligible per library type. Comics currently have no scrobble target.
private static readonly IList<ScrobbleProvider> BookProviders = new List<ScrobbleProvider>()
{
    ScrobbleProvider.AniList
};
private static readonly IList<ScrobbleProvider> ComicProviders = new List<ScrobbleProvider>();
private static readonly IList<ScrobbleProvider> MangaProviders = new List<ScrobbleProvider>()
{
    ScrobbleProvider.AniList
};
|
|
|
|
|
|
public ScrobblingService(IUnitOfWork unitOfWork, ITokenService tokenService,
    IEventHub eventHub, ILogger<ScrobblingService> logger, ILicenseService licenseService)
{
    _unitOfWork = unitOfWork;
    _tokenService = tokenService;
    _eventHub = eventHub;
    _logger = logger;
    _licenseService = licenseService;

    // Configures Flurl's client for the Kavita+ base URL with UntrustedCertClientFactory —
    // presumably so requests succeed against a self-signed/untrusted certificate; verify.
    FlurlHttp.ConfigureClient(Configuration.KavitaPlusApiUrl, cli =>
        cli.Settings.HttpClientFactory = new UntrustedCertClientFactory());
}
|
|
|
|
|
|
/// <summary>
/// Scans every user's AniList token and notifies any user whose token has expired.
/// </summary>
/// <remarks>This service can validate without license check as the task which calls will be guarded</remarks>
/// <returns></returns>
public async Task CheckExternalAccessTokens()
{
    // Validate AniList
    var allUsers = await _unitOfWork.UserRepository.GetAllUsersAsync();
    foreach (var appUser in allUsers)
    {
        var aniListToken = appUser.AniListAccessToken;
        if (string.IsNullOrEmpty(aniListToken)) continue;
        if (!_tokenService.HasTokenExpired(aniListToken)) continue;

        // Token is set but expired -> let the user know so they can re-authenticate
        await _eventHub.SendMessageToAsync(MessageFactory.ScrobblingKeyExpired,
            MessageFactory.ScrobblingKeyExpiredEvent(ScrobbleProvider.AniList), appUser.Id);
    }
}
|
|
|
|
/// <summary>
/// Checks whether the user's token for the given provider has expired; emits a
/// ScrobblingKeyExpired event to the user when it has.
/// </summary>
public async Task<bool> HasTokenExpired(int userId, ScrobbleProvider provider)
{
    var accessToken = await GetTokenForProvider(userId, provider);

    var expired = await HasTokenExpired(accessToken, provider);
    if (!expired) return false;

    // NOTE: Should this side effect be here?
    await _eventHub.SendMessageToAsync(MessageFactory.ScrobblingKeyExpired,
        MessageFactory.ScrobblingKeyExpiredEvent(ScrobbleProvider.AniList), userId);
    return true;
}
|
|
|
|
/// <summary>
/// Returns true when the token is expired. A token that is empty or not yet expired per the
/// local JWT check returns false; otherwise the Kavita+ API is consulted to confirm validity.
/// </summary>
/// <remarks>Any failure reaching the Kavita+ API is treated as expired (returns true).</remarks>
private async Task<bool> HasTokenExpired(string token, ScrobbleProvider provider)
{
    // No token, or the local JWT expiry check says it's still good -> not expired
    if (string.IsNullOrEmpty(token) ||
        !_tokenService.HasTokenExpired(token)) return false;

    // Without a license we cannot ask Kavita+; consider the token expired
    var license = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey);
    if (string.IsNullOrEmpty(license.Value)) return true;

    try
    {
        var response = await (Configuration.KavitaPlusApiUrl + "/api/scrobbling/valid-key?provider=" + provider + "&key=" + token)
            .WithHeader("Accept", "application/json")
            .WithHeader("User-Agent", "Kavita")
            .WithHeader("x-license-key", license.Value)
            .WithHeader("x-installId", HashUtil.ServerToken())
            .WithHeader("x-kavita-version", BuildInfo.Version)
            .WithHeader("Content-Type", "application/json")
            .WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
            .GetStringAsync();

        return bool.Parse(response);
    }
    catch (Exception e)
    {
        // Previously HttpRequestException had its own catch with an identical body;
        // both cases are handled the same way, so a single catch suffices.
        _logger.LogError(e, "An error happened during the request to Kavita+ API");
    }

    return true;
}
|
|
|
|
/// <summary>
/// Looks up the user's access token for the given provider. Returns null when the user
/// doesn't exist, and an empty string for a provider with no stored token.
/// </summary>
private async Task<string> GetTokenForProvider(int userId, ScrobbleProvider provider)
{
    var appUser = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
    if (appUser == null) return null;

    switch (provider)
    {
        case ScrobbleProvider.AniList:
            return appUser.AniListAccessToken;
        default:
            return string.Empty;
    }
}
|
|
|
|
/// <summary>
/// Queues a Review scrobble event for the series. If an unprocessed Review event already
/// exists for this user/series, its title/body are updated in place instead of creating a duplicate.
/// </summary>
/// <exception cref="KavitaException">AniList credentials expired/not set, or series not found</exception>
public async Task ScrobbleReviewUpdate(int userId, int seriesId, string reviewTitle, string reviewBody)
{
    // Scrobbling is a Kavita+ feature
    if (!await _licenseService.HasActiveLicense()) return;
    var token = await GetTokenForProvider(userId, ScrobbleProvider.AniList);
    if (await HasTokenExpired(token, ScrobbleProvider.AniList))
    {
        throw new KavitaException("AniList Credentials have expired or not set");
    }

    var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId, SeriesIncludes.Metadata | SeriesIncludes.Library);
    if (series == null) throw new KavitaException("Series not found");
    // Respect the per-library scrobbling opt-out
    var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(series.LibraryId);
    if (library is not {AllowScrobbling: true}) return;

    var existingEvt = await _unitOfWork.ScrobbleRepository.GetEvent(userId, series.Id,
        ScrobbleEventType.Review);
    if (existingEvt is {IsProcessed: false})
    {
        // Fixed log template: the updated side was missing the '/' separator present on the left
        _logger.LogDebug("Overriding scrobble event for {Series} from Review {Tagline}/{Body} -> {UpdatedTagline}/{UpdatedBody}",
            existingEvt.Series.Name, existingEvt.ReviewTitle, existingEvt.ReviewBody, reviewTitle, reviewBody);
        existingEvt.ReviewBody = reviewBody;
        existingEvt.ReviewTitle = reviewTitle;
        _unitOfWork.ScrobbleRepository.Update(existingEvt);
        await _unitOfWork.CommitAsync();
        return;
    }

    var evt = new ScrobbleEvent()
    {
        SeriesId = series.Id,
        LibraryId = series.LibraryId,
        ScrobbleEventType = ScrobbleEventType.Review,
        AniListId = (int?) ExtractId(series.Metadata.WebLinks, AniListWeblinkWebsite),
        MalId = ExtractId(series.Metadata.WebLinks, MalWeblinkWebsite),
        AppUserId = userId,
        Format = LibraryTypeHelper.GetFormat(series.Library.Type),
        ReviewBody = reviewBody,
        ReviewTitle = reviewTitle
    };
    _unitOfWork.ScrobbleRepository.Attach(evt);
    await _unitOfWork.CommitAsync();
    _logger.LogDebug("Added Scrobbling Review update on {SeriesName} with Userid {UserId} ", series.Name, userId);
}
|
|
|
|
/// <summary>
/// Queues a ScoreUpdated scrobble event for the series. If an unprocessed rating event already
/// exists for this user/series, its rating is refreshed in place instead of creating a duplicate.
/// </summary>
/// <exception cref="KavitaException">AniList credentials expired/not set, or series not found</exception>
public async Task ScrobbleRatingUpdate(int userId, int seriesId, int rating)
{
    // Scrobbling is a Kavita+ feature
    if (!await _licenseService.HasActiveLicense()) return;

    var aniListToken = await GetTokenForProvider(userId, ScrobbleProvider.AniList);
    if (await HasTokenExpired(aniListToken, ScrobbleProvider.AniList))
    {
        throw new KavitaException("AniList Credentials have expired or not set");
    }

    var targetSeries = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId, SeriesIncludes.Metadata | SeriesIncludes.Library);
    if (targetSeries == null) throw new KavitaException("Series not found");

    // Respect the per-library scrobbling opt-out
    var seriesLibrary = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(targetSeries.LibraryId);
    if (seriesLibrary is not {AllowScrobbling: true}) return;

    var pendingEvent = await _unitOfWork.ScrobbleRepository.GetEvent(userId, targetSeries.Id,
        ScrobbleEventType.ScoreUpdated);
    if (pendingEvent != null && !pendingEvent.IsProcessed)
    {
        // An unprocessed event already exists — just refresh its rating
        _logger.LogDebug("Overriding scrobble event for {Series} from Rating {Rating} -> {UpdatedRating}",
            pendingEvent.Series.Name, pendingEvent.Rating, rating);
        pendingEvent.Rating = rating;
        _unitOfWork.ScrobbleRepository.Update(pendingEvent);
        await _unitOfWork.CommitAsync();
        return;
    }

    var newEvent = new ScrobbleEvent()
    {
        SeriesId = targetSeries.Id,
        LibraryId = targetSeries.LibraryId,
        ScrobbleEventType = ScrobbleEventType.ScoreUpdated,
        AniListId = (int?) ExtractId(targetSeries.Metadata.WebLinks, AniListWeblinkWebsite),
        MalId = ExtractId(targetSeries.Metadata.WebLinks, MalWeblinkWebsite),
        AppUserId = userId,
        Format = LibraryTypeHelper.GetFormat(targetSeries.Library.Type),
        Rating = rating
    };
    _unitOfWork.ScrobbleRepository.Attach(newEvent);
    await _unitOfWork.CommitAsync();
    _logger.LogDebug("Added Scrobbling Rating update on {SeriesName} with Userid {UserId} ", targetSeries.Name, userId);
}
|
|
|
|
/// <summary>
/// Queues a ChapterRead scrobble event for a series, recording the user's highest fully-read
/// volume and chapter (as reported by AppUserProgressRepository). If an unprocessed event already
/// exists for this user/series, it is updated in place instead of queueing a duplicate.
/// </summary>
/// <param name="userId">User whose reading progress is being scrobbled</param>
/// <param name="seriesId">Series the progress belongs to</param>
/// <exception cref="KavitaException">Thrown when the AniList token has expired/is not set, or the series cannot be found</exception>
public async Task ScrobbleReadingUpdate(int userId, int seriesId)
{
    // Scrobbling is a Kavita+ feature; bail silently without a license
    if (!await _licenseService.HasActiveLicense()) return;
    var token = await GetTokenForProvider(userId, ScrobbleProvider.AniList);
    if (await HasTokenExpired(token, ScrobbleProvider.AniList))
    {
        throw new KavitaException("AniList Credentials have expired or not set");
    }

    var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId, SeriesIncludes.Metadata | SeriesIncludes.Library);
    if (series == null) throw new KavitaException("Series not found");
    // A user hold on a series opts it out of scrobbling entirely
    if (await _unitOfWork.UserRepository.HasHoldOnSeries(userId, seriesId))
    {
        _logger.LogInformation("Series {SeriesName} is on UserId {UserId}'s hold list. Not scrobbling", series.Name, userId);
        return;
    }
    // Scrobbling can be disabled per-library
    var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(series.LibraryId);
    if (library is not {AllowScrobbling: true}) return;

    var existingEvt = await _unitOfWork.ScrobbleRepository.GetEvent(userId, series.Id,
        ScrobbleEventType.ChapterRead);
    if (existingEvt is {IsProcessed: false})
    {
        // A pending (unprocessed) event already exists: refresh its Volume/Chapter numbers
        // rather than creating a second event for the same series
        var prevChapter = $"{existingEvt.ChapterNumber}";
        var prevVol = $"{existingEvt.VolumeNumber}";

        existingEvt.VolumeNumber =
            await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadVolumeForSeries(seriesId, userId);
        existingEvt.ChapterNumber =
            await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadChapterForSeries(seriesId, userId);
        _unitOfWork.ScrobbleRepository.Update(existingEvt);
        await _unitOfWork.CommitAsync();
        _logger.LogDebug("Overriding scrobble event for {Series} from vol {PrevVol} ch {PrevChap} -> vol {UpdatedVol} ch {UpdatedChap}",
            existingEvt.Series.Name, prevVol, prevChapter, existingEvt.VolumeNumber, existingEvt.ChapterNumber);
        return;
    }

    try
    {
        var evt = new ScrobbleEvent()
        {
            SeriesId = series.Id,
            LibraryId = series.LibraryId,
            ScrobbleEventType = ScrobbleEventType.ChapterRead,
            // Provider ids are parsed out of the series' weblinks metadata
            AniListId = (int?) ExtractId(series.Metadata.WebLinks, AniListWeblinkWebsite),
            MalId = ExtractId(series.Metadata.WebLinks, MalWeblinkWebsite),
            AppUserId = userId,
            VolumeNumber =
                await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadVolumeForSeries(seriesId, userId),
            ChapterNumber =
                await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadChapterForSeries(seriesId, userId),
            Format = LibraryTypeHelper.GetFormat(series.Library.Type),
        };
        _unitOfWork.ScrobbleRepository.Attach(evt);
        await _unitOfWork.CommitAsync();
        _logger.LogDebug("Added Scrobbling Read update on {SeriesName} with Userid {UserId} ", series.Name, userId);
    }
    catch (Exception ex)
    {
        // Best-effort: a failed save should not break the reading flow that triggered this
        _logger.LogError(ex, "There was an issue when saving scrobble read event");
    }
}
|
|
|
|
/// <summary>
/// Queues a scrobble event recording that a series was added to (or removed from) the
/// user's Want To Read list. Skips the event entirely if an identical one is already queued.
/// </summary>
/// <param name="userId">User whose Want To Read list changed</param>
/// <param name="seriesId">Series that was added/removed</param>
/// <param name="onWantToRead">True when added to the list, false when removed</param>
/// <exception cref="KavitaException">Thrown when the AniList token has expired/is not set, or the series cannot be found</exception>
public async Task ScrobbleWantToReadUpdate(int userId, int seriesId, bool onWantToRead)
{
    // Kavita+ feature gate
    if (!await _licenseService.HasActiveLicense()) return;

    var token = await GetTokenForProvider(userId, ScrobbleProvider.AniList);
    if (await HasTokenExpired(token, ScrobbleProvider.AniList))
    {
        throw new KavitaException("AniList Credentials have expired or not set");
    }

    var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId, SeriesIncludes.Metadata | SeriesIncludes.Library);
    if (series == null) throw new KavitaException("Series not found");

    var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(series.LibraryId);
    if (library is not {AllowScrobbling: true}) return;

    // Resolve the event type once; it's needed both for the duplicate check and the new event
    var eventType = onWantToRead ? ScrobbleEventType.AddWantToRead : ScrobbleEventType.RemoveWantToRead;
    var alreadyQueued = await _unitOfWork.ScrobbleRepository.Exists(userId, series.Id, eventType);
    if (alreadyQueued) return;

    var evt = new ScrobbleEvent()
    {
        SeriesId = series.Id,
        LibraryId = series.LibraryId,
        ScrobbleEventType = eventType,
        AniListId = (int?) ExtractId(series.Metadata.WebLinks, AniListWeblinkWebsite),
        MalId = ExtractId(series.Metadata.WebLinks, MalWeblinkWebsite),
        AppUserId = userId,
        Format = LibraryTypeHelper.GetFormat(series.Library.Type),
    };

    _unitOfWork.ScrobbleRepository.Attach(evt);
    await _unitOfWork.CommitAsync();
    _logger.LogDebug("Added Scrobbling WantToRead update on {SeriesName} with Userid {UserId} ", series.Name, userId);
}
|
|
|
|
/// <summary>
/// Queries the Kavita+ API for the number of scrobble calls the user's AniList token has remaining.
/// </summary>
/// <param name="license">Kavita+ license key, sent via the x-license-key header</param>
/// <param name="aniListToken">User's AniList access token, passed as a query parameter</param>
/// <returns>Remaining rate; 0 on any failure (network error or unparsable payload)</returns>
private async Task<int> GetRateLimit(string license, string aniListToken)
{
    try
    {
        // Escape the token so reserved characters cannot corrupt the query string
        var response = await (Configuration.KavitaPlusApiUrl + "/api/scrobbling/rate-limit?accessToken=" + Uri.EscapeDataString(aniListToken))
            .WithHeader("Accept", "application/json")
            .WithHeader("User-Agent", "Kavita")
            .WithHeader("x-license-key", license)
            .WithHeader("x-installId", HashUtil.ServerToken())
            .WithHeader("x-kavita-version", BuildInfo.Version)
            .WithHeader("Content-Type", "application/json")
            .WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
            .GetStringAsync();

        // Treat an unparsable payload as "no rate remaining" rather than throwing
        // (previously int.Parse would throw and fall into the catch anyway)
        return int.TryParse(response, out var rate) ? rate : 0;
    }
    catch (Exception e)
    {
        _logger.LogError(e, "An error happened during the request to Kavita+ API");
    }

    return 0;
}
|
|
|
|
/// <summary>
/// Sends a single scrobble event to the Kavita+ API and interprets the response.
/// Known failure messages ("Too Many Requests", "Unknown Series", "Review...") get special handling;
/// any unsuccessful response ultimately throws a KavitaException.
/// </summary>
/// <param name="data">Payload describing the scrobble event</param>
/// <param name="license">Kavita+ license key, sent via the x-license-key header</param>
/// <param name="evt">The originating event, used for ScrobbleError bookkeeping</param>
/// <returns>The remaining rate limit reported by the API</returns>
/// <exception cref="KavitaException">On any unsuccessful response, or a 500 from the API</exception>
/// <exception cref="FlurlHttpException">Rethrown for non-500 transport failures so callers can stop processing</exception>
private async Task<int> PostScrobbleUpdate(ScrobbleDto data, string license, ScrobbleEvent evt)
{
    try
    {
        var response = await (Configuration.KavitaPlusApiUrl + "/api/scrobbling/update")
            .WithHeader("Accept", "application/json")
            .WithHeader("User-Agent", "Kavita")
            .WithHeader("x-license-key", license)
            .WithHeader("x-installId", HashUtil.ServerToken())
            .WithHeader("x-kavita-version", BuildInfo.Version)
            .WithHeader("Content-Type", "application/json")
            .WithTimeout(TimeSpan.FromSeconds(Configuration.DefaultTimeOutSecs))
            .PostJsonAsync(data)
            .ReceiveJson<ScrobbleResponseDto>();

        if (!response.Successful)
        {
            // Might want to log this under ScrobbleError
            if (response.ErrorMessage != null && response.ErrorMessage.Contains("Too Many Requests"))
            {
                // Back off for a minute to regain rate, then still fall through to the throw below
                _logger.LogInformation("Hit Too many requests, sleeping to regain requests");
                await Task.Delay(TimeSpan.FromMinutes(1));
            } else if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unknown Series"))
            {
                // Log the Series name and Id in ScrobbleErrors (only once per series)
                _logger.LogInformation("Kavita+ was unable to match the series");
                if (!await _unitOfWork.ScrobbleRepository.HasErrorForSeries(evt.SeriesId))
                {
                    // NOTE(review): the error is only Attached here, not committed — presumably
                    // persisted by a later CommitAsync in the caller's save loop; verify
                    _unitOfWork.ScrobbleRepository.Attach(new ScrobbleError()
                    {
                        Comment = "Unknown Series",
                        Details = data.SeriesName,
                        LibraryId = evt.LibraryId,
                        SeriesId = evt.SeriesId
                    });
                }
            } else if (response.ErrorMessage != null && response.ErrorMessage.StartsWith("Review"))
            {
                // Log the Series name and Id in ScrobbleErrors
                _logger.LogInformation("Kavita+ was unable to save the review");
                if (!await _unitOfWork.ScrobbleRepository.HasErrorForSeries(evt.SeriesId))
                {
                    _unitOfWork.ScrobbleRepository.Attach(new ScrobbleError()
                    {
                        Comment = response.ErrorMessage,
                        Details = data.SeriesName,
                        LibraryId = evt.LibraryId,
                        SeriesId = evt.SeriesId
                    });
                }
            }

            // Every unsuccessful response ends in a throw; callers treat this as a per-event failure
            _logger.LogError("Scrobbling failed due to {ErrorMessage}: {SeriesName}", response.ErrorMessage, data.SeriesName);
            throw new KavitaException($"Scrobbling failed due to {response.ErrorMessage}: {data.SeriesName}");
        }

        return response.RateLeft;
    }
    catch (FlurlHttpException ex)
    {
        _logger.LogError("Scrobbling to Kavita+ API failed due to error: {ErrorMessage}", ex.Message);
        // A 500 is treated as a bad payload (likely an unmatched series) rather than an outage
        if (ex.Message.Contains("Call failed with status code 500 (Internal Server Error)"))
        {
            if (!await _unitOfWork.ScrobbleRepository.HasErrorForSeries(evt.SeriesId))
            {
                _unitOfWork.ScrobbleRepository.Attach(new ScrobbleError()
                {
                    Comment = "Unknown Series",
                    Details = data.SeriesName,
                    LibraryId = evt.LibraryId,
                    SeriesId = evt.SeriesId
                });
            }
            throw new KavitaException("Bad payload from Scrobble Provider");
        }
        // Any other transport failure is rethrown so processing can stop (API likely down)
        throw;
    }
}
|
|
|
|
/// <summary>
/// This will back fill events from existing progress history, ratings, reviews, and want to read
/// for users that have a valid license. Used when scrobbling is first enabled so prior activity is synced.
/// </summary>
/// <param name="userId">Defaults to 0 meaning all users. Allows a userId to be set if a scrobble key is added to a user</param>
public async Task CreateEventsFromExistingHistory(int userId = 0)
{
    // LibraryId -> whether scrobbling is allowed; used to filter every backfill source below
    var libAllowsScrobbling = (await _unitOfWork.LibraryRepository.GetLibrariesAsync())
        .ToDictionary(lib => lib.Id, lib => lib.AllowScrobbling);

    // userId == 0 acts as a wildcard for all users
    var userIds = (await _unitOfWork.UserRepository.GetAllUsersAsync())
        .Where(l => userId == 0 || userId == l.Id)
        .Select(u => u.Id);
    foreach (var uId in userIds)
    {
        // Re-check the license each iteration in case it lapses mid-backfill
        if (!await _licenseService.HasActiveLicense()) continue;

        // Backfill Want To Read entries
        var wantToRead = await _unitOfWork.SeriesRepository.GetWantToReadForUserAsync(uId);
        foreach (var wtr in wantToRead)
        {
            if (!libAllowsScrobbling[wtr.LibraryId]) continue;
            await ScrobbleWantToReadUpdate(uId, wtr.Id, true);
        }

        // Backfill existing ratings
        var ratings = await _unitOfWork.UserRepository.GetSeriesWithRatings(uId);
        foreach (var rating in ratings)
        {
            if (!libAllowsScrobbling[rating.Series.LibraryId]) continue;
            await ScrobbleRatingUpdate(uId, rating.SeriesId, rating.Rating);
        }

        // Backfill existing reviews
        var reviews = await _unitOfWork.UserRepository.GetSeriesWithReviews(uId);
        foreach (var review in reviews)
        {
            if (!libAllowsScrobbling[review.Series.LibraryId]) continue;
            await ScrobbleReviewUpdate(uId, review.SeriesId, review.Tagline, review.Review);
        }

        // Backfill reading progress: all read/in-progress series across scrobble-enabled libraries
        // (libraryId 0 = all libraries, then constrained by the Libraries filter)
        var seriesWithProgress = await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(0, uId,
            new UserParams(), new FilterDto()
            {
                ReadStatus = new ReadStatus()
                {
                    Read = true,
                    InProgress = true,
                    NotRead = false
                },
                Libraries = libAllowsScrobbling.Keys.Where(k => libAllowsScrobbling[k]).ToList()
            });

        foreach (var series in seriesWithProgress)
        {
            await ScrobbleReadingUpdate(uId, series.Id);
        }

    }
}
|
|
|
|
/// <summary>
/// Scheduled cleanup: deletes scrobble events that were marked processed more than 7 days ago.
/// </summary>
[DisableConcurrentExecution(60 * 60 * 60)]
[AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public async Task ClearProcessedEvents()
{
    // Keep a week of processed history around for debugging/inspection
    const int daysToKeep = 7;
    var staleEvents = await _unitOfWork.ScrobbleRepository.GetProcessedEvents(daysToKeep);
    _unitOfWork.ScrobbleRepository.Remove(staleEvents);
    await _unitOfWork.CommitAsync();
}
|
|
|
|
/// <summary>
/// This is a task that is ran on a fixed schedule (every few hours or every day) that clears out the scrobble event table
/// and offloads the data to the API server which performs the syncing to the providers.
/// </summary>
[DisableConcurrentExecution(60 * 60 * 60)]
[AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public async Task ProcessUpdatesSinceLastSync()
{
    // Check how many scrobbles we have available then only do those.
    _logger.LogInformation("Starting Scrobble Processing");
    // userId -> remaining rate for the user's provider token
    var userRateLimits = new Dictionary<int, int>();
    var license = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey);

    var progressCounter = 0;

    // Only libraries with scrobbling enabled contribute events
    var librariesWithScrobbling = (await _unitOfWork.LibraryRepository.GetLibrariesAsync())
        .AsEnumerable()
        .Where(l => l.AllowScrobbling)
        .Select(l => l.Id)
        .ToImmutableHashSet();

    // Series previously recorded as unmatchable ("Unknown Series") are skipped to avoid re-sending
    var errors = (await _unitOfWork.ScrobbleRepository.GetScrobbleErrors())
        .Where(e => e.Comment == "Unknown Series")
        .Select(e => e.SeriesId)
        .ToList();


    // Gather pending events per type, filtered to scrobbleable libraries and non-errored series
    var readEvents = (await _unitOfWork.ScrobbleRepository.GetByEvent(ScrobbleEventType.ChapterRead))
        .Where(e => librariesWithScrobbling.Contains(e.LibraryId))
        .Where(e => !errors.Contains(e.SeriesId))
        .ToList();
    var addToWantToRead = (await _unitOfWork.ScrobbleRepository.GetByEvent(ScrobbleEventType.AddWantToRead))
        .Where(e => librariesWithScrobbling.Contains(e.LibraryId))
        .Where(e => !errors.Contains(e.SeriesId))
        .ToList();
    var removeWantToRead = (await _unitOfWork.ScrobbleRepository.GetByEvent(ScrobbleEventType.RemoveWantToRead))
        .Where(e => librariesWithScrobbling.Contains(e.LibraryId))
        .Where(e => !errors.Contains(e.SeriesId))
        .ToList();
    var ratingEvents = (await _unitOfWork.ScrobbleRepository.GetByEvent(ScrobbleEventType.ScoreUpdated))
        .Where(e => librariesWithScrobbling.Contains(e.LibraryId))
        .Where(e => !errors.Contains(e.SeriesId))
        .ToList();
    var reviewEvents = (await _unitOfWork.ScrobbleRepository.GetByEvent(ScrobbleEventType.Review))
        .Where(e => librariesWithScrobbling.Contains(e.LibraryId))
        .Where(e => !errors.Contains(e.SeriesId))
        .ToList();
    // Net out add/remove WantToRead pairs per (series, user): only send an Add when
    // adds outnumber removes; the first Add event carries the payload
    var decisions = addToWantToRead
        .GroupBy(item => new { item.SeriesId, item.AppUserId })
        .Select(group => new
        {
            group.Key.SeriesId,
            UserId = group.Key.AppUserId,
            Event = group.First(),
            Decision = group.Count() - removeWantToRead
                .Count(removeItem => removeItem.SeriesId == group.Key.SeriesId && removeItem.AppUserId == group.Key.AppUserId)
        })
        .Where(d => d.Decision > 0)
        .Select(d => d.Event)
        .ToList();

    // For all userIds, ensure that we can connect and have access
    // NOTE(review): reviewEvents users are not included here — their rate limits get
    // populated lazily inside ProcessEvents instead; confirm this is intentional
    var usersToScrobble = readEvents.Select(r => r.AppUser)
        .Concat(addToWantToRead.Select(r => r.AppUser))
        .Concat(removeWantToRead.Select(r => r.AppUser))
        .Concat(ratingEvents.Select(r => r.AppUser))
        .DistinctBy(u => u.Id)
        .ToList();
    foreach (var user in usersToScrobble)
    {
        await SetAndCheckRateLimit(userRateLimits, user, license.Value);
    }

    var totalProgress = readEvents.Count + addToWantToRead.Count + removeWantToRead.Count + ratingEvents.Count + decisions.Count + reviewEvents.Count;

    _logger.LogInformation("Found {TotalEvents} Scrobble Events", totalProgress);
    try
    {
        // Recalculate the highest volume/chapter
        foreach (var readEvt in readEvents)
        {
            readEvt.VolumeNumber =
                await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadVolumeForSeries(readEvt.SeriesId,
                    readEvt.AppUser.Id);
            readEvt.ChapterNumber =
                await _unitOfWork.AppUserProgressRepository.GetHighestFullyReadChapterForSeries(readEvt.SeriesId,
                    readEvt.AppUser.Id);
            _unitOfWork.ScrobbleRepository.Update(readEvt);
        }
        // Each ProcessEvents call maps its events to DTOs and posts them; progressCounter threads through
        progressCounter = await ProcessEvents(readEvents, userRateLimits, usersToScrobble.Count, progressCounter, totalProgress, evt => new ScrobbleDto()
        {
            Format = evt.Format,
            AniListId = evt.AniListId,
            MALId = (int?) evt.MalId,
            ScrobbleEventType = evt.ScrobbleEventType,
            ChapterNumber = evt.ChapterNumber,
            VolumeNumber = evt.VolumeNumber,
            AniListToken = evt.AppUser.AniListAccessToken,
            SeriesName = evt.Series.Name,
            LocalizedSeriesName = evt.Series.LocalizedName,
            StartedReadingDateUtc = evt.CreatedUtc,
            ScrobbleDateUtc = evt.LastModifiedUtc,
            Year = evt.Series.Metadata.ReleaseYear
        });

        progressCounter = await ProcessEvents(ratingEvents, userRateLimits, usersToScrobble.Count, progressCounter, totalProgress, evt => new ScrobbleDto()
        {
            Format = evt.Format,
            AniListId = evt.AniListId,
            MALId = (int?) evt.MalId,
            ScrobbleEventType = evt.ScrobbleEventType,
            AniListToken = evt.AppUser.AniListAccessToken,
            SeriesName = evt.Series.Name,
            LocalizedSeriesName = evt.Series.LocalizedName,
            Rating = evt.Rating,
            Year = evt.Series.Metadata.ReleaseYear
        });

        progressCounter = await ProcessEvents(reviewEvents, userRateLimits, usersToScrobble.Count, progressCounter, totalProgress, evt => new ScrobbleDto()
        {
            Format = evt.Format,
            AniListId = evt.AniListId,
            MALId = (int?) evt.MalId,
            ScrobbleEventType = evt.ScrobbleEventType,
            AniListToken = evt.AppUser.AniListAccessToken,
            SeriesName = evt.Series.Name,
            LocalizedSeriesName = evt.Series.LocalizedName,
            Rating = evt.Rating,
            Year = evt.Series.Metadata.ReleaseYear,
            ReviewBody = evt.ReviewBody,
            ReviewTitle = evt.ReviewTitle
        });

        // Netted-out WantToRead additions (see decisions above)
        progressCounter = await ProcessEvents(decisions, userRateLimits, usersToScrobble.Count, progressCounter, totalProgress, evt => new ScrobbleDto()
        {
            Format = evt.Format,
            AniListId = evt.AniListId,
            MALId = (int?) evt.MalId,
            ScrobbleEventType = evt.ScrobbleEventType,
            ChapterNumber = evt.ChapterNumber,
            VolumeNumber = evt.VolumeNumber,
            AniListToken = evt.AppUser.AniListAccessToken,
            SeriesName = evt.Series.Name,
            LocalizedSeriesName = evt.Series.LocalizedName,
            Year = evt.Series.Metadata.ReleaseYear
        });
    }
    catch (FlurlHttpException)
    {
        // ProcessEvents rethrows transport failures; stop the whole run rather than hammer a down API
        _logger.LogError("Kavita+ API or a Scrobble service may be experiencing an outage. Stopping sending data");
        return;
    }


    await SaveToDb(progressCounter, true);
    _logger.LogInformation("Scrobbling Events is complete");
}
|
|
|
|
/// <summary>
/// Posts a batch of scrobble events to Kavita+, honoring per-user rate limits and marking
/// each event processed on success. Sleeps between events to pace requests.
/// </summary>
/// <param name="events">Events of a single type to send</param>
/// <param name="userRateLimits">Mutable userId -> remaining-rate map, updated from API responses</param>
/// <param name="usersToScrobble">Number of distinct users in this run; used to short-circuit when the only user is out of rate</param>
/// <param name="progressCounter">Running count of events handled so far (across event types)</param>
/// <param name="totalProgress">Total events expected in the whole run, for logging</param>
/// <param name="createEvent">Maps an event to the DTO payload for the API</param>
/// <returns>The updated progress counter</returns>
/// <exception cref="FlurlHttpException">Rethrown when the API appears down, so the caller stops the run</exception>
private async Task<int> ProcessEvents(IEnumerable<ScrobbleEvent> events, IDictionary<int, int> userRateLimits,
    int usersToScrobble, int progressCounter, int totalProgress, Func<ScrobbleEvent, ScrobbleDto> createEvent)
{
    var license = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey);
    foreach (var evt in events)
    {
        // NOTE(review): message says "Reading Events" but this method handles all event types
        _logger.LogDebug("Processing Reading Events: {Count} / {Total}", progressCounter, totalProgress);
        progressCounter++;
        // Check if this media item can even be processed for this user
        if (!DoesUserHaveProviderAndValid(evt)) continue;
        // Lazily populates the rate map for users not pre-checked by the caller
        var count = await SetAndCheckRateLimit(userRateLimits, evt.AppUser, license.Value);
        if (count == 0)
        {
            // If this is the only user, nothing else can proceed; otherwise skip just this event
            if (usersToScrobble == 1) break;
            continue;
        }

        try
        {
            var data = createEvent(evt);
            // The API response reports the user's remaining rate; keep the map current
            userRateLimits[evt.AppUserId] = await PostScrobbleUpdate(data, license.Value, evt);
            evt.IsProcessed = true;
            evt.ProcessDateUtc = DateTime.UtcNow;
            _unitOfWork.ScrobbleRepository.Update(evt);
        }
        catch (FlurlHttpException)
        {
            // If a flurl exception occured, the API is likely down. Kill processing
            throw;
        }
        catch (Exception)
        {
            /* Swallow as it's already been handled in PostScrobbleUpdate */
        }
        await SaveToDb(progressCounter);
        // We can use count to determine how long to sleep based on rate gain. It might be specific to AniList, but we can model others
        var delay = count > 10 ? TimeSpan.FromMilliseconds(ScrobbleSleepTime) : TimeSpan.FromSeconds(60);
        await Task.Delay(delay);
    }

    await SaveToDb(progressCounter, true);
    return progressCounter;
}
|
|
|
|
/// <summary>
/// Commits pending changes to the database. Unforced calls only commit every 5th event
/// to batch writes; forced calls always commit.
/// </summary>
/// <param name="progressCounter">Number of events processed so far, used to batch commits</param>
/// <param name="force">When true, commit unconditionally (e.g. at the end of a run)</param>
private async Task SaveToDb(int progressCounter, bool force = false)
{
    // BUGFIX: was (!force || ...), which skipped the commit precisely when force was
    // requested unless the counter happened to be a multiple of 5
    if (force || progressCounter % 5 == 0)
    {
        _logger.LogDebug("Saving Progress");
        await _unitOfWork.CommitAsync();
    }
}
|
|
|
|
/// <summary>
/// Determines whether the event's user has at least one configured scrobble provider
/// that supports the event's library type (Manga, Comic, or Book).
/// </summary>
/// <param name="readEvent">The event whose user and library type are checked</param>
/// <returns>True when a matching provider exists; false otherwise</returns>
private static bool DoesUserHaveProviderAndValid(ScrobbleEvent readEvent)
{
    var userProviders = GetUserProviders(readEvent.AppUser);

    // Each library type has its own set of supported providers
    return readEvent.Series.Library.Type switch
    {
        LibraryType.Manga => MangaProviders.Intersect(userProviders).Any(),
        LibraryType.Comic => ComicProviders.Intersect(userProviders).Any(),
        LibraryType.Book => BookProviders.Intersect(userProviders).Any(),
        _ => false
    };
}
|
|
|
|
/// <summary>
/// Lists the scrobble providers the user has credentials configured for.
/// Currently only AniList (via its access token) is supported.
/// </summary>
/// <param name="appUser">User whose provider tokens are inspected</param>
/// <returns>Configured providers; empty when none are set up</returns>
private static IList<ScrobbleProvider> GetUserProviders(AppUser appUser)
{
    var configured = new List<ScrobbleProvider>();

    var hasAniList = !string.IsNullOrEmpty(appUser.AniListAccessToken);
    if (hasAniList)
    {
        configured.Add(ScrobbleProvider.AniList);
    }

    return configured;
}
|
|
|
|
/// <summary>
/// Extract an Id from a given weblink
/// </summary>
/// <param name="webLinks">Comma-separated list of weblinks (from Series Metadata)</param>
/// <param name="website">The website url prefix to match; must be a key in WeblinkExtractionMap</param>
/// <returns>The parsed id from the first matching, well-formed link; 0 when none is found</returns>
public static long? ExtractId(string webLinks, string website)
{
    // Which path segment (after the website prefix) holds the id for this site
    var index = WeblinkExtractionMap[website];
    foreach (var webLink in webLinks.Split(','))
    {
        if (!webLink.StartsWith(website)) continue;
        var tokens = webLink.Split(website)[1].Split('/');
        // Robustness: skip malformed links (too few segments or a non-numeric id)
        // instead of throwing and breaking event creation for the whole series
        if (index >= tokens.Length) continue;
        if (long.TryParse(tokens[index], out var id)) return id;
    }

    return 0;
}
|
|
|
|
/// <summary>
/// Ensures the user has an entry in the rate-limit map (fetching it from Kavita+ on first sight)
/// and returns their current remaining rate. A failed fetch records the user as having 0 rate.
/// </summary>
/// <param name="userRateLimits">Mutable userId -> remaining-rate map</param>
/// <param name="user">User whose AniList token is used for the rate query</param>
/// <param name="license">Kavita+ license key</param>
/// <returns>The user's remaining rate; 0 means they cannot scrobble right now</returns>
private async Task<int> SetAndCheckRateLimit(IDictionary<int, int> userRateLimits, AppUser user, string license)
{
    try
    {
        // Only fetch once per user per run; subsequent calls reuse the cached value
        if (!userRateLimits.ContainsKey(user.Id))
        {
            var rate = await GetRateLimit(license, user.AniListAccessToken);
            userRateLimits.Add(user.Id, rate);
        }
    }
    catch (Exception ex)
    {
        // Treat any failure as "no rate" so the user is skipped rather than retried in a tight loop
        _logger.LogInformation("User {UserName} had an issue figuring out rate: {Message}", user.UserName, ex.Message);
        userRateLimits.Add(user.Id, 0);
    }

    userRateLimits.TryGetValue(user.Id, out var count);
    if (count == 0)
    {
        _logger.LogInformation("User {UserName} is out of rate for Scrobbling", user.UserName);
    }

    return count;
}
|
|
|
|
/// <summary>
/// Builds a provider url of the form "{url}{id}/". A missing (null) or zero id
/// yields an empty string, since there is nothing to link to.
/// </summary>
/// <param name="url">Base url, expected to already end appropriately (e.g. with a slash)</param>
/// <param name="id">Provider-side identifier; null or 0 means "no link"</param>
/// <returns>The composed url, or string.Empty when id is absent</returns>
public static string CreateUrl(string url, long? id)
{
    return id switch
    {
        null or 0 => string.Empty,
        _ => $"{url}{id}/"
    };
}
|
|
}
|