Mirror of https://github.com/Kareadita/Kavita.git, synced 2025-05-24 00:52:23 -04:00
981 lines · 41 KiB · C#
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using System.Xml.Serialization;
using API.Comparators;
using API.Data;
using API.Data.Repositories;
using API.DTOs;
using API.DTOs.CollectionTags;
using API.DTOs.Filtering;
using API.DTOs.OPDS;
using API.DTOs.Search;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers;
using API.Services;
using EasyCaching.Core;
using Kavita.Common;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using MimeTypes;

namespace API.Controllers;

#nullable enable

[AllowAnonymous]
public class OpdsController : BaseApiController
{
    private readonly IUnitOfWork _unitOfWork;
    private readonly IDownloadService _downloadService;
    private readonly IDirectoryService _directoryService;
    private readonly ICacheService _cacheService;
    private readonly IReaderService _readerService;
    private readonly ISeriesService _seriesService;
    private readonly IAccountService _accountService;


    private readonly XmlSerializer _xmlSerializer;
    private readonly XmlSerializer _xmlOpenSearchSerializer;
    private readonly FilterDto _filterDto = new FilterDto()
    {
        Formats = new List<MangaFormat>(),
        Character = new List<int>(),
        Colorist = new List<int>(),
        Editor = new List<int>(),
        Genres = new List<int>(),
        Inker = new List<int>(),
        Languages = new List<string>(),
        Letterer = new List<int>(),
        Penciller = new List<int>(),
        Libraries = new List<int>(),
        Publisher = new List<int>(),
        Rating = 0,
        Tags = new List<int>(),
        Translators = new List<int>(),
        Writers = new List<int>(),
        AgeRating = new List<AgeRating>(),
        CollectionTags = new List<int>(),
        CoverArtist = new List<int>(),
        ReadStatus = new ReadStatus(),
        SortOptions = null,
        PublicationStatus = new List<PublicationStatus>()
    };
    private readonly ChapterSortComparer _chapterSortComparer = ChapterSortComparer.Default;
    private const int PageSize = 20;

    public OpdsController(IUnitOfWork unitOfWork, IDownloadService downloadService,
        IDirectoryService directoryService, ICacheService cacheService,
        IReaderService readerService, ISeriesService seriesService,
        IAccountService accountService, IEasyCachingProvider provider)
    {
        _unitOfWork = unitOfWork;
        _downloadService = downloadService;
        _directoryService = directoryService;
        _cacheService = cacheService;
        _readerService = readerService;
        _seriesService = seriesService;
        _accountService = accountService;

        _xmlSerializer = new XmlSerializer(typeof(Feed));
        _xmlOpenSearchSerializer = new XmlSerializer(typeof(OpenSearchDescription));
    }

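    /// <summary>
    /// Returns the OPDS catalog root: navigation entries for On Deck, Recently Added, Reading Lists, Libraries, and Collections.
    /// </summary>
    /// <param name="apiKey">User's API Key</param>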
    [HttpPost("{apiKey}")]
    [HttpGet("{apiKey}")]
    [Produces("application/xml")]
    public async Task<IActionResult> Get(string apiKey)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");

        var (baseUrl, prefix) = await GetPrefix();

        var feed = CreateFeed("Kavita", string.Empty, apiKey, prefix);
        SetFeedId(feed, "root");
        feed.Entries.Add(new FeedEntry()
        {
            Id = "onDeck",
            Title = "On Deck",
            Content = new FeedEntryContent()
            {
                Text = "Browse by On Deck"
            },
            Links = new List<FeedLink>()
            {
                CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, $"{prefix}{apiKey}/on-deck"),
            }
        });
        feed.Entries.Add(new FeedEntry()
        {
            Id = "recentlyAdded",
            Title = "Recently Added",
            Content = new FeedEntryContent()
            {
                Text = "Browse by Recently Added"
            },
            Links = new List<FeedLink>()
            {
                CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, $"{prefix}{apiKey}/recently-added"),
            }
        });
        feed.Entries.Add(new FeedEntry()
        {
            Id = "readingList",
            Title = "Reading Lists",
            Content = new FeedEntryContent()
            {
                Text = "Browse by Reading Lists"
            },
            Links = new List<FeedLink>()
            {
                CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, $"{prefix}{apiKey}/reading-list"),
            }
        });
        feed.Entries.Add(new FeedEntry()
        {
            Id = "allLibraries",
            Title = "All Libraries",
            Content = new FeedEntryContent()
            {
                Text = "Browse by Libraries"
            },
            Links = new List<FeedLink>()
            {
                CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, $"{prefix}{apiKey}/libraries"),
            }
        });
        feed.Entries.Add(new FeedEntry()
        {
            Id = "allCollections",
            Title = "All Collections",
            Content = new FeedEntryContent()
            {
                Text = "Browse by Collections"
            },
            Links = new List<FeedLink>()
            {
                CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, $"{prefix}{apiKey}/collections"),
            }
        });
        return CreateXmlResult(SerializeXml(feed));
    }

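    /// <summary>
    /// Returns the configured Base Url and the OPDS route prefix ("/api/opds/", adjusted when a custom Base Url is set).
    /// </summary>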
    private async Task<Tuple<string, string>> GetPrefix()
    {
        var baseUrl = (await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BaseUrl)).Value;
        var prefix = "/api/opds/";
        if (!Configuration.DefaultBaseUrl.Equals(baseUrl))
        {
            // We need to update the Prefix to account for baseUrl
            prefix = baseUrl + "api/opds/";
        }

        return new Tuple<string, string>(baseUrl, prefix);
    }

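    /// <summary>
    /// Returns a navigation feed of all libraries the user has access to.
    /// </summary>
    /// <param name="apiKey">User's API Key</param>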
    [HttpGet("{apiKey}/libraries")]
    [Produces("application/xml")]
    public async Task<IActionResult> GetLibraries(string apiKey)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");
        var (baseUrl, prefix) = await GetPrefix();
        var userId = await GetUser(apiKey);
        var libraries = await _unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(userId);
        var feed = CreateFeed("All Libraries", $"{prefix}{apiKey}/libraries", apiKey, prefix);
        SetFeedId(feed, "libraries");
        foreach (var library in libraries)
        {
            feed.Entries.Add(new FeedEntry()
            {
                Id = library.Id.ToString(),
                Title = library.Name,
                Links = new List<FeedLink>()
                {
                    CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, $"{prefix}{apiKey}/libraries/{library.Id}"),
                }
            });
        }

        return CreateXmlResult(SerializeXml(feed));
    }

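    /// <summary>
    /// Returns a navigation feed of collection tags: all tags for admins, promoted tags only for other users.
    /// </summary>
    /// <param name="apiKey">User's API Key</param>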
    [HttpGet("{apiKey}/collections")]
    [Produces("application/xml")]
    public async Task<IActionResult> GetCollections(string apiKey)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");
        var (baseUrl, prefix) = await GetPrefix();
        var userId = await GetUser(apiKey);
        var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
        if (user == null) return Unauthorized();
        var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user);

        var tags = isAdmin ? (await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync())
            : (await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(userId));


        var feed = CreateFeed("All Collections", $"{prefix}{apiKey}/collections", apiKey, prefix);
        SetFeedId(feed, "collections");
        foreach (var tag in tags)
        {
            feed.Entries.Add(new FeedEntry()
            {
                Id = tag.Id.ToString(),
                Title = tag.Title,
                Summary = tag.Summary,
                Links = new List<FeedLink>()
                {
                    CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, $"{prefix}{apiKey}/collections/{tag.Id}"),
                    CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"{baseUrl}api/image/collection-cover?collectionId={tag.Id}&apiKey={apiKey}"),
                    CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"{baseUrl}api/image/collection-cover?collectionId={tag.Id}&apiKey={apiKey}")
                }
            });
        }

        return CreateXmlResult(SerializeXml(feed));
    }

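    /// <summary>
    /// Returns a paginated acquisition feed of the series within a single collection, if the user has access to it.
    /// </summary>
    /// <param name="collectionId"></param>
    /// <param name="apiKey">User's API Key</param>
    /// <param name="pageNumber"></param>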
    [HttpGet("{apiKey}/collections/{collectionId}")]
    [Produces("application/xml")]
    public async Task<IActionResult> GetCollection(int collectionId, string apiKey, [FromQuery] int pageNumber = 0)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");
        var (baseUrl, prefix) = await GetPrefix();
        var userId = await GetUser(apiKey);
        var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
        if (user == null) return Unauthorized();
        var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user);

        IEnumerable<CollectionTagDto> tags;
        if (isAdmin)
        {
            tags = await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync();
        }
        else
        {
            tags = await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(userId);
        }

        var tag = tags.SingleOrDefault(t => t.Id == collectionId);
        if (tag == null)
        {
            return BadRequest("Collection does not exist or you don't have access");
        }

        var series = await _unitOfWork.SeriesRepository.GetSeriesDtoForCollectionAsync(collectionId, userId, GetUserParams(pageNumber));
        var seriesMetadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIds(series.Select(s => s.Id));

        var feed = CreateFeed(tag.Title + " Collection", $"{prefix}{apiKey}/collections/{collectionId}", apiKey, prefix);
        SetFeedId(feed, $"collections-{collectionId}");
        AddPagination(feed, series, $"{prefix}{apiKey}/collections/{collectionId}");

        foreach (var seriesDto in series)
        {
            feed.Entries.Add(CreateSeries(seriesDto, seriesMetadatas.First(s => s.SeriesId == seriesDto.Id), apiKey, prefix, baseUrl));
        }


        return CreateXmlResult(SerializeXml(feed));
    }

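    /// <summary>
    /// Returns a paginated navigation feed of the reading lists visible to the user.
    /// </summary>
    /// <param name="apiKey">User's API Key</param>
    /// <param name="pageNumber"></param>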
    [HttpGet("{apiKey}/reading-list")]
    [Produces("application/xml")]
    public async Task<IActionResult> GetReadingLists(string apiKey, [FromQuery] int pageNumber = 0)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");
        var (baseUrl, prefix) = await GetPrefix();
        var userId = await GetUser(apiKey);

        var readingLists = await _unitOfWork.ReadingListRepository.GetReadingListDtosForUserAsync(userId,
            true, GetUserParams(pageNumber), false);


        var feed = CreateFeed("All Reading Lists", $"{prefix}{apiKey}/reading-list", apiKey, prefix);
        SetFeedId(feed, "reading-list");
        foreach (var readingListDto in readingLists)
        {
            feed.Entries.Add(new FeedEntry()
            {
                Id = readingListDto.Id.ToString(),
                Title = readingListDto.Title,
                Summary = readingListDto.Summary,
                Links = new List<FeedLink>()
                {
                    CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, $"{prefix}{apiKey}/reading-list/{readingListDto.Id}"),
                }
            });
        }

        return CreateXmlResult(SerializeXml(feed));
    }

    private static UserParams GetUserParams(int pageNumber)
    {
        return new UserParams()
        {
            PageNumber = pageNumber,
            PageSize = PageSize
        };
    }

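    /// <summary>
    /// Returns an acquisition feed with one entry per item in the given reading list, if the user has access to it.
    /// </summary>
    /// <param name="readingListId"></param>
    /// <param name="apiKey">User's API Key</param>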
    [HttpGet("{apiKey}/reading-list/{readingListId}")]
    [Produces("application/xml")]
    public async Task<IActionResult> GetReadingListItems(int readingListId, string apiKey)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");
        var (baseUrl, prefix) = await GetPrefix();
        var userId = await GetUser(apiKey);
        var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);

        var userWithLists = await _unitOfWork.UserRepository.GetUserByUsernameAsync(user!.UserName!, AppUserIncludes.ReadingListsWithItems);
        if (userWithLists == null) return Unauthorized();
        var readingList = userWithLists.ReadingLists.SingleOrDefault(t => t.Id == readingListId);
        if (readingList == null)
        {
            return BadRequest("Reading list does not exist or you don't have access");
        }

        var feed = CreateFeed(readingList.Title + " Reading List", $"{prefix}{apiKey}/reading-list/{readingListId}", apiKey, prefix);
        SetFeedId(feed, $"reading-list-{readingListId}");

        var items = (await _unitOfWork.ReadingListRepository.GetReadingListItemDtosByIdAsync(readingListId, userId)).ToList();
        foreach (var item in items)
        {
            feed.Entries.Add(
                CreateChapter(apiKey, $"{item.Order} - {item.SeriesName}: {item.Title}",
                    string.Empty, item.ChapterId, item.VolumeId, item.SeriesId, prefix, baseUrl));
        }
        return CreateXmlResult(SerializeXml(feed));
    }

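    /// <summary>
    /// Returns a paginated acquisition feed of the series within a library the user has access to.
    /// </summary>
    /// <param name="libraryId"></param>
    /// <param name="apiKey">User's API Key</param>
    /// <param name="pageNumber"></param>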
    [HttpGet("{apiKey}/libraries/{libraryId}")]
    [Produces("application/xml")]
    public async Task<IActionResult> GetSeriesForLibrary(int libraryId, string apiKey, [FromQuery] int pageNumber = 0)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");
        var (baseUrl, prefix) = await GetPrefix();
        var userId = await GetUser(apiKey);
        var library =
            (await _unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(userId)).SingleOrDefault(l =>
                l.Id == libraryId);
        if (library == null)
        {
            return BadRequest("User does not have access to this library");
        }

        var series = await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, userId, GetUserParams(pageNumber), _filterDto);
        var seriesMetadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIds(series.Select(s => s.Id));

        var feed = CreateFeed(library.Name, $"{apiKey}/libraries/{libraryId}", apiKey, prefix);
        SetFeedId(feed, $"library-{library.Name}");
        AddPagination(feed, series, $"{prefix}{apiKey}/libraries/{libraryId}");

        foreach (var seriesDto in series)
        {
            feed.Entries.Add(CreateSeries(seriesDto, seriesMetadatas.First(s => s.SeriesId == seriesDto.Id), apiKey, prefix, baseUrl));
        }

        return CreateXmlResult(SerializeXml(feed));
    }

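    /// <summary>
    /// Returns a paginated acquisition feed of recently added series.
    /// </summary>
    /// <param name="apiKey">User's API Key</param>
    /// <param name="pageNumber"></param>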
    [HttpGet("{apiKey}/recently-added")]
    [Produces("application/xml")]
    public async Task<IActionResult> GetRecentlyAdded(string apiKey, [FromQuery] int pageNumber = 1)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");
        var (baseUrl, prefix) = await GetPrefix();
        var userId = await GetUser(apiKey);
        var recentlyAdded = await _unitOfWork.SeriesRepository.GetRecentlyAdded(0, userId, GetUserParams(pageNumber), _filterDto);
        var seriesMetadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIds(recentlyAdded.Select(s => s.Id));

        var feed = CreateFeed("Recently Added", $"{prefix}{apiKey}/recently-added", apiKey, prefix);
        SetFeedId(feed, "recently-added");
        AddPagination(feed, recentlyAdded, $"{prefix}{apiKey}/recently-added");

        foreach (var seriesDto in recentlyAdded)
        {
            feed.Entries.Add(CreateSeries(seriesDto, seriesMetadatas.First(s => s.SeriesId == seriesDto.Id), apiKey, prefix, baseUrl));
        }

        return CreateXmlResult(SerializeXml(feed));
    }

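    /// <summary>
    /// Returns a paginated acquisition feed of the user's On Deck series.
    /// </summary>
    /// <param name="apiKey">User's API Key</param>
    /// <param name="pageNumber"></param>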
    [HttpGet("{apiKey}/on-deck")]
    [Produces("application/xml")]
    public async Task<IActionResult> GetOnDeck(string apiKey, [FromQuery] int pageNumber = 1)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");

        var (baseUrl, prefix) = await GetPrefix();

        var userId = await GetUser(apiKey);
        var userParams = GetUserParams(pageNumber);
        var pagedList = await _unitOfWork.SeriesRepository.GetOnDeck(userId, 0, userParams, _filterDto);
        var seriesMetadatas = await _unitOfWork.SeriesRepository.GetSeriesMetadataForIds(pagedList.Select(s => s.Id));

        Response.AddPaginationHeader(pagedList.CurrentPage, pagedList.PageSize, pagedList.TotalCount, pagedList.TotalPages);

        var feed = CreateFeed("On Deck", $"{prefix}{apiKey}/on-deck", apiKey, prefix);
        SetFeedId(feed, "on-deck");
        AddPagination(feed, pagedList, $"{prefix}{apiKey}/on-deck");

        foreach (var seriesDto in pagedList)
        {
            feed.Entries.Add(CreateSeries(seriesDto, seriesMetadatas.First(s => s.SeriesId == seriesDto.Id), apiKey, prefix, baseUrl));
        }

        return CreateXmlResult(SerializeXml(feed));
    }

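    /// <summary>
    /// Searches the user's accessible libraries and returns matching series, collections, and reading lists as feed entries.
    /// </summary>
    /// <param name="apiKey">User's API Key</param>
    /// <param name="query">Search term</param>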
    [HttpGet("{apiKey}/series")]
    [Produces("application/xml")]
    public async Task<IActionResult> SearchSeries(string apiKey, [FromQuery] string query)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");
        var (baseUrl, prefix) = await GetPrefix();
        var userId = await GetUser(apiKey);
        var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);

        if (string.IsNullOrEmpty(query))
        {
            return BadRequest("You must pass a query parameter");
        }
        query = query.Replace(@"%", string.Empty);
        // Get libraries user has access to
        var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(userId)).ToList();
        if (!libraries.Any()) return BadRequest("User does not have access to any libraries");

        var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user);

        var series = await _unitOfWork.SeriesRepository.SearchSeries(userId, isAdmin, libraries.Select(l => l.Id).ToArray(), query);

        var feed = CreateFeed(query, $"{prefix}{apiKey}/series?query=" + query, apiKey, prefix);
        SetFeedId(feed, "search-series");
        foreach (var seriesDto in series.Series)
        {
            feed.Entries.Add(CreateSeries(seriesDto, apiKey, prefix, baseUrl));
        }

        foreach (var collection in series.Collections)
        {
            feed.Entries.Add(new FeedEntry()
            {
                Id = collection.Id.ToString(),
                Title = collection.Title,
                Summary = collection.Summary,
                Links = new List<FeedLink>()
                {
                    CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation,
                        $"{prefix}{apiKey}/collections/{collection.Id}"),
                    CreateLink(FeedLinkRelation.Image, FeedLinkType.Image,
                        $"{baseUrl}api/image/collection-cover?collectionId={collection.Id}&apiKey={apiKey}"),
                    CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image,
                        $"{baseUrl}api/image/collection-cover?collectionId={collection.Id}&apiKey={apiKey}")
                }
            });
        }

        foreach (var readingListDto in series.ReadingLists)
        {
            feed.Entries.Add(new FeedEntry()
            {
                Id = readingListDto.Id.ToString(),
                Title = readingListDto.Title,
                Summary = readingListDto.Summary,
                Links = new List<FeedLink>()
                {
                    CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, $"{prefix}{apiKey}/reading-list/{readingListDto.Id}"),
                }
            });
        }


        return CreateXmlResult(SerializeXml(feed));
    }

    private static void SetFeedId(Feed feed, string id)
    {
        feed.Id = id;
    }

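    /// <summary>
    /// Returns the OpenSearch description document that tells OPDS clients how to issue search queries.
    /// </summary>
    /// <param name="apiKey">User's API Key</param>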
    [HttpGet("{apiKey}/search")]
    [Produces("application/xml")]
    public async Task<IActionResult> GetSearchDescriptor(string apiKey)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");
        var (_, prefix) = await GetPrefix();
        var feed = new OpenSearchDescription()
        {
            ShortName = "Search",
            Description = "Search for Series, Collections, or Reading Lists",
            Url = new SearchLink()
            {
                Type = FeedLinkType.AtomAcquisition,
                Template = $"{prefix}{apiKey}/series?query=" + "{searchTerms}"
            }
        };

        await using var sm = new StringWriter();
        _xmlOpenSearchSerializer.Serialize(sm, feed);

        return CreateXmlResult(sm.ToString().Replace("utf-16", "utf-8"));
    }

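    /// <summary>
    /// Returns an acquisition feed for a single series: one entry per file across volumes, storyline chapters, and specials.
    /// </summary>
    /// <param name="apiKey">User's API Key</param>
    /// <param name="seriesId"></param>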
    [HttpGet("{apiKey}/series/{seriesId}")]
    [Produces("application/xml")]
    public async Task<IActionResult> GetSeries(string apiKey, int seriesId)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");
        var (baseUrl, prefix) = await GetPrefix();
        var userId = await GetUser(apiKey);
        var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId);

        var feed = CreateFeed(series.Name + " - Storyline", $"{prefix}{apiKey}/series/{series.Id}", apiKey, prefix);
        SetFeedId(feed, $"series-{series.Id}");
        feed.Links.Add(CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"{baseUrl}api/image/series-cover?seriesId={seriesId}&apiKey={apiKey}"));

        var seriesDetail = await _seriesService.GetSeriesDetail(seriesId, userId);
        foreach (var volume in seriesDetail.Volumes)
        {
            var chapters = (await _unitOfWork.ChapterRepository.GetChaptersAsync(volume.Id)).OrderBy(x => double.Parse(x.Number),
                _chapterSortComparer);

            foreach (var chapter in chapters)
            {
                var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapter.Id);
                var chapterTest = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapter.Id);
                foreach (var mangaFile in files)
                {
                    feed.Entries.Add(await CreateChapterWithFile(seriesId, volume.Id, chapter.Id, mangaFile, series, chapterTest, apiKey, prefix, baseUrl));
                }
            }

        }

        foreach (var storylineChapter in seriesDetail.StorylineChapters.Where(c => !c.IsSpecial))
        {
            var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(storylineChapter.Id);
            var chapterTest = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(storylineChapter.Id);
            foreach (var mangaFile in files)
            {
                feed.Entries.Add(await CreateChapterWithFile(seriesId, storylineChapter.VolumeId, storylineChapter.Id, mangaFile, series, chapterTest, apiKey, prefix, baseUrl));
            }
        }

        foreach (var special in seriesDetail.Specials)
        {
            var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(special.Id);
            var chapterTest = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(special.Id);
            foreach (var mangaFile in files)
            {
                feed.Entries.Add(await CreateChapterWithFile(seriesId, special.VolumeId, special.Id, mangaFile, series, chapterTest, apiKey, prefix, baseUrl));
            }
        }

        return CreateXmlResult(SerializeXml(feed));
    }

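    /// <summary>
    /// Returns an acquisition feed of the chapters (one entry per file) within a volume of a series.
    /// </summary>
    /// <param name="apiKey">User's API Key</param>
    /// <param name="seriesId"></param>
    /// <param name="volumeId"></param>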
    [HttpGet("{apiKey}/series/{seriesId}/volume/{volumeId}")]
    [Produces("application/xml")]
    public async Task<IActionResult> GetVolume(string apiKey, int seriesId, int volumeId)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");
        var (baseUrl, prefix) = await GetPrefix();
        var userId = await GetUser(apiKey);
        var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId);
        var libraryType = await _unitOfWork.LibraryRepository.GetLibraryTypeAsync(series.LibraryId);
        var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId);
        var chapters =
            (await _unitOfWork.ChapterRepository.GetChaptersAsync(volumeId)).OrderBy(x => double.Parse(x.Number),
                _chapterSortComparer);
        var feed = CreateFeed(series.Name + " - Volume " + volume!.Name + $" - {SeriesService.FormatChapterName(libraryType)}s ",
            $"{prefix}{apiKey}/series/{seriesId}/volume/{volumeId}", apiKey, prefix);
        SetFeedId(feed, $"series-{series.Id}-volume-{volume.Id}-{SeriesService.FormatChapterName(libraryType)}s");
        foreach (var chapter in chapters)
        {
            var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapter.Id);
            var chapterTest = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapter.Id);
            foreach (var mangaFile in files)
            {
                feed.Entries.Add(await CreateChapterWithFile(seriesId, volumeId, chapter.Id, mangaFile, series, chapterTest, apiKey, prefix, baseUrl));
            }
        }

        return CreateXmlResult(SerializeXml(feed));
    }

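    /// <summary>
    /// Returns an acquisition feed with one entry per file belonging to a single chapter.
    /// </summary>
    /// <param name="apiKey">User's API Key</param>
    /// <param name="seriesId"></param>
    /// <param name="volumeId"></param>
    /// <param name="chapterId"></param>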
    [HttpGet("{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}")]
    [Produces("application/xml")]
    public async Task<IActionResult> GetChapter(string apiKey, int seriesId, int volumeId, int chapterId)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");
        var (baseUrl, prefix) = await GetPrefix();
        var userId = await GetUser(apiKey);
        var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId);
        var libraryType = await _unitOfWork.LibraryRepository.GetLibraryTypeAsync(series.LibraryId);
        var chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapterId);
        if (chapter == null) return BadRequest("Chapter doesn't exist");
        var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId);
        var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId);

        var feed = CreateFeed(series.Name + " - Volume " + volume!.Name + $" - {SeriesService.FormatChapterName(libraryType)}s",
            $"{prefix}{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}", apiKey, prefix);
        SetFeedId(feed, $"series-{series.Id}-volume-{volumeId}-{SeriesService.FormatChapterName(libraryType)}-{chapterId}-files");
        foreach (var mangaFile in files)
        {
            feed.Entries.Add(await CreateChapterWithFile(seriesId, volumeId, chapterId, mangaFile, series, chapter, apiKey, prefix, baseUrl));
        }

        return CreateXmlResult(SerializeXml(feed));
    }

    /// <summary>
    /// Downloads a file
    /// </summary>
    /// <param name="apiKey">User's API Key</param>
    /// <param name="seriesId"></param>
    /// <param name="volumeId"></param>
    /// <param name="chapterId"></param>
    /// <param name="filename">Not used. Only for Chunky to allow download links</param>
    /// <returns></returns>
    [HttpGet("{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}/download/{filename}")]
    public async Task<ActionResult> DownloadFile(string apiKey, int seriesId, int volumeId, int chapterId, string filename)
    {
        if (!(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()).EnableOpds)
            return BadRequest("OPDS is not enabled on this server");
        var user = await _unitOfWork.UserRepository.GetUserByIdAsync(await GetUser(apiKey));
        if (!await _accountService.HasDownloadPermission(user))
        {
            return BadRequest("User does not have download permissions");
        }

        var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId);
        var (zipFile, contentType, fileDownloadName) = _downloadService.GetFirstFileDownload(files);
        return PhysicalFile(zipFile, contentType, fileDownloadName, true);
    }

    private static ContentResult CreateXmlResult(string xml)
    {
        return new ContentResult
        {
            ContentType = "application/xml",
            Content = xml,
            StatusCode = 200
        };
    }

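    /// <summary>
    /// Adds prev/next navigation links for the paged list, rewrites the self link to the current page, and sets the feed totals.
    /// </summary>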
    private static void AddPagination(Feed feed, PagedList<SeriesDto> list, string href)
    {
        var url = href;
        if (href.Contains('?'))
        {
            url += "&";
        }
        else
        {
            url += "?";
        }

        var pageNumber = Math.Max(list.CurrentPage, 1);

        if (pageNumber > 1)
        {
            feed.Links.Add(CreateLink(FeedLinkRelation.Prev, FeedLinkType.AtomNavigation, url + "pageNumber=" + (pageNumber - 1)));
        }

        if (pageNumber + 1 <= list.TotalPages)
        {
            feed.Links.Add(CreateLink(FeedLinkRelation.Next, FeedLinkType.AtomNavigation, url + "pageNumber=" + (pageNumber + 1)));
        }

        // Update self to point to current page
        var selfLink = feed.Links.SingleOrDefault(l => l.Rel == FeedLinkRelation.Self);
        if (selfLink != null)
        {
            selfLink.Href = url + "pageNumber=" + pageNumber;
        }


        feed.Total = list.TotalCount;
        feed.ItemsPerPage = list.PageSize;
        feed.StartIndex = (Math.Max(list.CurrentPage - 1, 0) * list.PageSize) + 1;
    }

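    /// <summary>
    /// Builds a series feed entry (title, summary, authors, genres, and cover/thumbnail links) from a SeriesDto and its metadata.
    /// </summary>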
    private static FeedEntry CreateSeries(SeriesDto seriesDto, SeriesMetadataDto metadata, string apiKey, string prefix, string baseUrl)
    {
        return new FeedEntry()
        {
            Id = seriesDto.Id.ToString(),
            Title = $"{seriesDto.Name} ({seriesDto.Format})",
            Summary = seriesDto.Summary,
            Authors = metadata.Writers.Select(p => new FeedAuthor()
            {
                Name = p.Name,
                Uri = "http://opds-spec.org/author/" + p.Id
            }).ToList(),
            Categories = metadata.Genres.Select(g => new FeedCategory()
            {
                Label = g.Title,
                Term = string.Empty
            }).ToList(),
            Links = new List<FeedLink>()
            {
                CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, $"{prefix}{apiKey}/series/{seriesDto.Id}"),
                CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"{baseUrl}api/image/series-cover?seriesId={seriesDto.Id}&apiKey={apiKey}"),
                CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"{baseUrl}api/image/series-cover?seriesId={seriesDto.Id}&apiKey={apiKey}")
            }
        };
    }

    private static FeedEntry CreateSeries(SearchResultDto searchResultDto, string apiKey, string prefix, string baseUrl)
    {
        return new FeedEntry()
        {
            Id = searchResultDto.SeriesId.ToString(),
            Title = $"{searchResultDto.Name} ({searchResultDto.Format})",
            Links = new List<FeedLink>()
            {
                CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, $"{prefix}{apiKey}/series/{searchResultDto.SeriesId}"),
                CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"{baseUrl}api/image/series-cover?seriesId={searchResultDto.SeriesId}&apiKey={apiKey}"),
                CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"{baseUrl}api/image/series-cover?seriesId={searchResultDto.SeriesId}&apiKey={apiKey}")
            }
        };
    }

    private static FeedEntry CreateChapter(string apiKey, string title, string summary, int chapterId, int volumeId, int seriesId, string prefix, string baseUrl)
    {
        return new FeedEntry()
        {
            Id = chapterId.ToString(),
            Title = title,
            Summary = summary ?? string.Empty,
            Links = new List<FeedLink>()
            {
                CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation,
                    $"{prefix}{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}"),
                CreateLink(FeedLinkRelation.Image, FeedLinkType.Image,
                    $"{baseUrl}api/image/chapter-cover?chapterId={chapterId}&apiKey={apiKey}"),
                CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image,
                    $"{baseUrl}api/image/chapter-cover?chapterId={chapterId}&apiKey={apiKey}")
            }
        };
    }

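    /// <summary>
    /// Builds an acquisition entry for a single file of a chapter: file size, format, cover links, a download link, and an OPDS-PS page stream link.
    /// </summary>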
    private async Task<FeedEntry> CreateChapterWithFile(int seriesId, int volumeId, int chapterId, MangaFile mangaFile, SeriesDto series, ChapterDto chapter, string apiKey, string prefix, string baseUrl)
    {
        var fileSize =
            mangaFile.Bytes > 0 ? DirectoryService.GetHumanReadableBytes(mangaFile.Bytes) :
            DirectoryService.GetHumanReadableBytes(_directoryService.GetTotalSize(new List<string>()
                {mangaFile.FilePath}));
        var fileType = _downloadService.GetContentTypeFromFile(mangaFile.FilePath);
        var filename = Uri.EscapeDataString(Path.GetFileName(mangaFile.FilePath));
        var libraryType = await _unitOfWork.LibraryRepository.GetLibraryTypeAsync(series.LibraryId);
        var volume = await _unitOfWork.VolumeRepository.GetVolumeDtoAsync(volumeId, await GetUser(apiKey));


        var title = $"{series.Name}";

        if (volume!.Chapters.Count == 1)
        {
            SeriesService.RenameVolumeName(volume.Chapters.First(), volume, libraryType);
            if (volume.Name != "0")
            {
                title += $" - {volume.Name}";
            }
        }
        else if (volume.Number != 0)
        {
            title = $"{series.Name} - Volume {volume.Name} - {SeriesService.FormatChapterTitle(chapter, libraryType)}";
        }
        else
        {
            title = $"{series.Name} - {SeriesService.FormatChapterTitle(chapter, libraryType)}";
        }

        // Chunky requires a file at the end. Our API ignores this
        var accLink =
            CreateLink(FeedLinkRelation.Acquisition, fileType,
                $"{prefix}{apiKey}/series/{seriesId}/volume/{volumeId}/chapter/{chapterId}/download/{filename}",
                filename);
        accLink.TotalPages = chapter.Pages;

        var entry = new FeedEntry()
        {
            Id = mangaFile.Id.ToString(),
            Title = title,
            Extent = fileSize,
            Summary = $"{fileType.Split("/")[1]} - {fileSize}",
            Format = mangaFile.Format.ToString(),
            Links = new List<FeedLink>()
            {
                CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"{baseUrl}api/image/chapter-cover?chapterId={chapterId}&apiKey={apiKey}"),
                CreateLink(FeedLinkRelation.Thumbnail, FeedLinkType.Image, $"{baseUrl}api/image/chapter-cover?chapterId={chapterId}&apiKey={apiKey}"),
                // The acquisition link must stay in the feed: Panels doesn't work with only the page streaming option, so downloads have to be blocked directly instead
                accLink,
                await CreatePageStreamLink(series.LibraryId, seriesId, volumeId, chapterId, mangaFile, apiKey, prefix)
            },
            Content = new FeedEntryContent()
            {
                Text = fileType,
                Type = "text"
            }
        };

        return entry;
    }

    /// <summary>
    /// This returns a streamed image following OPDS-PS v1.2
    /// </summary>
    /// <param name="apiKey"></param>
    /// <param name="libraryId"></param>
    /// <param name="seriesId"></param>
    /// <param name="volumeId"></param>
    /// <param name="chapterId"></param>
    /// <param name="pageNumber"></param>
    /// <returns></returns>
    [HttpGet("{apiKey}/image")]
    public async Task<ActionResult> GetPageStreamedImage(string apiKey, [FromQuery] int libraryId, [FromQuery] int seriesId, [FromQuery] int volumeId, [FromQuery] int chapterId, [FromQuery] int pageNumber)
    {
        if (pageNumber < 0) return BadRequest("Page cannot be less than 0");
        var chapter = await _cacheService.Ensure(chapterId);
        if (chapter == null) return BadRequest("There was an issue finding image file for reading");

        try
        {
            var path = _cacheService.GetCachedPagePath(chapter.Id, pageNumber);
            if (string.IsNullOrEmpty(path) || !System.IO.File.Exists(path)) return BadRequest($"No such image for page {pageNumber}");

            var content = await _directoryService.ReadFileAsync(path);
            var format = Path.GetExtension(path);

            // Calculates SHA1 Hash for byte[]
            Response.AddCacheHeader(content);

            // Save progress for the user
            await _readerService.SaveReadingProgress(new ProgressDto()
            {
                ChapterId = chapterId,
                PageNum = pageNumber,
                SeriesId = seriesId,
                VolumeId = volumeId,
                LibraryId = libraryId
            }, await GetUser(apiKey));

            return File(content, MimeTypeMap.GetMimeType(format));
        }
        catch (Exception)
        {
            _cacheService.CleanupChapters(new []{ chapterId });
            throw;
        }
    }

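    /// <summary>
    /// Returns the server favicon (.ico) for OPDS clients; the response is client-cached for an hour.
    /// </summary>
    /// <param name="apiKey">User's API Key</param>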
    [HttpGet("{apiKey}/favicon")]
    [ResponseCache(Duration = 60 * 60, Location = ResponseCacheLocation.Client, NoStore = false)]
    public async Task<ActionResult> GetFavicon(string apiKey)
    {
        var files = _directoryService.GetFilesWithExtension(Path.Join(Directory.GetCurrentDirectory(), ".."), @"\.ico");
        if (files.Length == 0) return BadRequest("Cannot find icon");
        var path = files[0];
        var content = await _directoryService.ReadFileAsync(path);
        var format = Path.GetExtension(path);

        return File(content, MimeTypeMap.GetMimeType(format));
    }

    /// <summary>
    /// Gets the user from the API key
    /// </summary>
    /// <returns></returns>
    private async Task<int> GetUser(string apiKey)
    {
        try
        {
            var user = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(apiKey);
            return user;
        }
        catch
        {
            /* Do nothing */
        }
        throw new KavitaException("User does not exist");
    }

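    /// <summary>
    /// Builds the OPDS-PS page stream link for a file, carrying total pages and the user's last-read position when available.
    /// </summary>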
    private async Task<FeedLink> CreatePageStreamLink(int libraryId, int seriesId, int volumeId, int chapterId, MangaFile mangaFile, string apiKey, string prefix)
    {
        var userId = await GetUser(apiKey);
        var progress = await _unitOfWork.AppUserProgressRepository.GetUserProgressDtoAsync(chapterId, userId);

        // TODO: Type could be wrong
        var link = CreateLink(FeedLinkRelation.Stream, "image/jpeg",
            $"{prefix}{apiKey}/image?libraryId={libraryId}&seriesId={seriesId}&volumeId={volumeId}&chapterId={chapterId}&pageNumber=" + "{pageNumber}");
        link.TotalPages = mangaFile.Pages;
        if (progress != null)
        {
            link.LastRead = progress.PageNum;
            link.LastReadDate = progress.LastModifiedUtc;
        }
        link.IsPageStream = true;
        return link;
    }

    private static FeedLink CreateLink(string rel, string type, string href, string? title = null)
    {
        return new FeedLink()
        {
            Rel = rel,
            Href = href,
            Type = type,
            Title = string.IsNullOrEmpty(title) ? string.Empty : title
        };
    }

    private static Feed CreateFeed(string title, string href, string apiKey, string prefix)
    {
        var link = CreateLink(FeedLinkRelation.Self, string.IsNullOrEmpty(href) ?
            FeedLinkType.AtomNavigation :
            FeedLinkType.AtomAcquisition, prefix + href);

        return new Feed()
        {
            Title = title,
            Icon = $"{prefix}{apiKey}/favicon",
            Links = new List<FeedLink>()
            {
                link,
                CreateLink(FeedLinkRelation.Start, FeedLinkType.AtomNavigation, $"{prefix}{apiKey}"),
                CreateLink(FeedLinkRelation.Search, FeedLinkType.AtomSearch, $"{prefix}{apiKey}/search")
            },
        };
    }

    private string SerializeXml(Feed feed)
    {
        if (feed == null) return string.Empty;
        using var sm = new StringWriter();
        _xmlSerializer.Serialize(sm, feed);
        return sm.ToString().Replace("utf-16", "utf-8"); // Chunky cannot accept UTF-16 feeds
    }
}