Merge branch 'develop'

Joseph Milazzo 2021-06-30 16:06:35 -05:00
commit 23ecd34717
96 changed files with 3441 additions and 449 deletions

12
.github/FUNDING.yml vendored Normal file

@@ -0,0 +1,12 @@
# These are supported funding model platforms
github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: kavita # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: ["https://paypal.me/majora2007"]


@@ -0,0 +1,17 @@
name: Release messages to discord announcement channel
on:
  release:
    types:
      - created
jobs:
  run_main:
    runs-on: ubuntu-18.04
    name: Sends custom message
    steps:
      - name: Sending message
        uses: nhevia/discord-styled-releases@main
        with:
          webhook_id: ${{ secrets.DISCORD_WEBHOOK_ID }}
          webhook_token: ${{ secrets.DISCORD_WEBHOOK_TOKEN }}


@@ -1,4 +1,4 @@
-name: CI to Docker Hub
+name: Build Nightly Docker
on:
  push:
@@ -13,12 +13,46 @@ jobs:
      - name: Check Out Repo
        uses: actions/checkout@v2
+      - name: Check Out WebUI
+        uses: actions/checkout@v2
+        with:
+          repository: Kareadita/Kavita-webui
+          ref: develop
+          path: Kavita-webui/
+      - name: NodeJS to Compile WebUI
+        uses: actions/setup-node@v2.1.5
+        with:
+          node-version: '14'
+      - run: |
+          cd Kavita-webui/ || exit
+          echo 'Installing web dependencies'
+          npm install
+          echo 'Building UI'
+          npm run prod
+          echo 'Copying back to Kavita wwwroot'
+          rsync -a dist/ ../API/wwwroot/
+          cd ../ || exit
+      - name: Compile dotnet app
+        uses: actions/setup-dotnet@v1
+        with:
+          dotnet-version: '5.0.x'
+      - run: ./action-build.sh
      - name: Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
@@ -27,10 +61,19 @@ jobs:
        id: docker_build
        uses: docker/build-push-action@v2
        with:
-          context: ./
-          file: ./Dockerfile
+          context: .
+          platforms: linux/amd64,linux/arm/v7,linux/arm64
          push: true
-          tags: kizaing/kavita:nightly-amd64
+          tags: kizaing/kavita:nightly
      - name: Image digest
        run: echo ${{ steps.docker_build.outputs.digest }}
+      - name: Notify Discord
+        uses: rjstone/discord-webhook-notify@v1
+        with:
+          severity: info
+          description:
+          details: 'https://hub.docker.com/r/kizaing/kavita/tags?page=1&ordering=last_updated'
+          text: A new nightly build has been released for docker.
+          webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }}

79
.github/workflows/stable-docker.yml vendored Normal file

@@ -0,0 +1,79 @@
name: Build Stable Docker
on:
  push:
    branches:
      - 'main'
jobs:
  docker:
    runs-on: ubuntu-latest
    steps:
      - name: Check Out Repo
        uses: actions/checkout@v2
      - name: Check Out WebUI
        uses: actions/checkout@v2
        with:
          repository: Kareadita/Kavita-webui
          ref: main
          path: Kavita-webui/
      - name: NodeJS to Compile WebUI
        uses: actions/setup-node@v2.1.5
        with:
          node-version: '14'
      - run: |
          cd Kavita-webui/ || exit
          echo 'Installing web dependencies'
          npm install
          echo 'Building UI'
          npm run prod
          echo 'Copying back to Kavita wwwroot'
          rsync -a dist/ ../API/wwwroot/
          cd ../ || exit
      - name: Compile dotnet app
        uses: actions/setup-dotnet@v1
        with:
          dotnet-version: '5.0.x'
      - run: ./action-build.sh
      - name: Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: .
          platforms: linux/amd64,linux/arm/v7,linux/arm64
          push: true
          tags: kizaing/kavita:latest
      - name: Image digest
        run: echo ${{ steps.docker_build.outputs.digest }}
      - name: Notify Discord
        uses: rjstone/discord-webhook-notify@v1
        with:
          severity: info
          description:
          details: 'https://hub.docker.com/r/kizaing/kavita/tags?page=1&ordering=last_updated'
          text: A new stable build has been released for docker.
          webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }}

1
.gitignore vendored

@@ -454,3 +454,4 @@ cache/
/API/temp/
_temp/
_output/
+stats/


@@ -38,6 +38,14 @@ namespace API.Tests.Comparers
new[] {"Batman - Black white vol 1 #04.cbr", "Batman - Black white vol 1 #03.cbr", "Batman - Black white vol 1 #01.cbr", "Batman - Black white vol 1 #02.cbr"},
new[] {"Batman - Black white vol 1 #01.cbr", "Batman - Black white vol 1 #02.cbr", "Batman - Black white vol 1 #03.cbr", "Batman - Black white vol 1 #04.cbr"}
)]
+[InlineData(
+new[] {"3and4.cbz", "The World God Only Knows - Oneshot.cbz", "5.cbz", "1and2.cbz"},
+new[] {"1and2.cbz", "3and4.cbz", "5.cbz", "The World God Only Knows - Oneshot.cbz"}
+)]
+[InlineData(
+new[] {"Solo Leveling - c000 (v01) - p000 [Cover] [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p001 [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p002 [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p003 [dig] [Yen Press] [LuCaZ].jpg"},
+new[] {"Solo Leveling - c000 (v01) - p000 [Cover] [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p001 [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p002 [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p003 [dig] [Yen Press] [LuCaZ].jpg"}
+)]
public void TestNaturalSortComparer(string[] input, string[] expected)
{
Array.Sort(input, _nc);


@@ -20,6 +20,8 @@ namespace API.Tests.Parser
[InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "Scott Pilgrim")]
[InlineData("Wolverine - Origins 003 (2006) (digital) (Minutemen-PhD)", "Wolverine - Origins")]
[InlineData("Invincible Vol 01 Family matters (2005) (Digital).cbr", "Invincible")]
+[InlineData("Amazing Man Comics chapter 25", "Amazing Man Comics")]
+[InlineData("Amazing Man Comics issue #25", "Amazing Man Comics")]
public void ParseComicSeriesTest(string filename, string expected)
{
Assert.Equal(expected, API.Parser.Parser.ParseComicSeries(filename));
@@ -40,6 +42,7 @@ namespace API.Tests.Parser
[InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")]
[InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "2")]
[InlineData("Superman v1 024 (09-10 1943)", "1")]
+[InlineData("Amazing Man Comics chapter 25", "0")]
public void ParseComicVolumeTest(string filename, string expected)
{
Assert.Equal(expected, API.Parser.Parser.ParseComicVolume(filename));
@@ -61,6 +64,7 @@ namespace API.Tests.Parser
[InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")]
[InlineData("Superman v1 024 (09-10 1943)", "24")]
[InlineData("Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr", "70.5")]
+[InlineData("Amazing Man Comics chapter 25", "25")]
public void ParseComicChapterTest(string filename, string expected)
{
Assert.Equal(expected, API.Parser.Parser.ParseComicChapter(filename));


@@ -145,6 +145,7 @@ namespace API.Tests.Parser
[InlineData("X-Men v1 #201 (September 2007).cbz", "X-Men")]
[InlineData("Kodoja #001 (March 2016)", "Kodoja")]
[InlineData("Boku No Kokoro No Yabai Yatsu - Chapter 054 I Prayed At The Shrine (V0).cbz", "Boku No Kokoro No Yabai Yatsu")]
+[InlineData("Kiss x Sis - Ch.36 - A Cold Home Visit.cbz", "Kiss x Sis")]
public void ParseSeriesTest(string filename, string expected)
{
Assert.Equal(expected, API.Parser.Parser.ParseSeries(filename));
@@ -241,7 +242,9 @@ namespace API.Tests.Parser
[InlineData("Ani-Hina Art Collection.cbz", true)]
[InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", true)]
[InlineData("A Town Where You Live - Bonus Chapter.zip", true)]
-[InlineData("Yuki Merry - 4-Komga Anthology", true)]
+[InlineData("Yuki Merry - 4-Komga Anthology", false)]
+[InlineData("Beastars - SP01", false)]
+[InlineData("Beastars SP01", false)]
public void ParseMangaSpecialTest(string input, bool expected)
{
Assert.Equal(expected, !string.IsNullOrEmpty(API.Parser.Parser.ParseMangaSpecial(input)));


@@ -6,6 +6,16 @@ namespace API.Tests.Parser
public class ParserTests
{
+[Theory]
+[InlineData("Beastars - SP01", true)]
+[InlineData("Beastars SP01", true)]
+[InlineData("Beastars Special 01", false)]
+[InlineData("Beastars Extra 01", false)]
+public void HasSpecialTest(string input, bool expected)
+{
+Assert.Equal(expected, HasSpecialMarker(input));
+}
[Theory]
[InlineData("0001", "1")]
[InlineData("1", "1")]


@@ -16,11 +16,12 @@ namespace API.Tests.Services
private readonly ITestOutputHelper _testOutputHelper;
private readonly ArchiveService _archiveService;
private readonly ILogger<ArchiveService> _logger = Substitute.For<ILogger<ArchiveService>>();
+private readonly ILogger<DirectoryService> _directoryServiceLogger = Substitute.For<ILogger<DirectoryService>>();
public ArchiveServiceTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
-_archiveService = new ArchiveService(_logger);
+_archiveService = new ArchiveService(_logger, new DirectoryService(_directoryServiceLogger));
}
[Theory]
@@ -154,7 +155,7 @@
[InlineData("sorting.zip", "sorting.expected.jpg")]
public void GetCoverImage_Default_Test(string inputFile, string expectedOutputFile)
{
-var archiveService = Substitute.For<ArchiveService>(_logger);
+var archiveService = Substitute.For<ArchiveService>(_logger, new DirectoryService(_directoryServiceLogger));
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages");
var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile));
archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.Default);
@@ -174,7 +175,7 @@
[InlineData("sorting.zip", "sorting.expected.jpg")]
public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile)
{
-var archiveService = Substitute.For<ArchiveService>(_logger);
+var archiveService = Substitute.For<ArchiveService>(_logger, new DirectoryService(_directoryServiceLogger));
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages");
var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile));


@@ -64,23 +64,147 @@
<ItemGroup>
<None Remove="Hangfire-log.db" />
<None Remove="obj\**" />
<None Remove="wwwroot\**" />
</ItemGroup>
<ItemGroup>
<Compile Remove="Interfaces\IMetadataService.cs" />
<Compile Remove="obj\**" />
<Compile Remove="wwwroot\**" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Remove="obj\**" />
<EmbeddedResource Remove="wwwroot\**" />
</ItemGroup>
<ItemGroup>
<Content Remove="obj\**" />
<Content Remove="wwwroot\**" />
</ItemGroup>
<ItemGroup>
<_ContentIncludedByDefault Remove="logs\kavita.json" />
<_ContentIncludedByDefault Remove="wwwroot\3rdpartylicenses.txt" />
<_ContentIncludedByDefault Remove="wwwroot\6.d9925ea83359bb4c7278.js" />
<_ContentIncludedByDefault Remove="wwwroot\6.d9925ea83359bb4c7278.js.map" />
<_ContentIncludedByDefault Remove="wwwroot\7.860cdd6fd9d758e6c210.js" />
<_ContentIncludedByDefault Remove="wwwroot\7.860cdd6fd9d758e6c210.js.map" />
<_ContentIncludedByDefault Remove="wwwroot\8.028f6737a2f0621d40c7.js" />
<_ContentIncludedByDefault Remove="wwwroot\8.028f6737a2f0621d40c7.js.map" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\EBGarmond\EBGaramond-Italic-VariableFont_wght.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\EBGarmond\EBGaramond-VariableFont_wght.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\EBGarmond\OFL.txt" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Black.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-BlackItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Bold.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-BoldItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ExtraBold.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ExtraBoldItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ExtraLight.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ExtraLightItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Italic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Light.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-LightItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Medium.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-MediumItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Regular.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-SemiBold.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-SemiBoldItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Thin.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ThinItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\OFL.txt" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Black.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-BlackItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Bold.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-BoldItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Italic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Light.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-LightItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Regular.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Thin.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-ThinItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\OFL.txt" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Baskerville\LibreBaskerville-Bold.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Baskerville\LibreBaskerville-Italic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Baskerville\LibreBaskerville-Regular.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Baskerville\OFL.txt" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Caslon\LibreCaslonText-Bold.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Caslon\LibreCaslonText-Italic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Caslon\LibreCaslonText-Regular.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Caslon\OFL.txt" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Black.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-BlackItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Bold.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-BoldItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Italic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Light.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-LightItalic.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Regular.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\OFL.txt" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Nanum_Gothic\NanumGothic-Bold.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Nanum_Gothic\NanumGothic-ExtraBold.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Nanum_Gothic\NanumGothic-Regular.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Nanum_Gothic\OFL.txt" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\OFL.txt" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\Oswald-VariableFont_wght.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\README.txt" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-Bold.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-ExtraLight.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-Light.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-Medium.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-Regular.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-SemiBold.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\RocknRoll_One\OFL.txt" />
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\RocknRoll_One\RocknRollOne-Regular.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder-min.png" />
<_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder.png" />
<_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder2-min.png" />
<_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder2.dark-min.png" />
<_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder2.dark.png" />
<_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder2.png" />
<_ContentIncludedByDefault Remove="wwwroot\assets\images\image-placeholder-min.png" />
<_ContentIncludedByDefault Remove="wwwroot\assets\images\image-placeholder.dark-min.png" />
<_ContentIncludedByDefault Remove="wwwroot\assets\images\image-placeholder.dark.png" />
<_ContentIncludedByDefault Remove="wwwroot\assets\images\image-placeholder.png" />
<_ContentIncludedByDefault Remove="wwwroot\assets\images\preset-light.png" />
<_ContentIncludedByDefault Remove="wwwroot\assets\themes\dark.scss" />
<_ContentIncludedByDefault Remove="wwwroot\common.ad975892146299f80adb.js" />
<_ContentIncludedByDefault Remove="wwwroot\common.ad975892146299f80adb.js.map" />
<_ContentIncludedByDefault Remove="wwwroot\EBGaramond-VariableFont_wght.2a1da2dbe7a28d63f8cb.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.0fea24969112a781acd2.eot" />
<_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.c967a94cfbe2b06627ff.woff2" />
<_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.dc2cbadd690e1d4b2c9c.woff" />
<_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.e33e2cf6e02cac2ccb77.svg" />
<_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.ec82f282c7f54b637098.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.06b9d19ced8d17f3d5cb.svg" />
<_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.08f9891a6f44d9546678.eot" />
<_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.1008b5226941c24f4468.woff2" />
<_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.1069ea55beaa01060302.woff" />
<_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.1495f578452eb676f730.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.10ecefc282f2761808bf.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.371dbce0dd46bd4d2033.svg" />
<_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.3a24a60e7f9c6574864a.eot" />
<_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.3ceb50e7bcafb577367c.woff2" />
<_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.46fdbd2d897f8824e63c.woff" />
<_ContentIncludedByDefault Remove="wwwroot\favicon.ico" />
<_ContentIncludedByDefault Remove="wwwroot\FiraSans-Regular.1c0bf0728b51cb9f2ddc.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\index.html" />
<_ContentIncludedByDefault Remove="wwwroot\Lato-Regular.9919edff6283018571ad.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\LibreBaskerville-Regular.a27f99ca45522bb3d56d.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\main.44f5c0973044295d8be0.js" />
<_ContentIncludedByDefault Remove="wwwroot\main.44f5c0973044295d8be0.js.map" />
<_ContentIncludedByDefault Remove="wwwroot\Merriweather-Regular.55c73e48e04ec926ebfe.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\NanumGothic-Regular.6c84540de7730f833d6c.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\polyfills.348e08e9d0e910a15938.js" />
<_ContentIncludedByDefault Remove="wwwroot\polyfills.348e08e9d0e910a15938.js.map" />
<_ContentIncludedByDefault Remove="wwwroot\RocknRollOne-Regular.c75da4712d1e65ed1f69.ttf" />
<_ContentIncludedByDefault Remove="wwwroot\runtime.ea545c6916f85411478f.js" />
<_ContentIncludedByDefault Remove="wwwroot\runtime.ea545c6916f85411478f.js.map" />
<_ContentIncludedByDefault Remove="wwwroot\styles.4bd902bb3037f36f2c64.css" />
<_ContentIncludedByDefault Remove="wwwroot\styles.4bd902bb3037f36f2c64.css.map" />
<_ContentIncludedByDefault Remove="wwwroot\vendor.6b2a0912ae80e6fd297f.js" />
<_ContentIncludedByDefault Remove="wwwroot\vendor.6b2a0912ae80e6fd297f.js.map" />
</ItemGroup>
<ItemGroup>


@@ -2,7 +2,7 @@
// Version 2
// Taken from: https://www.codeproject.com/Articles/11016/Numeric-String-Sort-in-C
-using System;
+using static System.Char;
namespace API.Comparators
{
@@ -20,26 +20,26 @@ namespace API.Comparators
if (string.IsNullOrEmpty(s2)) return -1;
//WE style, special case
-var sp1 = Char.IsLetterOrDigit(s1, 0);
+var sp1 = IsLetterOrDigit(s1, 0);
-var sp2 = Char.IsLetterOrDigit(s2, 0);
+var sp2 = IsLetterOrDigit(s2, 0);
if(sp1 && !sp2) return 1;
if(!sp1 && sp2) return -1;
int i1 = 0, i2 = 0; //current index
while(true)
{
-var c1 = Char.IsDigit(s1, i1);
+var c1 = IsDigit(s1, i1);
-var c2 = Char.IsDigit(s2, i2);
+var c2 = IsDigit(s2, i2);
int r; // temp result
if(!c1 && !c2)
{
-bool letter1 = Char.IsLetter(s1, i1);
+bool letter1 = IsLetter(s1, i1);
-bool letter2 = Char.IsLetter(s2, i2);
+bool letter2 = IsLetter(s2, i2);
if((letter1 && letter2) || (!letter1 && !letter2))
{
if(letter1 && letter2)
{
-r = Char.ToLower(s1[i1]).CompareTo(Char.ToLower(s2[i2]));
+r = ToLower(s1[i1]).CompareTo(ToLower(s2[i2]));
}
else
{
@@ -114,8 +114,8 @@ namespace API.Comparators
{
nzStart = start;
end = start;
-bool countZeros = true;
+var countZeros = true;
-while(Char.IsDigit(s, end))
+while(IsDigit(s, end))
{
if(countZeros && s[end].Equals('0'))
{


@@ -0,0 +1,30 @@
using System;
namespace API.Configurations.CustomOptions
{
public class StatsOptions
{
public string ServerUrl { get; set; }
public string ServerSecret { get; set; }
public string SendDataAt { get; set; }
private const char Separator = ':';
public short SendDataHour => GetValueFromSendAt(0);
public short SendDataMinute => GetValueFromSendAt(1);
// The expected SendDataAt format is: Hour:Minute. Ex: 19:45
private short GetValueFromSendAt(int index)
{
var key = $"{nameof(StatsOptions)}:{nameof(SendDataAt)}";
if (string.IsNullOrEmpty(SendDataAt))
throw new InvalidOperationException($"{key} is invalid. Check the app settings file");
if (short.TryParse(SendDataAt.Split(Separator)[index], out var parsedValue))
return parsedValue;
throw new InvalidOperationException($"Could not parse {key}. Check the app settings file");
}
}
}
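
Note: SendDataAt is split on ':' into an hour and a minute. A minimal sketch of binding and reading these options (the "Stats" section name and the in-memory configuration are assumptions for illustration, not part of this commit):

// Example (not part of this commit): bind StatsOptions and read the configured send time.
// Requires Microsoft.Extensions.Configuration and Microsoft.Extensions.Configuration.Binder.
using System;
using System.Collections.Generic;
using API.Configurations.CustomOptions;
using Microsoft.Extensions.Configuration;

public static class StatsOptionsExample
{
    public static void Main()
    {
        var config = new ConfigurationBuilder()
            .AddInMemoryCollection(new Dictionary<string, string>
            {
                ["Stats:ServerUrl"] = "http://localhost:5000", // assumed keys/values
                ["Stats:ServerSecret"] = "secret",
                ["Stats:SendDataAt"] = "19:45"
            })
            .Build();

        var options = new StatsOptions();
        config.GetSection("Stats").Bind(options);

        Console.WriteLine(options.SendDataHour);   // 19
        Console.WriteLine(options.SendDataMinute); // 45
    }
}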


@@ -4,5 +4,9 @@
{
public const string AdminRole = "Admin";
public const string PlebRole = "Pleb";
+/// <summary>
+/// Used to give a user ability to download files from the server
+/// </summary>
+public const string DownloadRole = "Download";
}
}
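
DownloadController below is protected with [Authorize(Policy = "RequireDownloadRole")]. The policy registration itself is not shown in this diff; a sketch of the usual ASP.NET Core wiring (the helper class name and the inclusion of AdminRole are assumptions):

// Example (not part of this commit): assumed registration for the "RequireDownloadRole" policy.
using API.Constants;
using Microsoft.Extensions.DependencyInjection;

public static class AuthorizationPolicyRegistration // hypothetical helper, for illustration only
{
    public static IServiceCollection AddKavitaPolicies(this IServiceCollection services)
    {
        return services.AddAuthorization(options =>
        {
            options.AddPolicy("RequireAdminRole", policy => policy.RequireRole(PolicyConstants.AdminRole));
            // Assumption: admins should also pass the download check.
            options.AddPolicy("RequireDownloadRole",
                policy => policy.RequireRole(PolicyConstants.DownloadRole, PolicyConstants.AdminRole));
        });
    }
}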


@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
+using System.Reflection;
using System.Threading.Tasks;
using API.Constants;
using API.DTOs;
@@ -81,6 +82,8 @@ namespace API.Controllers
[HttpPost("register")]
public async Task<ActionResult<UserDto>> Register(RegisterDto registerDto)
{
+try
+{
if (await _userManager.Users.AnyAsync(x => x.NormalizedUserName == registerDto.Username.ToUpper()))
{
@@ -102,14 +105,17 @@ namespace API.Controllers
// When we register an admin, we need to grant them access to all Libraries.
if (registerDto.IsAdmin)
{
-_logger.LogInformation("{UserName} is being registered as admin. Granting access to all libraries", user.UserName);
+_logger.LogInformation("{UserName} is being registered as admin. Granting access to all libraries",
+user.UserName);
var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesAsync()).ToList();
foreach (var lib in libraries)
{
lib.AppUsers ??= new List<AppUser>();
lib.AppUsers.Add(user);
}
-if (libraries.Any() && !await _unitOfWork.Complete()) _logger.LogError("There was an issue granting library access. Please do this manually");
+if (libraries.Any() && !await _unitOfWork.CommitAsync())
+_logger.LogError("There was an issue granting library access. Please do this manually");
}
return new UserDto
@@ -119,6 +125,14 @@
Preferences = _mapper.Map<UserPreferencesDto>(user.UserPreferences)
};
}
+catch (Exception ex)
+{
+_logger.LogError(ex, "Something went wrong when registering user");
+await _unitOfWork.RollbackAsync();
+}
+return BadRequest("Something went wrong when registering user");
+}
[HttpPost("login")]
public async Task<ActionResult<UserDto>> Login(LoginDto loginDto)
@@ -139,7 +153,7 @@
user.UserPreferences ??= new AppUserPreferences();
_unitOfWork.UserRepository.Update(user);
-await _unitOfWork.Complete();
+await _unitOfWork.CommitAsync();
_logger.LogInformation("{UserName} logged in at {Time}", user.UserName, user.LastActive);
@@ -150,5 +164,50 @@
Preferences = _mapper.Map<UserPreferencesDto>(user.UserPreferences)
};
}
[HttpGet("roles")]
public ActionResult<IList<string>> GetRoles()
{
return typeof(PolicyConstants)
.GetFields(BindingFlags.Public | BindingFlags.Static)
.Where(f => f.FieldType == typeof(string))
.ToDictionary(f => f.Name,
f => (string) f.GetValue(null)).Values.ToList();
}
[HttpPost("update-rbs")]
public async Task<ActionResult> UpdateRoles(UpdateRbsDto updateRbsDto)
{
var user = await _userManager.Users
.Include(u => u.UserPreferences)
.SingleOrDefaultAsync(x => x.NormalizedUserName == updateRbsDto.Username.ToUpper());
if (updateRbsDto.Roles.Contains(PolicyConstants.AdminRole) ||
updateRbsDto.Roles.Contains(PolicyConstants.PlebRole))
{
return BadRequest("Invalid Roles");
}
var existingRoles = (await _userManager.GetRolesAsync(user))
.Where(s => s != PolicyConstants.AdminRole && s != PolicyConstants.PlebRole)
.ToList();
// Find what needs to be added and what needs to be removed
var rolesToRemove = existingRoles.Except(updateRbsDto.Roles);
var result = await _userManager.AddToRolesAsync(user, updateRbsDto.Roles);
if (!result.Succeeded)
{
await _unitOfWork.RollbackAsync();
return BadRequest("Something went wrong, unable to update user's roles");
}
if ((await _userManager.RemoveFromRolesAsync(user, rolesToRemove)).Succeeded)
{
return Ok();
}
await _unitOfWork.RollbackAsync();
return BadRequest("Something went wrong, unable to update user's roles");
}
}
}
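
Given the PolicyConstants in this commit (AdminRole, PlebRole, DownloadRole), the reflection in GetRoles above simply surfaces the constant string values. A standalone illustration of the same lookup:

// Example (not part of this commit): what the GetRoles reflection evaluates to.
using System;
using System.Linq;
using System.Reflection;
using API.Constants;

public static class RolesExample
{
    public static void Main()
    {
        var roles = typeof(PolicyConstants)
            .GetFields(BindingFlags.Public | BindingFlags.Static)
            .Where(f => f.FieldType == typeof(string))
            .Select(f => (string) f.GetValue(null))
            .ToList();

        // With the constants defined in this commit: Admin, Pleb, Download
        Console.WriteLine(string.Join(", ", roles));
    }
}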


@@ -186,6 +186,9 @@ namespace API.Controllers
var content = await contentFileRef.ReadContentAsync();
if (contentFileRef.ContentType != EpubContentType.XHTML_1_1) return Ok(content);
+// In more cases than not, due to this being XML not HTML, we need to escape the script tags.
+content = BookService.EscapeTags(content);
doc.LoadHtml(content);
var body = doc.DocumentNode.SelectSingleNode("//body");


@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Constants;
@@ -9,7 +10,6 @@ using API.Interfaces;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Mvc;
-using Microsoft.Extensions.Logging;
namespace API.Controllers
{
@@ -33,13 +33,9 @@ namespace API.Controllers
{
return await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync();
}
-else
-{
return await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync();
}
-}
[Authorize(Policy = "RequireAdminRole")]
[HttpGet("search")]
public async Task<IEnumerable<CollectionTagDto>> SearchTags(string queryString)
@@ -64,7 +60,7 @@
if (_unitOfWork.HasChanges())
{
-if (await _unitOfWork.Complete())
+if (await _unitOfWork.CommitAsync())
{
return Ok("Tag updated successfully");
}
@@ -80,6 +76,8 @@
[Authorize(Policy = "RequireAdminRole")]
[HttpPost("update-series")]
public async Task<ActionResult> UpdateSeriesForTag(UpdateSeriesForTagDto updateSeriesForTagDto)
{
+try
+{
var tag = await _unitOfWork.CollectionTagRepository.GetFullTagAsync(updateSeriesForTagDto.Tag.Id);
if (tag == null) return BadRequest("Not a valid Tag");
@@ -103,16 +101,18 @@
_unitOfWork.CollectionTagRepository.Remove(tag);
}
-if (_unitOfWork.HasChanges() && await _unitOfWork.Complete())
+if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
{
return Ok("Tag updated");
}
+}
+catch (Exception)
+{
+await _unitOfWork.RollbackAsync();
+}
return BadRequest("Something went wrong. Please try again.");
}
}
}


@@ -0,0 +1,135 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using Kavita.Common;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.StaticFiles;
namespace API.Controllers
{
[Authorize(Policy = "RequireDownloadRole")]
public class DownloadController : BaseApiController
{
private readonly IUnitOfWork _unitOfWork;
private readonly IArchiveService _archiveService;
private readonly IDirectoryService _directoryService;
public DownloadController(IUnitOfWork unitOfWork, IArchiveService archiveService, IDirectoryService directoryService)
{
_unitOfWork = unitOfWork;
_archiveService = archiveService;
_directoryService = directoryService;
}
[HttpGet("volume-size")]
public async Task<ActionResult<long>> GetVolumeSize(int volumeId)
{
var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId);
return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath)));
}
[HttpGet("chapter-size")]
public async Task<ActionResult<long>> GetChapterSize(int chapterId)
{
var files = await _unitOfWork.VolumeRepository.GetFilesForChapter(chapterId);
return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath)));
}
[HttpGet("series-size")]
public async Task<ActionResult<long>> GetSeriesSize(int seriesId)
{
var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId);
return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath)));
}
[HttpGet("volume")]
public async Task<ActionResult> DownloadVolume(int volumeId)
{
var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId);
try
{
if (files.Count == 1)
{
return await GetFirstFileDownload(files);
}
var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files.Select(c => c.FilePath),
$"download_{User.GetUsername()}_v{volumeId}");
return File(fileBytes, "application/zip", Path.GetFileNameWithoutExtension(zipPath) + ".zip");
}
catch (KavitaException ex)
{
return BadRequest(ex.Message);
}
}
private async Task<ActionResult> GetFirstFileDownload(IEnumerable<MangaFile> files)
{
var firstFile = files.Select(c => c.FilePath).First();
var fileProvider = new FileExtensionContentTypeProvider();
// Figures out what the content type should be based on the file name.
if (!fileProvider.TryGetContentType(firstFile, out var contentType))
{
contentType = Path.GetExtension(firstFile).ToLowerInvariant() switch
{
".cbz" => "application/zip",
".cbr" => "application/vnd.rar",
".cb7" => "application/x-compressed",
".epub" => "application/epub+zip",
".7z" => "application/x-7z-compressed",
".7zip" => "application/x-7z-compressed",
_ => contentType
};
}
return File(await _directoryService.ReadFileAsync(firstFile), contentType, Path.GetFileNameWithoutExtension(firstFile));
}
[HttpGet("chapter")]
public async Task<ActionResult> DownloadChapter(int chapterId)
{
var files = await _unitOfWork.VolumeRepository.GetFilesForChapter(chapterId);
try
{
if (files.Count == 1)
{
return await GetFirstFileDownload(files);
}
var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files.Select(c => c.FilePath),
$"download_{User.GetUsername()}_c{chapterId}");
return File(fileBytes, "application/zip", Path.GetFileNameWithoutExtension(zipPath) + ".zip");
}
catch (KavitaException ex)
{
return BadRequest(ex.Message);
}
}
[HttpGet("series")]
public async Task<ActionResult> DownloadSeries(int seriesId)
{
var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId);
try
{
if (files.Count == 1)
{
return await GetFirstFileDownload(files);
}
var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files.Select(c => c.FilePath),
$"download_{User.GetUsername()}_s{seriesId}");
return File(fileBytes, "application/zip", Path.GetFileNameWithoutExtension(zipPath) + ".zip");
}
catch (KavitaException ex)
{
return BadRequest(ex.Message);
}
}
}
}


@@ -67,7 +67,7 @@ namespace API.Controllers
}
-if (!await _unitOfWork.Complete()) return BadRequest("There was a critical issue. Please try again.");
+if (!await _unitOfWork.CommitAsync()) return BadRequest("There was a critical issue. Please try again.");
_logger.LogInformation("Created a new library: {LibraryName}", library.Name);
_taskScheduler.ScanLibrary(library.Id);
@@ -133,7 +133,7 @@
return Ok(_mapper.Map<MemberDto>(user));
}
-if (await _unitOfWork.Complete())
+if (await _unitOfWork.CommitAsync())
{
_logger.LogInformation("Added: {SelectedLibraries} to {Username}",libraryString, updateLibraryForUserDto.Username);
return Ok(_mapper.Map<MemberDto>(user));
@@ -199,7 +199,7 @@
_unitOfWork.LibraryRepository.Update(library);
-if (!await _unitOfWork.Complete()) return BadRequest("There was a critical issue updating the library.");
+if (!await _unitOfWork.CommitAsync()) return BadRequest("There was a critical issue updating the library.");
if (differenceBetweenFolders.Any())
{
_taskScheduler.ScanLibrary(library.Id, true);


@@ -5,6 +5,7 @@ using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
using API.DTOs;
+using API.DTOs.Reader;
using API.Entities;
using API.Extensions;
using API.Interfaces;
@@ -50,14 +51,26 @@ namespace API.Controllers
return File(content, "image/" + format);
}
-[HttpGet("chapter-path")]
+[HttpGet("chapter-info")]
-public async Task<ActionResult<string>> GetImagePath(int chapterId)
+public async Task<ActionResult<ChapterInfoDto>> GetChapterInfo(int chapterId)
{
var chapter = await _cacheService.Ensure(chapterId);
-if (chapter == null) return BadRequest("There was an issue finding image file for reading");
+if (chapter == null) return BadRequest("Could not find Chapter");
+var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(chapter.VolumeId);
+if (volume == null) return BadRequest("Could not find Volume");
var (_, mangaFile) = await _cacheService.GetCachedPagePath(chapter, 0);
-return Ok(mangaFile.FilePath);
+var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId);
+return Ok(new ChapterInfoDto()
+{
+ChapterNumber = chapter.Range,
+VolumeNumber = volume.Number + string.Empty,
+VolumeId = volume.Id,
+FileName = Path.GetFileName(mangaFile.FilePath),
+SeriesName = series?.Name,
+IsSpecial = chapter.IsSpecial,
+Pages = chapter.Pages,
+});
}
[HttpGet("get-bookmark")]
@@ -116,7 +129,7 @@
_unitOfWork.UserRepository.Update(user);
-if (await _unitOfWork.Complete())
+if (await _unitOfWork.CommitAsync())
{
return Ok();
}
@@ -157,7 +170,7 @@
_unitOfWork.UserRepository.Update(user);
-if (await _unitOfWork.Complete())
+if (await _unitOfWork.CommitAsync())
{
return Ok();
}
@@ -198,7 +211,7 @@
_unitOfWork.UserRepository.Update(user);
-if (await _unitOfWork.Complete())
+if (await _unitOfWork.CommitAsync())
{
return Ok();
}
@@ -251,7 +264,7 @@
_unitOfWork.UserRepository.Update(user);
-if (await _unitOfWork.Complete())
+if (await _unitOfWork.CommitAsync())
{
return Ok();
}
@@ -272,20 +285,10 @@
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
var volumes = await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, user.Id);
var currentVolume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId);
+var currentChapter = await _unitOfWork.VolumeRepository.GetChapterAsync(currentChapterId);
if (currentVolume.Number == 0)
{
-var next = false;
-foreach (var chapter in currentVolume.Chapters)
-{
-if (next)
-{
-return Ok(chapter.Id);
-}
-if (currentChapterId == chapter.Id) next = true;
-}
-var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapterId);
+var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapter.Number);
if (chapterId > 0) return Ok(chapterId);
}
@@ -293,7 +296,7 @@
{
if (volume.Number == currentVolume.Number && volume.Chapters.Count > 1)
{
-var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapterId);
+var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapter.Number);
if (chapterId > 0) return Ok(chapterId);
}
@@ -305,7 +308,7 @@
return Ok(-1);
}
-private int GetNextChapterId(IEnumerable<Chapter> chapters, int currentChapterId)
+private static int GetNextChapterId(IEnumerable<Chapter> chapters, string currentChapterNumber)
{
var next = false;
foreach (var chapter in chapters)
@@ -314,7 +317,7 @@
{
return chapter.Id;
}
-if (currentChapterId == chapter.Id) next = true;
+if (currentChapterNumber.Equals(chapter.Number)) next = true;
}
return -1;
@@ -333,11 +336,11 @@
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
var volumes = await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, user.Id);
var currentVolume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId);
+var currentChapter = await _unitOfWork.VolumeRepository.GetChapterAsync(currentChapterId);
if (currentVolume.Number == 0)
{
-var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapterId);
+var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapter.Number);
if (chapterId > 0) return Ok(chapterId);
}
@@ -345,7 +348,7 @@
{
if (volume.Number == currentVolume.Number)
{
-var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapterId);
+var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapter.Number);
if (chapterId > 0) return Ok(chapterId);
}
if (volume.Number == currentVolume.Number - 1)


@@ -114,7 +114,7 @@ namespace API.Controllers
_unitOfWork.UserRepository.Update(user);
-if (!await _unitOfWork.Complete()) return BadRequest("There was a critical error.");
+if (!await _unitOfWork.CommitAsync()) return BadRequest("There was a critical error.");
return Ok();
}
@@ -139,7 +139,7 @@
_unitOfWork.SeriesRepository.Update(series);
-if (await _unitOfWork.Complete())
+if (await _unitOfWork.CommitAsync())
{
return Ok();
}
@@ -189,6 +189,8 @@
[HttpPost("metadata")]
public async Task<ActionResult> UpdateSeriesMetadata(UpdateSeriesMetadataDto updateSeriesMetadataDto)
{
+try
+{
var seriesId = updateSeriesMetadataDto.SeriesMetadata.SeriesId;
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId);
@@ -242,10 +244,15 @@
return Ok("No changes to save");
}
-if (await _unitOfWork.Complete())
+if (await _unitOfWork.CommitAsync())
{
return Ok("Successfully updated");
}
+}
+catch (Exception)
+{
+await _unitOfWork.RollbackAsync();
+}
return BadRequest("Could not update metadata");
}


@@ -1,10 +1,9 @@
using System;
using System.IO;
-using System.IO.Compression;
using System.Threading.Tasks;
using API.Extensions;
using API.Interfaces.Services;
-using API.Services;
+using Kavita.Common;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Configuration;
@@ -19,17 +18,17 @@ namespace API.Controllers
private readonly IHostApplicationLifetime _applicationLifetime;
private readonly ILogger<ServerController> _logger;
private readonly IConfiguration _config;
-private readonly IDirectoryService _directoryService;
private readonly IBackupService _backupService;
+private readonly IArchiveService _archiveService;
public ServerController(IHostApplicationLifetime applicationLifetime, ILogger<ServerController> logger, IConfiguration config,
-IDirectoryService directoryService, IBackupService backupService)
+IBackupService backupService, IArchiveService archiveService)
{
_applicationLifetime = applicationLifetime;
_logger = logger;
_config = config;
-_directoryService = directoryService;
_backupService = backupService;
+_archiveService = archiveService;
}
[HttpPost("restart")]
@@ -45,33 +44,17 @@
public async Task<ActionResult> GetLogs()
{
var files = _backupService.LogFiles(_config.GetMaxRollingFiles(), _config.GetLoggingFileName());
-var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
-var dateString = DateTime.Now.ToShortDateString().Replace("/", "_");
-var tempLocation = Path.Join(tempDirectory, "logs_" + dateString);
-DirectoryService.ExistOrCreate(tempLocation);
-if (!_directoryService.CopyFilesToDirectory(files, tempLocation))
-{
-return BadRequest("Unable to copy files to temp directory for log download.");
-}
-var zipPath = Path.Join(tempDirectory, $"kavita_logs_{dateString}.zip");
try
{
-ZipFile.CreateFromDirectory(tempLocation, zipPath);
+var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files, "logs");
-}
-catch (AggregateException ex)
-{
-_logger.LogError(ex, "There was an issue when archiving library backup");
-return BadRequest("There was an issue when archiving library backup");
-}
-var fileBytes = await _directoryService.ReadFileAsync(zipPath);
-DirectoryService.ClearAndDeleteDirectory(tempLocation);
-(new FileInfo(zipPath)).Delete();
return File(fileBytes, "application/zip", Path.GetFileName(zipPath));
}
+catch (KavitaException ex)
+{
+return BadRequest(ex.Message);
+}
+}
}
}
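
GetLogs now hands the temp-copy, zip, and cleanup work to IArchiveService.CreateZipForDownload, whose implementation is not part of this excerpt. A rough sketch of what such a method could look like, pieced together from the inline logic removed above (the body, error messages, and KavitaException constructor are assumptions):

// Example (not part of this commit): assumed shape of ArchiveService.CreateZipForDownload,
// reconstructed from the logic removed from ServerController.GetLogs. Assumes ArchiveService
// holds a DirectoryService (_directoryService) and an ILogger (_logger), as the test changes suggest.
public async Task<(byte[] fileBytes, string zipPath)> CreateZipForDownload(IEnumerable<string> files, string tempFolder)
{
    var dateString = DateTime.Now.ToShortDateString().Replace("/", "_");
    var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
    var tempLocation = Path.Join(tempDirectory, $"{tempFolder}_{dateString}");

    DirectoryService.ExistOrCreate(tempLocation);
    if (!_directoryService.CopyFilesToDirectory(files, tempLocation))
    {
        throw new KavitaException("Unable to copy files to temp directory for download.");
    }

    var zipPath = Path.Join(tempDirectory, $"kavita_{tempFolder}_{dateString}.zip");
    try
    {
        ZipFile.CreateFromDirectory(tempLocation, zipPath);
    }
    catch (AggregateException ex)
    {
        _logger.LogError(ex, "There was an issue creating the zip archive");
        throw new KavitaException("There was an issue creating the zip archive");
    }

    var fileBytes = await _directoryService.ReadFileAsync(zipPath);
    DirectoryService.ClearAndDeleteDirectory(tempLocation);
    new FileInfo(zipPath).Delete();
    return (fileBytes, zipPath);
}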


@@ -8,6 +8,7 @@ using API.Entities.Enums;
using API.Extensions;
using API.Helpers.Converters;
using API.Interfaces;
+using Kavita.Common;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Configuration;
@@ -15,7 +16,7 @@ using Microsoft.Extensions.Logging;
namespace API.Controllers
{
-[Authorize]
+[Authorize(Policy = "RequireAdminRole")]
public class SettingsController : BaseApiController
{
private readonly ILogger<SettingsController> _logger;
@@ -34,10 +35,12 @@ namespace API.Controllers
[HttpGet("")]
public async Task<ActionResult<ServerSettingDto>> GetSettings()
{
-return Ok(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync());
+var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
+settingsDto.Port = Configuration.GetPort(Program.GetAppSettingFilename());
+settingsDto.LoggingLevel = Configuration.GetLogLevel(Program.GetAppSettingFilename());
+return Ok(settingsDto);
}
-[Authorize(Policy = "RequireAdminRole")]
[HttpPost("")]
public async Task<ActionResult<ServerSettingDto>> UpdateSettings(ServerSettingDto updateSettingsDto)
{
@@ -76,47 +79,63 @@
if (setting.Key == ServerSettingKey.Port && updateSettingsDto.Port + "" != setting.Value)
{
setting.Value = updateSettingsDto.Port + "";
-Environment.SetEnvironmentVariable("KAVITA_PORT", setting.Value);
+// Port is managed in appSetting.json
+Configuration.UpdatePort(Program.GetAppSettingFilename(), updateSettingsDto.Port);
_unitOfWork.SettingsRepository.Update(setting);
}
if (setting.Key == ServerSettingKey.LoggingLevel && updateSettingsDto.LoggingLevel + "" != setting.Value)
{
setting.Value = updateSettingsDto.LoggingLevel + "";
+Configuration.UpdateLogLevel(Program.GetAppSettingFilename(), updateSettingsDto.LoggingLevel);
_unitOfWork.SettingsRepository.Update(setting);
}
+if (setting.Key == ServerSettingKey.AllowStatCollection && updateSettingsDto.AllowStatCollection + "" != setting.Value)
+{
+setting.Value = updateSettingsDto.AllowStatCollection + "";
+_unitOfWork.SettingsRepository.Update(setting);
+if (!updateSettingsDto.AllowStatCollection)
+{
+_taskScheduler.CancelStatsTasks();
+}
+else
+{
+_taskScheduler.ScheduleStatsTasks();
+}
+}
}
_configuration.GetSection("Logging:LogLevel:Default").Value = updateSettingsDto.LoggingLevel + "";
if (!_unitOfWork.HasChanges()) return Ok("Nothing was updated");
-if (!_unitOfWork.HasChanges() || !await _unitOfWork.Complete())
+if (!_unitOfWork.HasChanges() || !await _unitOfWork.CommitAsync())
+{
+await _unitOfWork.RollbackAsync();
return BadRequest("There was a critical issue. Please try again.");
+}
_logger.LogInformation("Server Settings updated");
_taskScheduler.ScheduleTasks();
return Ok(updateSettingsDto);
}
-[Authorize(Policy = "RequireAdminRole")]
[HttpGet("task-frequencies")]
public ActionResult<IEnumerable<string>> GetTaskFrequencies()
{
return Ok(CronConverter.Options);
}
-[Authorize(Policy = "RequireAdminRole")]
[HttpGet("library-types")]
public ActionResult<IEnumerable<string>> GetLibraryTypes()
{
return Ok(Enum.GetNames(typeof(LibraryType)));
}
-[Authorize(Policy = "RequireAdminRole")]
[HttpGet("log-levels")]
public ActionResult<IEnumerable<string>> GetLogLevels()
{
-return Ok(new [] {"Trace", "Debug", "Information", "Warning", "Critical", "None"});
+return Ok(new [] {"Trace", "Debug", "Information", "Warning", "Critical"});
}
}
}


@ -0,0 +1,40 @@
using System;
using System.Threading.Tasks;
using API.DTOs;
using API.Interfaces.Services;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
namespace API.Controllers
{
public class StatsController : BaseApiController
{
private readonly ILogger<StatsController> _logger;
private readonly IStatsService _statsService;
public StatsController(ILogger<StatsController> logger, IStatsService statsService)
{
_logger = logger;
_statsService = statsService;
}
[AllowAnonymous]
[HttpPost("client-info")]
public async Task<IActionResult> AddClientInfo([FromBody] ClientInfoDto clientInfoDto)
{
try
{
await _statsService.PathData(clientInfoDto);
return Ok();
}
catch (Exception e)
{
_logger.LogError(e, "Error updating the usage statistics");
Console.WriteLine(e);
throw;
}
}
}
}


@ -26,7 +26,7 @@ namespace API.Controllers
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(username);
_unitOfWork.UserRepository.Delete(user);
if (await _unitOfWork.Complete()) return Ok();
if (await _unitOfWork.CommitAsync()) return Ok();
return BadRequest("Could not delete the user.");
}
@ -61,6 +61,8 @@ namespace API.Controllers
existingPreferences.ReadingDirection = preferencesDto.ReadingDirection;
existingPreferences.ScalingOption = preferencesDto.ScalingOption;
existingPreferences.PageSplitOption = preferencesDto.PageSplitOption;
existingPreferences.AutoCloseMenu = preferencesDto.AutoCloseMenu;
existingPreferences.ReaderMode = preferencesDto.ReaderMode;
existingPreferences.BookReaderMargin = preferencesDto.BookReaderMargin;
existingPreferences.BookReaderLineSpacing = preferencesDto.BookReaderLineSpacing;
existingPreferences.BookReaderFontFamily = preferencesDto.BookReaderFontFamily;
@ -71,7 +73,7 @@ namespace API.Controllers
_unitOfWork.UserRepository.Update(existingPreferences);
if (await _unitOfWork.Complete())
if (await _unitOfWork.CommitAsync())
{
return Ok(preferencesDto);
}

API/DTOs/ClientInfoDto.cs

@ -0,0 +1,36 @@
using System;
namespace API.DTOs
{
public class ClientInfoDto
{
public ClientInfoDto()
{
CollectedAt = DateTime.UtcNow;
}
public string KavitaUiVersion { get; set; }
public string ScreenResolution { get; set; }
public string PlatformType { get; set; }
public DetailsVersion Browser { get; set; }
public DetailsVersion Os { get; set; }
public DateTime? CollectedAt { get; set; }
public bool IsTheSameDevice(ClientInfoDto clientInfoDto)
{
return (clientInfoDto.ScreenResolution ?? "").Equals(ScreenResolution) &&
(clientInfoDto.PlatformType ?? "").Equals(PlatformType) &&
(clientInfoDto.Browser?.Name ?? "").Equals(Browser?.Name) &&
(clientInfoDto.Os?.Name ?? "").Equals(Os?.Name) &&
clientInfoDto.CollectedAt.GetValueOrDefault().ToString("yyyy-MM-dd")
.Equals(CollectedAt.GetValueOrDefault().ToString("yyyy-MM-dd"));
}
}
public class DetailsVersion
{
public string Name { get; set; }
public string Version { get; set; }
}
}


@ -1,6 +1,4 @@
using System.Collections.Generic;
namespace API.DTOs
{
public class CollectionTagDto
{


@ -0,0 +1,16 @@
namespace API.DTOs.Reader
{
public class ChapterInfoDto
{
public string ChapterNumber { get; set; }
public string VolumeNumber { get; set; }
public int VolumeId { get; set; }
public string SeriesName { get; set; }
public string ChapterTitle { get; set; } = "";
public int Pages { get; set; }
public string FileName { get; set; }
public bool IsSpecial { get; set; }
}
}

API/DTOs/ServerInfoDto.cs

@ -0,0 +1,12 @@
namespace API.DTOs
{
public class ServerInfoDto
{
public string Os { get; set; }
public string DotNetVersion { get; set; }
public string RunTimeVersion { get; set; }
public string KavitaVersion { get; set; }
public string BuildBranch { get; set; }
public string Culture { get; set; }
}
}


@ -7,5 +7,6 @@
public string LoggingLevel { get; set; }
public string TaskBackup { get; set; }
public int Port { get; set; }
public bool AllowStatCollection { get; set; }
}
}

API/DTOs/UpdateRBSDto.cs

@ -0,0 +1,10 @@
using System.Collections.Generic;
namespace API.DTOs
{
public class UpdateRbsDto
{
public string Username { get; init; }
public IList<string> Roles { get; init; }
}
}


@ -1,5 +1,4 @@
using System.Collections.Generic;
using API.Entities;
namespace API.DTOs
{

API/DTOs/UsageInfoDto.cs

@ -0,0 +1,24 @@
using System.Collections.Generic;
using API.Entities.Enums;
namespace API.DTOs
{
public class UsageInfoDto
{
public UsageInfoDto()
{
FileTypes = new HashSet<string>();
LibraryTypesCreated = new HashSet<LibInfo>();
}
public int UsersCount { get; set; }
public IEnumerable<string> FileTypes { get; set; }
public IEnumerable<LibInfo> LibraryTypesCreated { get; set; }
}
public class LibInfo
{
public LibraryType Type { get; set; }
public int Count { get; set; }
}
}


@ -0,0 +1,33 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace API.DTOs
{
public class UsageStatisticsDto
{
public UsageStatisticsDto()
{
MarkAsUpdatedNow();
ClientsInfo = new List<ClientInfoDto>();
}
public string InstallId { get; set; }
public DateTime LastUpdate { get; set; }
public UsageInfoDto UsageInfo { get; set; }
public ServerInfoDto ServerInfo { get; set; }
public List<ClientInfoDto> ClientsInfo { get; set; }
public void MarkAsUpdatedNow()
{
LastUpdate = DateTime.UtcNow;
}
public void AddClientInfo(ClientInfoDto clientInfoDto)
{
if (ClientsInfo.Any(x => x.IsTheSameDevice(clientInfoDto))) return;
ClientsInfo.Add(clientInfoDto);
}
}
}
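For context, a minimal sketch (not part of the diff) of how AddClientInfo and ClientInfoDto.IsTheSameDevice interact, using only the DTOs defined above; a second report from the same resolution, platform, browser, OS and calendar day is dropped. The property values here are illustrative only.

// Illustrative only: exercises the dedup logic defined in the DTOs above.
var stats = new UsageStatisticsDto { InstallId = "example-install" };
var client = new ClientInfoDto
{
    KavitaUiVersion = "0.4.1",
    ScreenResolution = "1920x1080",
    PlatformType = "desktop",
    Browser = new DetailsVersion { Name = "Firefox", Version = "89" },
    Os = new DetailsVersion { Name = "Windows", Version = "10" }
};
stats.AddClientInfo(client);
stats.AddClientInfo(client); // IsTheSameDevice matches, so ClientsInfo.Count stays at 1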


@ -7,6 +7,8 @@ namespace API.DTOs
public ReadingDirection ReadingDirection { get; set; }
public ScalingOption ScalingOption { get; set; }
public PageSplitOption PageSplitOption { get; set; }
public ReaderMode ReaderMode { get; set; }
public bool AutoCloseMenu { get; set; }
public bool BookReaderDarkMode { get; set; } = false;
public int BookReaderMargin { get; set; }
public int BookReaderLineSpacing { get; set; }


@ -0,0 +1,35 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Interfaces;
using Microsoft.EntityFrameworkCore;
namespace API.Data
{
public class FileRepository : IFileRepository
{
private readonly DataContext _dbContext;
public FileRepository(DataContext context)
{
_dbContext = context;
}
public async Task<IEnumerable<string>> GetFileExtensions()
{
var fileExtensions = await _dbContext.MangaFile
.AsNoTracking()
.Select(x => x.FilePath)
.Distinct()
.ToArrayAsync();
var uniqueFileTypes = fileExtensions
.Select(Path.GetExtension)
.Where(x => x is not null)
.Distinct();
return uniqueFileTypes;
}
}
}
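As a rough illustration (not part of the commit), the repository above projects every MangaFile path and reduces it to distinct extensions in memory; a hedged usage sketch, assuming an existing DataContext instance named dbContext:

// Hypothetical call site; prints ".cbz", ".epub", etc. for whatever files are in the DB.
IFileRepository fileRepository = new FileRepository(dbContext);
var extensions = await fileRepository.GetFileExtensions();
foreach (var ext in extensions)
{
    Console.WriteLine(ext);
}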


@ -106,6 +106,8 @@ namespace API.Data
.Where(x => x.Id == libraryId)
.Include(f => f.Folders)
.Include(l => l.Series)
.ThenInclude(s => s.Metadata)
.Include(l => l.Series)
.ThenInclude(s => s.Volumes)
.ThenInclude(v => v.Chapters)
.ThenInclude(c => c.Files)


@ -0,0 +1,869 @@
// <auto-generated />
using System;
using API.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace API.Data.Migrations
{
[DbContext(typeof(DataContext))]
[Migration("20210622164318_NewUserPreferences")]
partial class NewUserPreferences
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.4");
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedName")
.IsUnique()
.HasDatabaseName("RoleNameIndex");
b.ToTable("AspNetRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AccessFailedCount")
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<string>("Email")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<bool>("EmailConfirmed")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastActive")
.HasColumnType("TEXT");
b.Property<bool>("LockoutEnabled")
.HasColumnType("INTEGER");
b.Property<DateTimeOffset?>("LockoutEnd")
.HasColumnType("TEXT");
b.Property<string>("NormalizedEmail")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedUserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("PasswordHash")
.HasColumnType("TEXT");
b.Property<string>("PhoneNumber")
.HasColumnType("TEXT");
b.Property<bool>("PhoneNumberConfirmed")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("SecurityStamp")
.HasColumnType("TEXT");
b.Property<bool>("TwoFactorEnabled")
.HasColumnType("INTEGER");
b.Property<string>("UserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedEmail")
.HasDatabaseName("EmailIndex");
b.HasIndex("NormalizedUserName")
.IsUnique()
.HasDatabaseName("UserNameIndex");
b.ToTable("AspNetUsers");
});
modelBuilder.Entity("API.Entities.AppUserPreferences", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<bool>("AutoCloseMenu")
.HasColumnType("INTEGER");
b.Property<bool>("BookReaderDarkMode")
.HasColumnType("INTEGER");
b.Property<string>("BookReaderFontFamily")
.HasColumnType("TEXT");
b.Property<int>("BookReaderFontSize")
.HasColumnType("INTEGER");
b.Property<int>("BookReaderLineSpacing")
.HasColumnType("INTEGER");
b.Property<int>("BookReaderMargin")
.HasColumnType("INTEGER");
b.Property<int>("BookReaderReadingDirection")
.HasColumnType("INTEGER");
b.Property<bool>("BookReaderTapToPaginate")
.HasColumnType("INTEGER");
b.Property<int>("PageSplitOption")
.HasColumnType("INTEGER");
b.Property<int>("ReaderMode")
.HasColumnType("INTEGER");
b.Property<int>("ReadingDirection")
.HasColumnType("INTEGER");
b.Property<int>("ScalingOption")
.HasColumnType("INTEGER");
b.Property<bool>("SiteDarkMode")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId")
.IsUnique();
b.ToTable("AppUserPreferences");
});
modelBuilder.Entity("API.Entities.AppUserProgress", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<string>("BookScrollId")
.HasColumnType("TEXT");
b.Property<int>("ChapterId")
.HasColumnType("INTEGER");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<int>("PagesRead")
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("VolumeId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId");
b.ToTable("AppUserProgresses");
});
modelBuilder.Entity("API.Entities.AppUserRating", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<int>("Rating")
.HasColumnType("INTEGER");
b.Property<string>("Review")
.HasColumnType("TEXT");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId");
b.ToTable("AppUserRating");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("UserId", "RoleId");
b.HasIndex("RoleId");
b.ToTable("AspNetUserRoles");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<bool>("IsSpecial")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Number")
.HasColumnType("TEXT");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<string>("Range")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.Property<int>("VolumeId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("VolumeId");
b.ToTable("Chapter");
});
modelBuilder.Entity("API.Entities.CollectionTag", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<string>("NormalizedTitle")
.HasColumnType("TEXT");
b.Property<bool>("Promoted")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("Summary")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("Id", "Promoted")
.IsUnique();
b.ToTable("CollectionTag");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<DateTime>("LastScanned")
.HasColumnType("TEXT");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("Path")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.ToTable("FolderPath");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("CoverImage")
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Type")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.ToTable("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ChapterId")
.HasColumnType("INTEGER");
b.Property<string>("FilePath")
.HasColumnType("TEXT");
b.Property<int>("Format")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("ChapterId");
b.ToTable("MangaFile");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("LocalizedName")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasColumnType("TEXT");
b.Property<string>("OriginalName")
.HasColumnType("TEXT");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<string>("SortName")
.HasColumnType("TEXT");
b.Property<string>("Summary")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId")
.IsUnique();
b.ToTable("Series");
});
modelBuilder.Entity("API.Entities.SeriesMetadata", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("SeriesId")
.IsUnique();
b.HasIndex("Id", "SeriesId")
.IsUnique();
b.ToTable("SeriesMetadata");
});
modelBuilder.Entity("API.Entities.ServerSetting", b =>
{
b.Property<int>("Key")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("Value")
.HasColumnType("TEXT");
b.HasKey("Key");
b.ToTable("ServerSetting");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Number")
.HasColumnType("INTEGER");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("SeriesId");
b.ToTable("Volume");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.Property<int>("AppUsersId")
.HasColumnType("INTEGER");
b.Property<int>("LibrariesId")
.HasColumnType("INTEGER");
b.HasKey("AppUsersId", "LibrariesId");
b.HasIndex("LibrariesId");
b.ToTable("AppUserLibrary");
});
modelBuilder.Entity("CollectionTagSeriesMetadata", b =>
{
b.Property<int>("CollectionTagsId")
.HasColumnType("INTEGER");
b.Property<int>("SeriesMetadatasId")
.HasColumnType("INTEGER");
b.HasKey("CollectionTagsId", "SeriesMetadatasId");
b.HasIndex("SeriesMetadatasId");
b.ToTable("CollectionTagSeriesMetadata");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("RoleId");
b.ToTable("AspNetRoleClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("UserId");
b.ToTable("AspNetUserClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("ProviderKey")
.HasColumnType("TEXT");
b.Property<string>("ProviderDisplayName")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("LoginProvider", "ProviderKey");
b.HasIndex("UserId");
b.ToTable("AspNetUserLogins");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("Value")
.HasColumnType("TEXT");
b.HasKey("UserId", "LoginProvider", "Name");
b.ToTable("AspNetUserTokens");
});
modelBuilder.Entity("API.Entities.AppUserPreferences", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithOne("UserPreferences")
.HasForeignKey("API.Entities.AppUserPreferences", "AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserProgress", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithMany("Progresses")
.HasForeignKey("AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserRating", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithMany("Ratings")
.HasForeignKey("AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.HasOne("API.Entities.AppRole", "Role")
.WithMany("UserRoles")
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.AppUser", "User")
.WithMany("UserRoles")
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Role");
b.Navigation("User");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.HasOne("API.Entities.Volume", "Volume")
.WithMany("Chapters")
.HasForeignKey("VolumeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Volume");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Folders")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.HasOne("API.Entities.Chapter", "Chapter")
.WithMany("Files")
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Series")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.SeriesMetadata", b =>
{
b.HasOne("API.Entities.Series", "Series")
.WithOne("Metadata")
.HasForeignKey("API.Entities.SeriesMetadata", "SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Series");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.HasOne("API.Entities.Series", "Series")
.WithMany("Volumes")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Series");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("AppUsersId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.Library", null)
.WithMany()
.HasForeignKey("LibrariesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("CollectionTagSeriesMetadata", b =>
{
b.HasOne("API.Entities.CollectionTag", null)
.WithMany()
.HasForeignKey("CollectionTagsId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.SeriesMetadata", null)
.WithMany()
.HasForeignKey("SeriesMetadatasId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.HasOne("API.Entities.AppRole", null)
.WithMany()
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Navigation("Progresses");
b.Navigation("Ratings");
b.Navigation("UserPreferences");
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.Navigation("Files");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Navigation("Folders");
b.Navigation("Series");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Navigation("Metadata");
b.Navigation("Volumes");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Navigation("Chapters");
});
#pragma warning restore 612, 618
}
}
}


@ -0,0 +1,35 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace API.Data.Migrations
{
public partial class NewUserPreferences : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<bool>(
name: "AutoCloseMenu",
table: "AppUserPreferences",
type: "INTEGER",
nullable: false,
defaultValue: false);
migrationBuilder.AddColumn<int>(
name: "ReaderMode",
table: "AppUserPreferences",
type: "INTEGER",
nullable: false,
defaultValue: 0);
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "AutoCloseMenu",
table: "AppUserPreferences");
migrationBuilder.DropColumn(
name: "ReaderMode",
table: "AppUserPreferences");
}
}
}


@ -127,6 +127,9 @@ namespace API.Data.Migrations
b.Property<int>("AppUserId") b.Property<int>("AppUserId")
.HasColumnType("INTEGER"); .HasColumnType("INTEGER");
b.Property<bool>("AutoCloseMenu")
.HasColumnType("INTEGER");
b.Property<bool>("BookReaderDarkMode") b.Property<bool>("BookReaderDarkMode")
.HasColumnType("INTEGER"); .HasColumnType("INTEGER");
@ -151,6 +154,9 @@ namespace API.Data.Migrations
b.Property<int>("PageSplitOption") b.Property<int>("PageSplitOption")
.HasColumnType("INTEGER"); .HasColumnType("INTEGER");
b.Property<int>("ReaderMode")
.HasColumnType("INTEGER");
b.Property<int>("ReadingDirection") b.Property<int>("ReadingDirection")
.HasColumnType("INTEGER"); .HasColumnType("INTEGER");


@ -1,13 +1,14 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Threading.Tasks;
using API.Constants;
using API.Entities;
using API.Entities.Enums;
using API.Services;
using Kavita.Common;
using Microsoft.AspNetCore.Identity;
using Microsoft.EntityFrameworkCore;
namespace API.Data
{
@ -15,11 +16,13 @@ namespace API.Data
{
public static async Task SeedRoles(RoleManager<AppRole> roleManager)
{
var roles = new List<AppRole>
{
new() {Name = PolicyConstants.AdminRole},
new() {Name = PolicyConstants.PlebRole}
};
var roles = typeof(PolicyConstants)
.GetFields(BindingFlags.Public | BindingFlags.Static)
.Where(f => f.FieldType == typeof(string))
.ToDictionary(f => f.Name,
f => (string) f.GetValue(null)).Values
.Select(policyName => new AppRole() {Name = policyName})
.ToList();
foreach (var role in roles)
{
@ -39,10 +42,11 @@ namespace API.Data
{
new() {Key = ServerSettingKey.CacheDirectory, Value = CacheService.CacheDirectory},
new () {Key = ServerSettingKey.TaskScan, Value = "daily"},
//new () {Key = ServerSettingKey.LoggingLevel, Value = "Information"},
new () {Key = ServerSettingKey.LoggingLevel, Value = "Information"}, // Not used from DB, but DB is sync with appSettings.json
new () {Key = ServerSettingKey.TaskBackup, Value = "weekly"},
new () {Key = ServerSettingKey.BackupDirectory, Value = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "backups/"))},
new () {Key = ServerSettingKey.Port, Value = "5000"},
new () {Key = ServerSettingKey.Port, Value = "5000"}, // Not used from DB, but DB is sync with appSettings.json
new () {Key = ServerSettingKey.AllowStatCollection, Value = "true"},
};
foreach (var defaultSetting in defaultSettings)
@ -55,22 +59,16 @@ namespace API.Data
}
await context.SaveChangesAsync();
}
public static async Task SeedSeriesMetadata(DataContext context)
{
await context.Database.EnsureCreatedAsync();
context.Database.EnsureCreated();
var series = await context.Series
.Include(s => s.Metadata).ToListAsync();
foreach (var s in series)
{
s.Metadata ??= new SeriesMetadata();
}
// Port and LoggingLevel are managed in appSettings.json. Update the DB values to match
var configFile = Program.GetAppSettingFilename();
context.ServerSetting.FirstOrDefault(s => s.Key == ServerSettingKey.Port).Value =
Configuration.GetPort(configFile) + "";
context.ServerSetting.FirstOrDefault(s => s.Key == ServerSettingKey.LoggingLevel).Value =
Configuration.GetLogLevel(configFile);
await context.SaveChangesAsync();
}
}
}


@ -289,7 +289,7 @@ namespace API.Data
/// </summary>
/// <param name="userId"></param>
/// <param name="libraryId">Library to restrict to, if 0, will apply to all libraries</param>
/// <param name="limit">How many series to pick.</param>
/// <param name="userParams">Contains pagination information</param>
/// <returns></returns>
public async Task<PagedList<SeriesDto>> GetRecentlyAdded(int libraryId, int userId, UserParams userParams)
{
@ -411,5 +411,16 @@ namespace API.Data
return await PagedList<SeriesDto>.CreateAsync(query, userParams.PageNumber, userParams.PageSize);
}
public async Task<IList<MangaFile>> GetFilesForSeries(int seriesId)
{
return await _context.Volume
.Where(v => v.SeriesId == seriesId)
.Include(v => v.Chapters)
.ThenInclude(c => c.Files)
.SelectMany(v => v.Chapters.SelectMany(c => c.Files))
.AsNoTracking()
.ToListAsync();
}
}
}


@ -29,8 +29,13 @@ namespace API.Data
public IAppUserProgressRepository AppUserProgressRepository => new AppUserProgressRepository(_context);
public ICollectionTagRepository CollectionTagRepository => new CollectionTagRepository(_context, _mapper);
public IFileRepository FileRepository => new FileRepository(_context);
public async Task<bool> Complete()
public bool Commit()
{
return _context.SaveChanges() > 0;
}
public async Task<bool> CommitAsync()
{
return await _context.SaveChangesAsync() > 0;
}
@ -39,5 +44,16 @@ namespace API.Data
{
return _context.ChangeTracker.HasChanges();
}
public async Task<bool> RollbackAsync()
{
await _context.DisposeAsync();
return true;
}
public bool Rollback()
{
_context.Dispose();
return true;
}
}
}
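A minimal sketch of the commit/rollback pattern these new members enable (illustrative, not taken from the diff): callers check HasChanges, try CommitAsync, and fall back to RollbackAsync, which matches how the updated SettingsController above uses the unit of work.

// Illustrative call-site pattern for the new IUnitOfWork surface.
_unitOfWork.SettingsRepository.Update(setting);
if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
{
    return Ok(updateSettingsDto);
}
await _unitOfWork.RollbackAsync(); // disposes the context, per the implementation above
return BadRequest("There was a critical issue. Please try again.");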


@ -66,6 +66,8 @@ namespace API.Data
}
public async Task<ChapterDto> GetChapterDtoAsync(int chapterId)
{
var chapter = await _context.Chapter
@ -84,5 +86,15 @@ namespace API.Data
.AsNoTracking()
.ToListAsync();
}
public async Task<IList<MangaFile>> GetFilesForVolume(int volumeId)
{
return await _context.Chapter
.Where(c => volumeId == c.VolumeId)
.Include(c => c.Files)
.SelectMany(c => c.Files)
.AsNoTracking()
.ToListAsync();
}
}
}


@ -17,7 +17,18 @@ namespace API.Entities
/// Manga Reader Option: Which side of a split image should we show first
/// </summary>
public PageSplitOption PageSplitOption { get; set; } = PageSplitOption.SplitRightToLeft;
/// <summary>
/// Manga Reader Option: How the manga reader should perform paging or reading of the file
/// <example>
/// Webtoon uses scrolling to page, MANGA_LR uses paging by clicking left/right side of reader, MANGA_UD uses paging
/// by clicking top/bottom sides of reader.
/// </example>
/// </summary>
public ReaderMode ReaderMode { get; set; }
/// <summary>
/// Manga Reader Option: Allow the menu to close after 6 seconds without interaction
/// </summary>
public bool AutoCloseMenu { get; set; }
/// <summary>
/// Book Reader Option: Should the background color be dark
/// </summary>
@ -46,10 +57,11 @@ namespace API.Entities
/// Book Reader Option: What direction should the next/prev page buttons go
/// </summary>
public ReadingDirection BookReaderReadingDirection { get; set; } = ReadingDirection.LeftToRight;
/// <summary>
/// UI Site Global Setting: Whether the UI should render in Dark mode or not.
/// </summary>
public bool SiteDarkMode { get; set; }
public bool SiteDarkMode { get; set; } = true;


@ -0,0 +1,14 @@
using System.ComponentModel;
namespace API.Entities.Enums
{
public enum ReaderMode
{
[Description("Left and Right")]
MANGA_LR = 0,
[Description("Up and Down")]
MANGA_UP = 1,
[Description("Webtoon")]
WEBTOON = 2
}
}
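The Description attributes make the enum friendly for UI labels; a small hedged sketch (not part of the commit) of reading them back via reflection:

using System.ComponentModel;
using System.Reflection;

// Returns "Webtoon" for ReaderMode.WEBTOON, falling back to the enum name when no attribute exists.
public static string GetDescription(ReaderMode mode)
{
    var member = typeof(ReaderMode).GetMember(mode.ToString())[0];
    var attr = member.GetCustomAttribute<DescriptionAttribute>();
    return attr?.Description ?? mode.ToString();
}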


@ -15,6 +15,9 @@ namespace API.Entities.Enums
[Description("Port")] [Description("Port")]
Port = 4, Port = 4,
[Description("BackupDirectory")] [Description("BackupDirectory")]
BackupDirectory = 5 BackupDirectory = 5,
[Description("AllowStatCollection")]
AllowStatCollection = 6,
} }
} }


@ -32,7 +32,7 @@ namespace API.Entities
/// <summary>
/// Summary information related to the Series
/// </summary>
public string Summary { get; set; } // TODO: Migrate into SeriesMetdata (with Metadata update)
public DateTime Created { get; set; }
public DateTime LastModified { get; set; }
public byte[] CoverImage { get; set; }


@ -4,18 +4,22 @@ using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using API.Services.Tasks;
using Kavita.Common;
using Microsoft.AspNetCore.Hosting;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
namespace API.Extensions
{
public static class ApplicationServiceExtensions
{
public static IServiceCollection AddApplicationServices(this IServiceCollection services, IConfiguration config)
public static IServiceCollection AddApplicationServices(this IServiceCollection services, IConfiguration config, IWebHostEnvironment env)
{
services.AddAutoMapper(typeof(AutoMapperProfiles).Assembly);
services.AddScoped<IStatsService, StatsService>();
services.AddScoped<ITaskScheduler, TaskScheduler>();
services.AddScoped<IDirectoryService, DirectoryService>();
services.AddScoped<ITokenService, TokenService>();
@ -28,11 +32,7 @@ namespace API.Extensions
services.AddScoped<ICleanupService, CleanupService>();
services.AddScoped<IBookService, BookService>();
services.AddSqLite(config, env);
services.AddDbContext<DataContext>(options =>
{
options.UseSqlite(config.GetConnectionString("DefaultConnection"));
});
services.AddLogging(loggingBuilder =>
{
@ -43,8 +43,16 @@ namespace API.Extensions
return services;
}
public static IServiceCollection AddStartupTask<T>(this IServiceCollection services)
where T : class, IStartupTask
=> services.AddTransient<IStartupTask, T>();
private static IServiceCollection AddSqLite(this IServiceCollection services, IConfiguration config,
IWebHostEnvironment env)
{
services.AddDbContext<DataContext>(options =>
{
options.UseSqlite(config.GetConnectionString("DefaultConnection"));
options.EnableSensitiveDataLogging(env.IsDevelopment() || Configuration.GetLogLevel(Program.GetAppSettingFilename()).Equals("Debug"));
});
return services;
}
}
}


@ -1,12 +1,12 @@
using System;
using System.IO;
using System.Linq;
using API.Services;
using API.Comparators;
namespace API.Extensions
{
public static class DirectoryInfoExtensions
{
private static readonly NaturalSortComparer Comparer = new NaturalSortComparer();
public static void Empty(this DirectoryInfo directory)
{
foreach(FileInfo file in directory.EnumerateFiles()) file.Delete();
@ -49,12 +49,13 @@ namespace API.Extensions
if (!root.FullName.Equals(directory.FullName))
{
var fileIndex = 1;
foreach (var file in directory.EnumerateFiles())
foreach (var file in directory.EnumerateFiles().OrderBy(file => file.FullName, Comparer))
{
if (file.Directory == null) continue;
var paddedIndex = Parser.Parser.PadZeros(directoryIndex + "");
// We need to rename the files so that after flattening, they are in the order we found them
var newName = $"{paddedIndex}_{fileIndex}.{file.Extension}";
var newName = $"{paddedIndex}_{Parser.Parser.PadZeros(fileIndex + "")}{file.Extension}";
var newPath = Path.Join(root.FullName, newName);
if (!File.Exists(newPath)) file.MoveTo(newPath);
fileIndex++;


@ -39,6 +39,7 @@ namespace API.Extensions
services.AddAuthorization(opt =>
{
opt.AddPolicy("RequireAdminRole", policy => policy.RequireRole(PolicyConstants.AdminRole));
opt.AddPolicy("RequireDownloadRole", policy => policy.RequireRole(PolicyConstants.DownloadRole, PolicyConstants.AdminRole));
});
return services;


@ -1,4 +1,7 @@
using System;
using API.Interfaces.Services;
using API.Services.Clients;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
namespace API.Extensions
@ -8,5 +11,15 @@ namespace API.Extensions
public static IServiceCollection AddStartupTask<T>(this IServiceCollection services)
where T : class, IStartupTask
=> services.AddTransient<IStartupTask, T>();
public static IServiceCollection AddStatsClient(this IServiceCollection services, IConfiguration configuration)
{
services.AddHttpClient<StatsApiClient>(client =>
{
client.DefaultRequestHeaders.Add("api-key", "MsnvA2DfQqxSK5jh");
});
return services;
}
}
}
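For reference, AddHttpClient<StatsApiClient> registers a typed client, so StatsApiClient receives the configured HttpClient (already carrying the api-key header) through its constructor. A hedged sketch of what such a client might look like; its real implementation is not shown in this diff, and the endpoint below is an assumption for illustration only.

using System.Net.Http;
using System.Net.Http.Json;
using System.Threading.Tasks;
using API.DTOs;

// Hypothetical shape of the typed client consumed by AddStatsClient above.
public class StatsApiClient
{
    private readonly HttpClient _client;

    public StatsApiClient(HttpClient client)
    {
        _client = client; // default headers (api-key) were set during registration
    }

    public async Task SendDataToStatsServer(UsageStatisticsDto data)
    {
        // "api/InstallationStats" is an assumed endpoint, not taken from this commit.
        var response = await _client.PostAsJsonAsync("api/InstallationStats", data);
        response.EnsureSuccessStatusCode();
    }
}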


@ -30,6 +30,9 @@ namespace API.Helpers.Converters
case ServerSettingKey.Port:
destination.Port = int.Parse(row.Value);
break;
case ServerSettingKey.AllowStatCollection:
destination.AllowStatCollection = bool.Parse(row.Value);
break;
}
}


@ -0,0 +1,10 @@
using System.Collections.Generic;
using System.Threading.Tasks;
namespace API.Interfaces
{
public interface IFileRepository
{
Task<IEnumerable<string>> GetFileExtensions();
}
}


@ -61,5 +61,6 @@ namespace API.Interfaces
Task<PagedList<SeriesDto>> GetRecentlyAdded(int libraryId, int userId, UserParams userParams);
Task<SeriesMetadataDto> GetSeriesMetadata(int seriesId);
Task<PagedList<SeriesDto>> GetSeriesDtoForCollectionAsync(int collectionId, int userId, UserParams userParams);
Task<IList<MangaFile>> GetFilesForSeries(int seriesId);
}
}


@ -11,5 +11,7 @@
void RefreshMetadata(int libraryId, bool forceUpdate = true);
void CleanupTemp();
void RefreshSeriesMetadata(int libraryId, int seriesId);
void ScheduleStatsTasks();
void CancelStatsTasks();
}
}


@ -11,7 +11,11 @@ namespace API.Interfaces
ISettingsRepository SettingsRepository { get; }
IAppUserProgressRepository AppUserProgressRepository { get; }
ICollectionTagRepository CollectionTagRepository { get; }
Task<bool> Complete();
IFileRepository FileRepository { get; }
bool Commit();
Task<bool> CommitAsync();
bool HasChanges();
bool Rollback();
Task<bool> RollbackAsync();
}
}


@ -13,5 +13,6 @@ namespace API.Interfaces
Task<IList<MangaFile>> GetFilesForChapter(int chapterId);
Task<IList<Chapter>> GetChaptersAsync(int volumeId);
Task<byte[]> GetChapterCoverImageAsync(int chapterId);
Task<IList<MangaFile>> GetFilesForVolume(int volumeId);
}
}


@ -1,4 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO.Compression;
using System.Threading.Tasks;
using API.Archive;
namespace API.Interfaces.Services
@ -12,5 +15,6 @@ namespace API.Interfaces.Services
string GetSummaryInfo(string archivePath);
ArchiveLibrary CanOpen(string archivePath);
bool ArchiveNeedsFlattening(ZipArchive archive);
Task<Tuple<byte[], string>> CreateZipForDownload(IEnumerable<string> files, string tempFolder);
}
}


@ -0,0 +1,13 @@
using System.Threading.Tasks;
using API.DTOs;
namespace API.Interfaces.Services
{
public interface IStatsService
{
Task PathData(ClientInfoDto clientInfoDto);
Task FinalizeStats();
Task CollectRelevantData();
Task CollectAndSendStatsData();
}
}


@ -9,10 +9,13 @@ namespace API.Parser
{
public static class Parser
{
public static readonly string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|.cb7";
public static readonly string BookFileExtensions = @"\.epub";
public static readonly string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg)";
public static readonly Regex FontSrcUrlRegex = new Regex("(src:url\\(\"?'?)([a-z0-9/\\._]+)(\"?'?\\))", RegexOptions.IgnoreCase | RegexOptions.Compiled);
public const string DefaultChapter = "0";
public const string DefaultVolume = "0";
public const string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|.cb7";
public const string BookFileExtensions = @"\.epub";
public const string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg)";
public static readonly Regex FontSrcUrlRegex = new Regex(@"(src:url\(.{1})" + "([^\"']*)" + @"(.{1}\))", RegexOptions.IgnoreCase | RegexOptions.Compiled);
public static readonly Regex CssImportUrlRegex = new Regex("(@import\\s[\"|'])(?<Filename>[\\w\\d/\\._-]+)([\"|'];?)", RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly string XmlRegexExtensions = @"\.xml";
@ -92,7 +95,7 @@ namespace API.Parser
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
new Regex(
@"(?<Series>.*) (\b|_|-)v",
@"(?<Series>.*) (\b|_|-)(v|ch\.?|c)\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
//Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip must be before [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
// due to duplicate version identifiers in file.
@ -197,6 +200,14 @@ namespace API.Parser
new Regex(
@"^(?<Series>.*)(?: |_)v\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Amazing Man Comics chapter 25
new Regex(
@"^(?<Series>.*)(?: |_)c(hapter) \d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Amazing Man Comics issue #25
new Regex(
@"^(?<Series>.*)(?: |_)i(ssue) #\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.*)(?: \d+)",
@ -239,11 +250,11 @@ namespace API.Parser
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)
new Regex(
@"^(?<Series>.*)(?: |_)(?<!of )(?<Volume>\d+)",
@"^(?<Series>.*)(?<!c(hapter)|i(ssue))(?<!of)(?: |_)(?<!of )(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.*)(?<!of)(?: (?<Volume>\d+))",
@"^(?<Series>.*)(?<!c(hapter)|i(ssue))(?<!of)(?: (?<Volume>\d+))",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Batman & Robin the Teen Wonder #0
new Regex(
@ -281,6 +292,14 @@ namespace API.Parser
new Regex(
@"^(?<Series>.*)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)-",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Amazing Man Comics chapter 25
new Regex(
@"^(?!Vol)(?<Series>.*)( |_)c(hapter)( |_)(?<Chapter>\d*)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Amazing Man Comics issue #25
new Regex(
@"^(?!Vol)(?<Series>.*)( |_)i(ssue)( |_) #(?<Chapter>\d*)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
};
private static readonly Regex[] ReleaseGroupRegex = new[]
@ -372,10 +391,16 @@ namespace API.Parser
{
// All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle.
new Regex(
@"(?<Special>Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories|(?<!The\s)Anthology|Bonus)",
@"(?<Special>Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories|Bonus)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
};
// If SP\d+ is in the filename, we force treat it as a special regardless if volume or chapter might have been found.
private static readonly Regex SpecialMarkerRegex = new Regex(
@"(?<Special>SP\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled
);
/// <summary>
/// Parses information out of a file path. Will fallback to using directory name if Series couldn't be parsed
@ -424,7 +449,7 @@ namespace API.Parser
{
var folder = fallbackFolders[i];
if (!string.IsNullOrEmpty(ParseMangaSpecial(folder))) continue;
if (ParseVolume(folder) != "0" || ParseChapter(folder) != "0") continue;
if (ParseVolume(folder) != DefaultVolume || ParseChapter(folder) != DefaultChapter) continue;
var series = ParseSeries(folder);
@ -453,12 +478,22 @@ namespace API.Parser
var isSpecial = ParseMangaSpecial(fileName);
// We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that
// could cause a problem as Omake is a special term, but there is valid volume/chapter information.
if (ret.Chapters == "0" && ret.Volumes == "0" && !string.IsNullOrEmpty(isSpecial))
if (ret.Chapters == DefaultChapter && ret.Volumes == DefaultVolume && !string.IsNullOrEmpty(isSpecial))
{
ret.IsSpecial = true;
}
if (HasSpecialMarker(fileName))
{
ret.IsSpecial = true;
ret.Chapters = DefaultChapter;
ret.Volumes = DefaultVolume;
}
if (string.IsNullOrEmpty(ret.Series))
{
ret.Series = CleanTitle(fileName);
}
return ret.Series == string.Empty ? null : ret;
}
@ -491,6 +526,25 @@ namespace API.Parser
return string.Empty;
}
/// <summary>
/// If the file has SP marker.
/// </summary>
/// <param name="filePath"></param>
/// <returns></returns>
public static bool HasSpecialMarker(string filePath)
{
var matches = SpecialMarkerRegex.Matches(filePath);
foreach (Match match in matches)
{
if (match.Groups["Special"].Success && match.Groups["Special"].Value != string.Empty)
{
return true;
}
}
return false;
}
public static string ParseMangaSpecial(string filePath)
{
foreach (var regex in MangaSpecialRegex)
@ -560,7 +614,7 @@ namespace API.Parser
}
}
return "0";
return DefaultVolume;
}
public static string ParseComicVolume(string filename)
@ -582,7 +636,7 @@ namespace API.Parser
}
}
return "0";
return DefaultVolume;
}
public static string ParseChapter(string filename)
@ -610,7 +664,7 @@ namespace API.Parser
}
}
return "0";
return DefaultChapter;
}
private static string AddChapterPart(string value)
@ -648,7 +702,7 @@ namespace API.Parser
}
}
return "0";
return DefaultChapter;
}
private static string RemoveEditionTagHolders(string title)
@ -794,14 +848,22 @@ namespace API.Parser
}
public static float MinimumNumberFromRange(string range)
{
try
{ {
if (!Regex.IsMatch(range, @"^[\d-.]+$"))
{
return (float) 0.0;
}
var tokens = range.Replace("_", string.Empty).Split("-");
return tokens.Min(float.Parse);
}
catch
{
return (float) 0.0;
}
}
public static string Normalize(string name)
{

View File

@ -3,7 +3,7 @@
namespace API.Parser namespace API.Parser
{ {
/// <summary> /// <summary>
/// This represents a single file /// This represents all parsed information from a single file
/// </summary> /// </summary>
public class ParserInfo public class ParserInfo
{ {

View File

@ -5,6 +5,7 @@ using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data; using API.Data;
using API.Entities; using API.Entities;
using API.Services.HostedServices;
using Kavita.Common; using Kavita.Common;
using Kavita.Common.EnvironmentInfo; using Kavita.Common.EnvironmentInfo;
using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Hosting;
@ -20,13 +21,13 @@ namespace API
{ {
public class Program public class Program
{ {
private static readonly int HttpPort = 5000; private static int _httpPort;
protected Program() protected Program()
{ {
} }
private static string GetAppSettingFilename() public static string GetAppSettingFilename()
{ {
var environment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT"); var environment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
var isDevelopment = environment == Environments.Development; var isDevelopment = environment == Environments.Development;
@ -47,6 +48,9 @@ namespace API
Configuration.UpdateJwtToken(GetAppSettingFilename(), base64); Configuration.UpdateJwtToken(GetAppSettingFilename(), base64);
} }
// Get HttpPort from Config
_httpPort = Configuration.GetPort(GetAppSettingFilename());
var host = CreateHostBuilder(args).Build(); var host = CreateHostBuilder(args).Build();
@ -61,8 +65,6 @@ namespace API
await context.Database.MigrateAsync(); await context.Database.MigrateAsync();
await Seed.SeedRoles(roleManager); await Seed.SeedRoles(roleManager);
await Seed.SeedSettings(context); await Seed.SeedSettings(context);
// TODO: Remove this in v0.4.2
await Seed.SeedSeriesMetadata(context);
} }
catch (Exception ex) catch (Exception ex)
{ {
@ -79,7 +81,7 @@ namespace API
{ {
webBuilder.UseKestrel((opts) => webBuilder.UseKestrel((opts) =>
{ {
opts.ListenAnyIP(HttpPort, options => opts.ListenAnyIP(_httpPort, options =>
{ {
options.Protocols = HttpProtocols.Http1AndHttp2; options.Protocols = HttpProtocols.Http1AndHttp2;
}); });
@ -106,8 +108,16 @@ namespace API
options.BeforeSend = sentryEvent => options.BeforeSend = sentryEvent =>
{ {
if (sentryEvent.Exception != null if (sentryEvent.Exception != null
&& sentryEvent.Exception.Message.Contains("[GetCoverImage] This archive cannot be read:") && sentryEvent.Exception.Message.StartsWith("[GetCoverImage]")
&& sentryEvent.Exception.Message.Contains("[BookService] ")) && sentryEvent.Exception.Message.StartsWith("[BookService]")
&& sentryEvent.Exception.Message.StartsWith("[ExtractArchive]")
&& sentryEvent.Exception.Message.StartsWith("[GetSummaryInfo]")
&& sentryEvent.Exception.Message.StartsWith("[GetSummaryInfo]")
&& sentryEvent.Exception.Message.StartsWith("[GetNumberOfPagesFromArchive]")
&& sentryEvent.Exception.Message.Contains("EPUB parsing error")
&& sentryEvent.Exception.Message.Contains("Unsupported EPUB version")
&& sentryEvent.Exception.Message.Contains("Incorrect EPUB")
&& sentryEvent.Exception.Message.Contains("Access is Denied"))
{ {
return null; // Don't send this event to Sentry return null; // Don't send this event to Sentry
} }

View File

@ -4,12 +4,14 @@ using System.Diagnostics;
using System.IO; using System.IO;
using System.IO.Compression; using System.IO.Compression;
using System.Linq; using System.Linq;
using System.Threading.Tasks;
using System.Xml.Serialization; using System.Xml.Serialization;
using API.Archive; using API.Archive;
using API.Comparators; using API.Comparators;
using API.Extensions; using API.Extensions;
using API.Interfaces.Services; using API.Interfaces.Services;
using API.Services.Tasks; using API.Services.Tasks;
using Kavita.Common;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.IO; using Microsoft.IO;
using SharpCompress.Archives; using SharpCompress.Archives;
@ -25,13 +27,15 @@ namespace API.Services
public class ArchiveService : IArchiveService public class ArchiveService : IArchiveService
{ {
private readonly ILogger<ArchiveService> _logger; private readonly ILogger<ArchiveService> _logger;
private readonly IDirectoryService _directoryService;
private const int ThumbnailWidth = 320; // 153w x 230h private const int ThumbnailWidth = 320; // 153w x 230h
private static readonly RecyclableMemoryStreamManager StreamManager = new(); private static readonly RecyclableMemoryStreamManager StreamManager = new();
private readonly NaturalSortComparer _comparer; private readonly NaturalSortComparer _comparer;
public ArchiveService(ILogger<ArchiveService> logger) public ArchiveService(ILogger<ArchiveService> logger, IDirectoryService directoryService)
{ {
_logger = logger; _logger = logger;
_directoryService = directoryService;
_comparer = new NaturalSortComparer(); _comparer = new NaturalSortComparer();
} }
@ -217,6 +221,38 @@ namespace API.Services
archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !Parser.Parser.HasBlacklistedFolderInPath(e.FullName)); archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !Parser.Parser.HasBlacklistedFolderInPath(e.FullName));
} }
public async Task<Tuple<byte[], string>> CreateZipForDownload(IEnumerable<string> files, string tempFolder)
{
var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
var dateString = DateTime.Now.ToShortDateString().Replace("/", "_");
var tempLocation = Path.Join(tempDirectory, $"{tempFolder}_{dateString}");
DirectoryService.ExistOrCreate(tempLocation);
if (!_directoryService.CopyFilesToDirectory(files, tempLocation))
{
throw new KavitaException("Unable to copy files to temp directory archive download.");
}
var zipPath = Path.Join(tempDirectory, $"kavita_{tempFolder}_{dateString}.zip");
try
{
ZipFile.CreateFromDirectory(tempLocation, zipPath);
}
catch (AggregateException ex)
{
_logger.LogError(ex, "There was an issue creating temp archive");
throw new KavitaException("There was an issue creating temp archive");
}
var fileBytes = await _directoryService.ReadFileAsync(zipPath);
DirectoryService.ClearAndDeleteDirectory(tempLocation);
(new FileInfo(zipPath)).Delete();
return Tuple.Create(fileBytes, zipPath);
}
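A sketch of how a download endpoint might consume CreateZipForDownload; the controller shape, route and repository call are illustrative assumptions, not the actual Kavita endpoint:
[HttpGet("volume-download")]
public async Task<ActionResult> DownloadVolume(int volumeId)
{
    // Hypothetical repository call returning the files that belong to the volume
    var files = await _unitOfWork.VolumeRepository.GetFilesForVolumeAsync(volumeId);
    var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(
        files.Select(f => f.FilePath), $"download_{volumeId}");
    // The zip name doubles as the download file name; the temp copies are already cleaned up by the service
    return File(fileBytes, "application/zip", Path.GetFileName(zipPath));
}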
private byte[] CreateThumbnail(string entryName, Stream stream, string formatExtension = ".jpg") private byte[] CreateThumbnail(string entryName, Stream stream, string formatExtension = ".jpg")
{ {
if (!formatExtension.StartsWith(".")) if (!formatExtension.StartsWith("."))
@ -230,7 +266,7 @@ namespace API.Services
} }
catch (Exception ex) catch (Exception ex)
{ {
_logger.LogWarning(ex, "There was an error and prevented thumbnail generation on {EntryName}. Defaulting to no cover image", entryName); _logger.LogWarning(ex, "[GetCoverImage] There was an error and prevented thumbnail generation on {EntryName}. Defaulting to no cover image", entryName);
} }
return Array.Empty<byte>(); return Array.Empty<byte>();
@ -245,13 +281,13 @@ namespace API.Services
{ {
if (!File.Exists(archivePath)) if (!File.Exists(archivePath))
{ {
_logger.LogError("Archive {ArchivePath} could not be found", archivePath); _logger.LogWarning("Archive {ArchivePath} could not be found", archivePath);
return false; return false;
} }
if (Parser.Parser.IsArchive(archivePath) || Parser.Parser.IsEpub(archivePath)) return true; if (Parser.Parser.IsArchive(archivePath) || Parser.Parser.IsEpub(archivePath)) return true;
_logger.LogError("Archive {ArchivePath} is not a valid archive", archivePath); _logger.LogWarning("Archive {ArchivePath} is not a valid archive", archivePath);
return false; return false;
} }
@ -407,7 +443,7 @@ namespace API.Services
} }
catch (Exception e) catch (Exception e)
{ {
_logger.LogWarning(e, "There was a problem extracting {ArchivePath} to {ExtractPath}",archivePath, extractPath); _logger.LogWarning(e, "[ExtractArchive] There was a problem extracting {ArchivePath} to {ExtractPath}",archivePath, extractPath);
return; return;
} }
_logger.LogDebug("Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds); _logger.LogDebug("Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds);

View File

@ -90,6 +90,7 @@ namespace API.Services
else else
{ {
anchor.Attributes.Add("target", "_blank"); anchor.Attributes.Add("target", "_blank");
anchor.Attributes.Add("rel", "noreferrer noopener");
} }
return; return;
@ -167,7 +168,7 @@ namespace API.Services
} }
catch (Exception ex) catch (Exception ex)
{ {
_logger.LogError(ex, "[BookService] There was an exception getting summary, defaulting to empty string"); _logger.LogWarning(ex, "[BookService] There was an exception getting summary, defaulting to empty string");
} }
return string.Empty; return string.Empty;
@ -177,13 +178,13 @@ namespace API.Services
{ {
if (!File.Exists(filePath)) if (!File.Exists(filePath))
{ {
_logger.LogError("[BookService] Book {EpubFile} could not be found", filePath); _logger.LogWarning("[BookService] Book {EpubFile} could not be found", filePath);
return false; return false;
} }
if (Parser.Parser.IsBook(filePath)) return true; if (Parser.Parser.IsBook(filePath)) return true;
_logger.LogError("[BookService] Book {EpubFile} is not a valid EPUB", filePath); _logger.LogWarning("[BookService] Book {EpubFile} is not a valid EPUB", filePath);
return false; return false;
} }
@ -198,12 +199,19 @@ namespace API.Services
} }
catch (Exception ex) catch (Exception ex)
{ {
_logger.LogError(ex, "[BookService] There was an exception getting number of pages, defaulting to 0"); _logger.LogWarning(ex, "[BookService] There was an exception getting number of pages, defaulting to 0");
} }
return 0; return 0;
} }
public static string EscapeTags(string content)
{
content = Regex.Replace(content, @"<script(.*)(/>)", "<script$1></script>");
content = Regex.Replace(content, @"<title(.*)(/>)", "<title$1></title>");
return content;
}
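EscapeTags rewrites self-closing <script/> and <title/> tags into explicit open/close pairs, presumably because HTML parsers treat a self-closed script tag as unterminated and swallow the content that follows. A before/after sketch with a made-up input string:
var html = "<script src=\"toc.js\"/><p>Chapter 1</p>";
var safe = BookService.EscapeTags(html);
// safe == "<script src=\"toc.js\"></script><p>Chapter 1</p>"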
public static string CleanContentKeys(string key) public static string CleanContentKeys(string key)
{ {
return key.Replace("../", string.Empty); return key.Replace("../", string.Empty);
@ -235,6 +243,83 @@ namespace API.Services
{ {
using var epubBook = EpubReader.OpenBook(filePath); using var epubBook = EpubReader.OpenBook(filePath);
// If the epub has the following tags, we can group the books as Volumes
// <meta content="5.0" name="calibre:series_index"/>
// <meta content="The Dark Tower" name="calibre:series"/>
// <meta content="Wolves of the Calla" name="calibre:title_sort"/>
// If all three are present, we can take that over dc:title and format as:
// Series = The Dark Tower, Volume = 5, Filename as "Wolves of the Calla"
// In addition, the following can exist and should parse as a series (EPUB 3.2 spec)
// <meta property="belongs-to-collection" id="c01">
// The Lord of the Rings
// </meta>
// <meta refines="#c01" property="collection-type">set</meta>
// <meta refines="#c01" property="group-position">2</meta>
try
{
var seriesIndex = string.Empty;
var series = string.Empty;
var specialName = string.Empty;
var groupPosition = string.Empty;
foreach (var metadataItem in epubBook.Schema.Package.Metadata.MetaItems)
{
// EPUB 2 and 3
switch (metadataItem.Name)
{
case "calibre:series_index":
seriesIndex = metadataItem.Content;
break;
case "calibre:series":
series = metadataItem.Content;
break;
case "calibre:title_sort":
specialName = metadataItem.Content;
break;
}
// EPUB 3.2+ only
switch (metadataItem.Property)
{
case "group-position":
seriesIndex = metadataItem.Content;
break;
case "belongs-to-collection":
series = metadataItem.Content;
break;
case "collection-type":
groupPosition = metadataItem.Content;
break;
}
}
if (!string.IsNullOrEmpty(series) && !string.IsNullOrEmpty(seriesIndex) &&
(!string.IsNullOrEmpty(specialName) || groupPosition.Equals("series") || groupPosition.Equals("set")))
{
if (string.IsNullOrEmpty(specialName))
{
specialName = epubBook.Title;
}
return new ParserInfo()
{
Chapters = "0",
Edition = "",
Format = MangaFormat.Book,
Filename = Path.GetFileName(filePath),
Title = specialName,
FullFilePath = filePath,
IsSpecial = false,
Series = series,
Volumes = seriesIndex.Split(".")[0]
};
}
}
catch (Exception)
{
// Swallow exception
}
return new ParserInfo() return new ParserInfo()
{ {
Chapters = "0", Chapters = "0",
@ -250,7 +335,7 @@ namespace API.Services
} }
catch (Exception ex) catch (Exception ex)
{ {
_logger.LogError(ex, "[BookService] There was an exception when opening epub book: {FileName}", filePath); _logger.LogWarning(ex, "[BookService] There was an exception when opening epub book: {FileName}", filePath);
} }
return null; return null;
@ -285,7 +370,7 @@ namespace API.Services
} }
catch (Exception ex) catch (Exception ex)
{ {
_logger.LogError(ex, "[BookService] There was a critical error and prevented thumbnail generation on {BookFile}. Defaulting to no cover image", fileFilePath); _logger.LogWarning(ex, "[BookService] There was a critical error and prevented thumbnail generation on {BookFile}. Defaulting to no cover image", fileFilePath);
} }
return Array.Empty<byte>(); return Array.Empty<byte>();

View File

@ -63,10 +63,6 @@ namespace API.Services
} }
new DirectoryInfo(extractPath).Flatten(); new DirectoryInfo(extractPath).Flatten();
// if (fileCount > 1)
// {
// new DirectoryInfo(extractPath).Flatten();
// }
return chapter; return chapter;
} }

View File

@ -0,0 +1,62 @@
using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;
using API.Configurations.CustomOptions;
using API.DTOs;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace API.Services.Clients
{
public class StatsApiClient
{
private readonly HttpClient _client;
private readonly StatsOptions _options;
private readonly ILogger<StatsApiClient> _logger;
private const string ApiUrl = "https://stats.kavitareader.com";
public StatsApiClient(HttpClient client, IOptions<StatsOptions> options, ILogger<StatsApiClient> logger)
{
_client = client;
_logger = logger;
_options = options.Value ?? throw new ArgumentNullException(nameof(options));
}
public async Task SendDataToStatsServer(UsageStatisticsDto data)
{
var responseContent = string.Empty;
try
{
using var response = await _client.PostAsJsonAsync(ApiUrl + "/api/InstallationStats", data);
responseContent = await response.Content.ReadAsStringAsync();
response.EnsureSuccessStatusCode();
}
catch (HttpRequestException e)
{
var info = new
{
dataSent = data,
response = responseContent
};
_logger.LogError(e, "The StatsServer did not respond successfully. {Content}", info);
Console.WriteLine(e);
throw;
}
catch (Exception e)
{
_logger.LogError(e, "An error happened during the request to the Stats Server");
Console.WriteLine(e);
throw;
}
}
}
}
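The client is registered in Startup via services.AddStatsClient(_config) (shown further below); that extension's implementation is not part of this diff, so the following is only an assumed sketch of a typed-HttpClient registration that would satisfy this constructor:
// Inside a hypothetical AddStatsClient(this IServiceCollection services, IConfiguration config)
services.Configure<StatsOptions>(config.GetSection("StatsOptions"));
services.AddHttpClient<StatsApiClient>(client =>
{
    client.DefaultRequestHeaders.Add("Accept", "application/json");
});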

View File

@ -9,6 +9,7 @@
public string Publisher { get; set; } public string Publisher { get; set; }
public string Genre { get; set; } public string Genre { get; set; }
public int PageCount { get; set; } public int PageCount { get; set; }
// ReSharper disable once InconsistentNaming
public string LanguageISO { get; set; } public string LanguageISO { get; set; }
public string Web { get; set; } public string Web { get; set; }
} }

View File

@ -13,6 +13,9 @@ namespace API.Services
public class DirectoryService : IDirectoryService public class DirectoryService : IDirectoryService
{ {
private readonly ILogger<DirectoryService> _logger; private readonly ILogger<DirectoryService> _logger;
private static readonly Regex ExcludeDirectories = new Regex(
@"@eaDir|\.DS_Store",
RegexOptions.Compiled | RegexOptions.IgnoreCase);
public DirectoryService(ILogger<DirectoryService> logger) public DirectoryService(ILogger<DirectoryService> logger)
{ {
@ -102,6 +105,16 @@ namespace API.Services
return !Directory.Exists(path) ? Array.Empty<string>() : Directory.GetFiles(path); return !Directory.Exists(path) ? Array.Empty<string>() : Directory.GetFiles(path);
} }
/// <summary>
/// Returns the total number of bytes for a given set of full file paths
/// </summary>
/// <param name="paths"></param>
/// <returns>Total bytes</returns>
public static long GetTotalSize(IEnumerable<string> paths)
{
return paths.Sum(path => new FileInfo(path).Length);
}
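A small usage sketch for GetTotalSize; the paths and the size threshold are illustrative values, not Kavita settings:
var paths = new[] { @"/manga/Series A/Vol 1.cbz", @"/manga/Series A/Vol 2.cbz" };
var totalBytes = DirectoryService.GetTotalSize(paths);
if (totalBytes > 100 * 1024 * 1024)
{
    // the caller could warn the user or refuse to build a download archive
}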
/// <summary> /// <summary>
/// Returns true if the path exists and is a directory. If path does not exist, this will create it. Returns false in all fail cases. /// Returns true if the path exists and is a directory. If path does not exist, this will create it. Returns false in all fail cases.
/// </summary> /// </summary>
@ -212,6 +225,7 @@ namespace API.Services
/// <param name="root">Directory to scan</param> /// <param name="root">Directory to scan</param>
/// <param name="action">Action to apply on file path</param> /// <param name="action">Action to apply on file path</param>
/// <param name="searchPattern">Regex pattern to search against</param> /// <param name="searchPattern">Regex pattern to search against</param>
/// <param name="logger"></param>
/// <exception cref="ArgumentException"></exception> /// <exception cref="ArgumentException"></exception>
public static int TraverseTreeParallelForEach(string root, Action<string> action, string searchPattern, ILogger logger) public static int TraverseTreeParallelForEach(string root, Action<string> action, string searchPattern, ILogger logger)
{ {
@ -231,11 +245,11 @@ namespace API.Services
while (dirs.Count > 0) { while (dirs.Count > 0) {
var currentDir = dirs.Pop(); var currentDir = dirs.Pop();
string[] subDirs; IEnumerable<string> subDirs;
string[] files; string[] files;
try { try {
subDirs = Directory.GetDirectories(currentDir); subDirs = Directory.GetDirectories(currentDir).Where(path => ExcludeDirectories.Matches(path).Count == 0);
} }
// Thrown if we do not have discovery permission on the directory. // Thrown if we do not have discovery permission on the directory.
catch (UnauthorizedAccessException e) { catch (UnauthorizedAccessException e) {
@ -306,7 +320,7 @@ namespace API.Services
// Push the subdirectories onto the stack for traversal. // Push the subdirectories onto the stack for traversal.
// This could also be done before handing the files. // This could also be done before handing the files.
foreach (string str in subDirs) foreach (var str in subDirs)
dirs.Push(str); dirs.Push(str);
} }
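A quick sketch of what the new ExcludeDirectories filter drops during traversal, written as if inside DirectoryService (the regex is private) and using made-up paths; @eaDir folders come from Synology NAS metadata and .DS_Store entries from macOS:
var candidates = new[] { "/manga/Series A", "/manga/@eaDir", "/manga/Series B/.DS_Store" };
var kept = candidates.Where(path => ExcludeDirectories.Matches(path).Count == 0).ToArray();
// kept contains only "/manga/Series A"; the metadata entries are never pushed onto the traversal stack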

View File

@ -0,0 +1,54 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using API.Interfaces;
using API.Interfaces.Services;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
namespace API.Services.HostedServices
{
public class StartupTasksHostedService : IHostedService
{
private readonly IServiceProvider _provider;
public StartupTasksHostedService(IServiceProvider serviceProvider)
{
_provider = serviceProvider;
}
public async Task StartAsync(CancellationToken cancellationToken)
{
using var scope = _provider.CreateScope();
var taskScheduler = scope.ServiceProvider.GetRequiredService<ITaskScheduler>();
taskScheduler.ScheduleTasks();
try
{
await ManageStartupStatsTasks(scope, taskScheduler);
}
catch (Exception)
{
//If stats startup fail the user can keep using the app
}
}
private async Task ManageStartupStatsTasks(IServiceScope serviceScope, ITaskScheduler taskScheduler)
{
var unitOfWork = serviceScope.ServiceProvider.GetRequiredService<IUnitOfWork>();
var settingsDto = await unitOfWork.SettingsRepository.GetSettingsDtoAsync();
if (!settingsDto.AllowStatCollection) return;
taskScheduler.ScheduleStatsTasks();
var statsService = serviceScope.ServiceProvider.GetRequiredService<IStatsService>();
await statsService.CollectAndSendStatsData();
}
public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask;
}
}

View File

@ -158,7 +158,7 @@ namespace API.Services
} }
if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.Complete()).Result) if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result)
{ {
_logger.LogInformation("Updated metadata for {LibraryName} in {ElapsedMilliseconds} milliseconds", library.Name, sw.ElapsedMilliseconds); _logger.LogInformation("Updated metadata for {LibraryName} in {ElapsedMilliseconds} milliseconds", library.Name, sw.ElapsedMilliseconds);
} }
@ -191,7 +191,7 @@ namespace API.Services
_unitOfWork.SeriesRepository.Update(series); _unitOfWork.SeriesRepository.Update(series);
if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.Complete()).Result) if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result)
{ {
_logger.LogInformation("Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds); _logger.LogInformation("Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds);
} }

View File

@ -0,0 +1,186 @@
using System;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using API.Data;
using API.DTOs;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services.Clients;
using Kavita.Common;
using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Services
{
public class StatsService : IStatsService
{
private const string TempFilePath = "stats/";
private const string TempFileName = "app_stats.json";
private readonly StatsApiClient _client;
private readonly DataContext _dbContext;
private readonly ILogger<StatsService> _logger;
private readonly IUnitOfWork _unitOfWork;
public StatsService(StatsApiClient client, DataContext dbContext, ILogger<StatsService> logger,
IUnitOfWork unitOfWork)
{
_client = client;
_dbContext = dbContext;
_logger = logger;
_unitOfWork = unitOfWork;
}
private static string FinalPath => Path.Combine(Directory.GetCurrentDirectory(), TempFilePath, TempFileName);
private static bool FileExists => File.Exists(FinalPath);
public async Task PathData(ClientInfoDto clientInfoDto)
{
_logger.LogInformation("Pathing client data to the file");
var statisticsDto = await GetData();
statisticsDto.AddClientInfo(clientInfoDto);
await SaveFile(statisticsDto);
}
public async Task CollectRelevantData()
{
_logger.LogInformation("Collecting data from the server and database");
_logger.LogInformation("Collecting usage info");
var usageInfo = await GetUsageInfo();
_logger.LogInformation("Collecting server info");
var serverInfo = GetServerInfo();
await PathData(serverInfo, usageInfo);
}
public async Task FinalizeStats()
{
try
{
_logger.LogInformation("Finalizing Stats collection flow");
var data = await GetExistingData<UsageStatisticsDto>();
_logger.LogInformation("Sending data to the Stats server");
await _client.SendDataToStatsServer(data);
_logger.LogInformation("Deleting the file from disk");
if (FileExists) File.Delete(FinalPath);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error Finalizing Stats collection flow");
throw;
}
}
public async Task CollectAndSendStatsData()
{
await CollectRelevantData();
await FinalizeStats();
}
private async Task PathData(ServerInfoDto serverInfoDto, UsageInfoDto usageInfoDto)
{
_logger.LogInformation("Pathing server and usage info to the file");
var data = await GetData();
data.ServerInfo = serverInfoDto;
data.UsageInfo = usageInfoDto;
data.MarkAsUpdatedNow();
await SaveFile(data);
}
private async ValueTask<UsageStatisticsDto> GetData()
{
if (!FileExists) return new UsageStatisticsDto {InstallId = HashUtil.AnonymousToken()};
return await GetExistingData<UsageStatisticsDto>();
}
private async Task<UsageInfoDto> GetUsageInfo()
{
var usersCount = await _dbContext.Users.CountAsync();
var libsCountByType = await _dbContext.Library
.AsNoTracking()
.GroupBy(x => x.Type)
.Select(x => new LibInfo {Type = x.Key, Count = x.Count()})
.ToArrayAsync();
var uniqueFileTypes = await _unitOfWork.FileRepository.GetFileExtensions();
var usageInfo = new UsageInfoDto
{
UsersCount = usersCount,
LibraryTypesCreated = libsCountByType,
FileTypes = uniqueFileTypes
};
return usageInfo;
}
private static ServerInfoDto GetServerInfo()
{
var serverInfo = new ServerInfoDto
{
Os = RuntimeInformation.OSDescription,
DotNetVersion = Environment.Version.ToString(),
RunTimeVersion = RuntimeInformation.FrameworkDescription,
KavitaVersion = BuildInfo.Version.ToString(),
Culture = Thread.CurrentThread.CurrentCulture.Name,
BuildBranch = BuildInfo.Branch
};
return serverInfo;
}
private async Task<T> GetExistingData<T>()
{
_logger.LogInformation("Fetching existing data from file");
var existingDataJson = await GetFileDataAsString();
_logger.LogInformation("Deserializing data from file to object");
var existingData = JsonSerializer.Deserialize<T>(existingDataJson);
return existingData;
}
private async Task<string> GetFileDataAsString()
{
_logger.LogInformation("Reading file from disk");
return await File.ReadAllTextAsync(FinalPath);
}
private async Task SaveFile(UsageStatisticsDto statisticsDto)
{
_logger.LogInformation("Saving file");
var finalDirectory = FinalPath.Replace(TempFileName, string.Empty);
if (!Directory.Exists(finalDirectory))
{
_logger.LogInformation("Creating tmp directory");
Directory.CreateDirectory(finalDirectory);
}
_logger.LogInformation("Serializing data to write");
var dataJson = JsonSerializer.Serialize(statisticsDto);
_logger.LogInformation("Writing file to the disk");
await File.WriteAllTextAsync(FinalPath, dataJson);
}
}
}

View File

@ -19,11 +19,14 @@ namespace API.Services
private readonly IBackupService _backupService; private readonly IBackupService _backupService;
private readonly ICleanupService _cleanupService; private readonly ICleanupService _cleanupService;
private readonly IStatsService _statsService;
public static BackgroundJobServer Client => new BackgroundJobServer(); public static BackgroundJobServer Client => new BackgroundJobServer();
public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService, public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService,
IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService) IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService,
ICleanupService cleanupService, IStatsService statsService)
{ {
_cacheService = cacheService; _cacheService = cacheService;
_logger = logger; _logger = logger;
@ -32,6 +35,7 @@ namespace API.Services
_metadataService = metadataService; _metadataService = metadataService;
_backupService = backupService; _backupService = backupService;
_cleanupService = cleanupService; _cleanupService = cleanupService;
_statsService = statsService;
} }
public void ScheduleTasks() public void ScheduleTasks()
@ -65,6 +69,33 @@ namespace API.Services
RecurringJob.AddOrUpdate("cleanup", () => _cleanupService.Cleanup(), Cron.Daily); RecurringJob.AddOrUpdate("cleanup", () => _cleanupService.Cleanup(), Cron.Daily);
} }
#region StatsTasks
private const string SendDataTask = "finalize-stats";
public void ScheduleStatsTasks()
{
var allowStatCollection = bool.Parse(Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.AllowStatCollection)).GetAwaiter().GetResult().Value);
if (!allowStatCollection)
{
_logger.LogDebug("User has opted out of stat collection, not registering tasks");
return;
}
_logger.LogDebug("Adding StatsTasks");
_logger.LogDebug("Scheduling Send data to the Stats server {Setting}", nameof(Cron.Daily));
RecurringJob.AddOrUpdate(SendDataTask, () => _statsService.CollectAndSendStatsData(), Cron.Daily);
}
public void CancelStatsTasks()
{
_logger.LogDebug("Cancelling/Removing StatsTasks");
RecurringJob.RemoveIfExists(SendDataTask);
}
#endregion
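A sketch of how these two methods are expected to be driven when the AllowStatCollection server setting is toggled; the surrounding settings-update code is an assumption for illustration:
if (updatedSettings.AllowStatCollection)
{
    _taskScheduler.ScheduleStatsTasks();   // registers the daily "finalize-stats" recurring job
}
else
{
    _taskScheduler.CancelStatsTasks();     // removes the recurring job again
}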
public void ScanLibrary(int libraryId, bool forceUpdate = false) public void ScanLibrary(int libraryId, bool forceUpdate = false)
{ {
_logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId); _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);

View File

@ -89,7 +89,7 @@ namespace API.Services.Tasks
UpdateLibrary(library, series); UpdateLibrary(library, series);
_unitOfWork.LibraryRepository.Update(library); _unitOfWork.LibraryRepository.Update(library);
if (Task.Run(() => _unitOfWork.Complete()).Result) if (Task.Run(() => _unitOfWork.CommitAsync()).Result)
{ {
_logger.LogInformation("Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, series.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, library.Name); _logger.LogInformation("Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, series.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, library.Name);
} }
@ -466,7 +466,7 @@ namespace API.Services.Tasks
return; return;
} }
if (type == LibraryType.Book && Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != "0") if (type == LibraryType.Book && Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != Parser.Parser.DefaultVolume)
{ {
info = Parser.Parser.Parse(path, rootPath, type); info = Parser.Parser.Parse(path, rootPath, type);
var info2 = _bookService.ParseInfo(path); var info2 = _bookService.ParseInfo(path);

View File

@ -2,9 +2,9 @@ using System;
using System.IO.Compression; using System.IO.Compression;
using System.Linq; using System.Linq;
using API.Extensions; using API.Extensions;
using API.Interfaces;
using API.Middleware; using API.Middleware;
using API.Services; using API.Services;
using API.Services.HostedServices;
using Hangfire; using Hangfire;
using Hangfire.MemoryStorage; using Hangfire.MemoryStorage;
using Kavita.Common.EnvironmentInfo; using Kavita.Common.EnvironmentInfo;
@ -24,16 +24,18 @@ namespace API
public class Startup public class Startup
{ {
private readonly IConfiguration _config; private readonly IConfiguration _config;
private readonly IWebHostEnvironment _env;
public Startup(IConfiguration config) public Startup(IConfiguration config, IWebHostEnvironment env)
{ {
_config = config; _config = config;
_env = env;
} }
// This method gets called by the runtime. Use this method to add services to the container. // This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services) public void ConfigureServices(IServiceCollection services)
{ {
services.AddApplicationServices(_config); services.AddApplicationServices(_config, _env);
services.AddControllers(); services.AddControllers();
services.Configure<ForwardedHeadersOptions>(options => services.Configure<ForwardedHeadersOptions>(options =>
{ {
@ -62,6 +64,8 @@ namespace API
services.AddResponseCaching(); services.AddResponseCaching();
services.AddStatsClient(_config);
services.AddHangfire(configuration => configuration services.AddHangfire(configuration => configuration
.UseSimpleAssemblyNameTypeSerializer() .UseSimpleAssemblyNameTypeSerializer()
.UseRecommendedSerializerSettings() .UseRecommendedSerializerSettings()
@ -69,11 +73,15 @@ namespace API
// Add the processing server as IHostedService // Add the processing server as IHostedService
services.AddHangfireServer(); services.AddHangfireServer();
// Add IHostedService for startup tasks
// Any services that should be bootstrapped go here
services.AddHostedService<StartupTasksHostedService>();
} }
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline. // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IBackgroundJobClient backgroundJobs, IWebHostEnvironment env, public void Configure(IApplicationBuilder app, IBackgroundJobClient backgroundJobs, IWebHostEnvironment env,
IHostApplicationLifetime applicationLifetime, ITaskScheduler taskScheduler) IHostApplicationLifetime applicationLifetime)
{ {
app.UseMiddleware<ExceptionMiddleware>(); app.UseMiddleware<ExceptionMiddleware>();
@ -135,9 +143,6 @@ namespace API
{ {
Console.WriteLine($"Kavita - v{BuildInfo.Version}"); Console.WriteLine($"Kavita - v{BuildInfo.Version}");
}); });
// Any services that should be bootstrapped go here
taskScheduler.ScheduleTasks();
} }
private void OnShutdown() private void OnShutdown()

View File

@ -3,6 +3,11 @@
"DefaultConnection": "Data source=kavita.db" "DefaultConnection": "Data source=kavita.db"
}, },
"TokenKey": "super secret unguessable key", "TokenKey": "super secret unguessable key",
"StatsOptions": {
"ServerUrl": "http://localhost:5002",
"ServerSecret": "here's where the api key goes",
"SendDataAt": "23:50"
},
"Logging": { "Logging": {
"LogLevel": { "LogLevel": {
"Default": "Debug", "Default": "Debug",
@ -17,5 +22,6 @@
"FileSizeLimitBytes": 0, "FileSizeLimitBytes": 0,
"MaxRollingFiles": 0 "MaxRollingFiles": 0
} }
} },
"Port": 5000
} }

56
CONTRIBUTING.md Normal file
View File

@ -0,0 +1,56 @@
# How to Contribute #
We're always looking for people to help make Kavita even better; there are a number of ways to contribute.
## Documentation ##
Setup guides, FAQs: the more information we have on the [wiki](https://github.com/Kareadita/Kavita/wiki), the better.
## Development ##
### Tools required ###
- Visual Studio 2019 or higher (https://www.visualstudio.com/vs/). The community version is free and works fine. [Download it here](https://www.visualstudio.com/downloads/).
- Rider (an alternative to Visual Studio) (https://www.jetbrains.com/rider/)
- HTML/Javascript editor of choice (VS Code/Sublime Text/Webstorm/Atom/etc)
- [Git](https://git-scm.com/downloads)
- [NodeJS](https://nodejs.org/en/download/) (Node 14.X.X or higher)
- .NET 5.0+
### Getting started ###
1. Fork Kavita
2. Clone the repository into your development machine. [*info*](https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/cloning-a-repository-from-github)
- Kavita as of v0.4.2 requires Kavita-webui to be cloned next to the Kavita folder. Fork and clone it as well.
3. Install the required Node Packages
- `cd kavita-webui`
- `npm install`
- `npm install -g @angular/cli`
4. Start the webui server with `ng serve`
5. Build the project in Visual Studio/Rider, setting the startup project to `API`
6. Debug the project in Visual Studio/Rider
7. Open http://localhost:4200
8. (Deployment only) Run build.sh and pass the Runtime Identifier for your OS, or run build.sh with no arguments to build for all supported RIDs.
### Contributing Code ###
- If you're adding a new, already requested feature, please comment on [Github Issues](https://github.com/Kareadita/Kavita/issues "Github Issues") so work is not duplicated (If you want to add something not already on there, please talk to us first)
- Rebase from Kavita's develop branch, don't merge
- Make meaningful commits, or squash them
- Feel free to make a pull request before work is complete; this will let us see where it's at and make comments/suggest improvements
- Reach out to us on the discord if you have any questions
- Add tests (unit/integration)
- Commit with *nix line endings for consistency (We checkout Windows and commit *nix)
- One feature/bug fix per pull request to keep things clean and easy to understand
- Use 4 spaces instead of tabs; this is the default for VS 2019 and WebStorm (to my knowledge)
- Use 2 spaces for Kavita-webui files
### Pull Requesting ###
- Only make pull requests to develop, never master; if you make a PR to master we'll comment on it and close it
- You're probably going to get some comments or questions from us; they will be there to ensure consistency and maintainability
- We'll try to respond to pull requests as soon as possible; if it's been a day or two, please reach out to us, we may have missed it
- Each PR should come from its own [feature branch](http://martinfowler.com/bliki/FeatureBranch.html) in your fork, not develop; it should have a meaningful branch name (what is being added/fixed)
- new-feature (Good)
- fix-bug (Good)
- patch (Bad)
- develop (Bad)
If you have any questions about any of this, please let us know.

View File

@ -1,35 +1,30 @@
#This Dockerfile pulls the latest git commit and builds Kavita from source #This Dockerfile creates a build for all architectures
FROM mcr.microsoft.com/dotnet/sdk:5.0-focal AS builder
MAINTAINER Chris P #Image that copies in the files and passes them to the main image
FROM ubuntu:focal AS copytask
ENV DEBIAN_FRONTEND=noninteractive
ARG TARGETPLATFORM ARG TARGETPLATFORM
#Installs nodejs and npm #Move the output files to where they need to be
RUN curl -fsSL https://deb.nodesource.com/setup_14.x | bash - \ RUN mkdir /files
&& apt-get install -y nodejs \ COPY _output/*.tar.gz /files/
&& rm -rf /var/lib/apt/lists/* COPY Kavita-webui/dist /files/wwwroot
COPY copy_runtime.sh /copy_runtime.sh
#Builds app based on platform RUN /copy_runtime.sh
COPY build_target.sh /build_target.sh
RUN /build_target.sh
#Production image #Production image
FROM ubuntu:focal FROM ubuntu:focal
MAINTAINER Chris P COPY --from=copytask /Kavita /kavita
COPY --from=copytask /files/wwwroot /kavita/wwwroot
#Move the output files to where they need to be
COPY --from=builder /Projects/Kavita/_output/build/Kavita /kavita
#Installs program dependencies #Installs program dependencies
RUN apt-get update \ RUN apt-get update \
&& apt-get install -y libicu-dev libssl1.1 pwgen \ && apt-get install -y libicu-dev libssl1.1 pwgen \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
#Creates the manga storage directory #Creates the data directory
RUN mkdir /manga /kavita/data RUN mkdir /kavita/data
RUN cp /kavita/appsettings.Development.json /kavita/appsettings.json \ RUN cp /kavita/appsettings.Development.json /kavita/appsettings.json \
&& sed -i 's/Data source=kavita.db/Data source=data\/kavita.db/g' /kavita/appsettings.json && sed -i 's/Data source=kavita.db/Data source=data\/kavita.db/g' /kavita/appsettings.json

View File

@ -1,28 +0,0 @@
#This Dockerfile is for the musl alpine build of Kavita.
FROM alpine:latest
MAINTAINER Chris P
#Installs the needed dependencies
RUN apk update && apk add --no-cache wget curl pwgen icu-dev bash
#Downloads Kavita, unzips and moves the folders to where they need to be
RUN wget https://github.com/Kareadita/Kavita/releases/download/v0.3.7/kavita-linux-musl-x64.tar.gz \
&& tar -xzf kavita*.tar.gz \
&& mv Kavita/ /kavita/ \
&& rm kavita*.gz \
&& chmod +x /kavita/Kavita
#Creates the needed folders
RUN mkdir /manga /kavita/data /kavita/temp /kavita/cache
RUN sed -i 's/Data source=kavita.db/Data source=data\/kavita.db/g' /kavita/appsettings.json
COPY entrypoint.sh /entrypoint.sh
EXPOSE 5000
WORKDIR /kavita
ENTRYPOINT ["/bin/bash"]
CMD ["/entrypoint.sh"]

View File

@ -1,27 +0,0 @@
#This Dockerfile pulls the latest git commit and builds Kavita from source
#Production image
FROM ubuntu:focal
#Move the output files to where they need to be
COPY Kavita /kavita
#Installs program dependencies
RUN apt-get update \
&& apt-get install -y libicu-dev libssl1.1 pwgen \
&& rm -rf /var/lib/apt/lists/*
#Creates the manga storage directory
RUN mkdir /kavita/data
RUN cp /kavita/appsettings.Development.json /kavita/appsettings.json \
&& sed -i 's/Data source=kavita.db/Data source=data\/kavita.db/g' /kavita/appsettings.json
COPY entrypoint.sh /entrypoint.sh
EXPOSE 5000
WORKDIR /kavita
ENTRYPOINT ["/bin/bash"]
CMD ["/entrypoint.sh"]

12
FUNDING.yml Normal file
View File

@ -0,0 +1,12 @@
# These are supported funding model platforms
github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: ["https://paypal.me/majora2007"]

View File

@ -1,12 +1,13 @@
using System; using System;
using System.IO; using System.IO;
using System.Text.Json; using System.Text.Json;
using Kavita.Common.EnvironmentInfo;
namespace Kavita.Common namespace Kavita.Common
{ {
public static class Configuration public static class Configuration
{ {
#region JWT Token
public static bool CheckIfJwtTokenSet(string filePath) public static bool CheckIfJwtTokenSet(string filePath)
{ {
try { try {
@ -28,7 +29,6 @@ namespace Kavita.Common
return false; return false;
} }
public static bool UpdateJwtToken(string filePath, string token) public static bool UpdateJwtToken(string filePath, string token)
{ {
try try
@ -42,5 +42,93 @@ namespace Kavita.Common
return false; return false;
} }
} }
#endregion
#region Port
public static bool UpdatePort(string filePath, int port)
{
if (new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker)
{
return true;
}
try
{
var currentPort = GetPort(filePath);
var json = File.ReadAllText(filePath).Replace("\"Port\": " + currentPort, "\"Port\": " + port);
File.WriteAllText(filePath, json);
return true;
}
catch (Exception)
{
return false;
}
}
public static int GetPort(string filePath)
{
const int defaultPort = 5000;
if (new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker)
{
return defaultPort;
}
try {
var json = File.ReadAllText(filePath);
var jsonObj = JsonSerializer.Deserialize<dynamic>(json);
const string key = "Port";
if (jsonObj.TryGetProperty(key, out JsonElement tokenElement))
{
return tokenElement.GetInt32();
}
}
catch (Exception ex) {
Console.WriteLine("Error writing app settings: " + ex.Message);
}
return defaultPort;
}
#endregion
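A minimal round-trip sketch using the "Port": 5000 entry added to appsettings.json at the bottom of this commit; on Docker both calls short-circuit as the guards above show:
var settingsFile = Program.GetAppSettingFilename();   // now public, see the Program.cs change above
var port = Configuration.GetPort(settingsFile);       // 5000 when the key is present, and also the fallback value
Configuration.UpdatePort(settingsFile, 5001);         // rewrites "Port": 5000 to "Port": 5001 in the JSON text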
#region LogLevel
public static bool UpdateLogLevel(string filePath, string logLevel)
{
try
{
var currentLevel = GetLogLevel(filePath);
var json = File.ReadAllText(filePath).Replace($"\"Default\": \"{currentLevel}\"", $"\"Default\": \"{logLevel}\"");
File.WriteAllText(filePath, json);
return true;
}
catch (Exception)
{
return false;
}
}
public static string GetLogLevel(string filePath)
{
try {
var json = File.ReadAllText(filePath);
var jsonObj = JsonSerializer.Deserialize<dynamic>(json);
if (jsonObj.TryGetProperty("Logging", out JsonElement tokenElement))
{
foreach (var property in tokenElement.EnumerateObject())
{
if (!property.Name.Equals("LogLevel")) continue;
foreach (var logProperty in property.Value.EnumerateObject())
{
if (logProperty.Name.Equals("Default"))
{
return logProperty.Value.GetString();
}
}
}
}
}
catch (Exception ex) {
Console.WriteLine("Error writing app settings: " + ex.Message);
}
return "Information";
}
#endregion
} }
} }

View File

@ -4,7 +4,7 @@
<TargetFramework>net5.0</TargetFramework> <TargetFramework>net5.0</TargetFramework>
<Company>kareadita.github.io</Company> <Company>kareadita.github.io</Company>
<Product>Kavita</Product> <Product>Kavita</Product>
<AssemblyVersion>0.4.1.0</AssemblyVersion> <AssemblyVersion>0.4.2.0</AssemblyVersion>
<NeutralLanguage>en</NeutralLanguage> <NeutralLanguage>en</NeutralLanguage>
</PropertyGroup> </PropertyGroup>

33
Logo/dottrace.svg Normal file
View File

@ -0,0 +1,33 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 19.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="70px" height="70px" viewBox="0 0 70 70" style="enable-background:new 0 0 70 70;" xml:space="preserve">
<g>
<g>
<linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="-1.3318" y1="43.7371" x2="67.0419" y2="26.0967">
<stop offset="0.1237" style="stop-color:#7866FF"/>
<stop offset="0.5376" style="stop-color:#FE2EB6"/>
<stop offset="0.8548" style="stop-color:#FD0486"/>
</linearGradient>
<polygon style="fill:url(#SVGID_1_);" points="67.3,16 43.7,0 0,31.1 11.1,70 58.9,60.3 "/>
<linearGradient id="SVGID_2_" gradientUnits="userSpaceOnUse" x1="45.9148" y1="38.9098" x2="67.6577" y2="9.0989">
<stop offset="0.1237" style="stop-color:#FF0080"/>
<stop offset="0.2587" style="stop-color:#FE0385"/>
<stop offset="0.4109" style="stop-color:#FA0C92"/>
<stop offset="0.5713" style="stop-color:#F41BA9"/>
<stop offset="0.7363" style="stop-color:#EB2FC8"/>
<stop offset="0.8656" style="stop-color:#E343E6"/>
</linearGradient>
<polygon style="fill:url(#SVGID_2_);" points="67.3,16 43.7,0 38,15.7 38,47.8 70,47.8 "/>
</g>
<g>
<rect x="13.4" y="13.4" style="fill:#000000;" width="43.2" height="43.2"/>
<rect x="17.4" y="48.5" style="fill:#FFFFFF;" width="16.2" height="2.7"/>
<g>
<path style="fill:#FFFFFF;" d="M17.4,19.1h6.9c5.6,0,9.5,3.8,9.5,8.9V28c0,5-3.9,8.9-9.5,8.9h-6.9V19.1z M21.4,22.7v10.7h3
c3.2,0,5.4-2.2,5.4-5.3V28c0-3.2-2.2-5.4-5.4-5.4H21.4z"/>
<polygon style="fill:#FFFFFF;" points="40.3,22.7 34.9,22.7 34.9,19.1 49.6,19.1 49.6,22.7 44.2,22.7 44.2,37 40.3,37 "/>
</g>
</g>
</g>
</svg>


66
Logo/jetbrains.svg Normal file
View File

@ -0,0 +1,66 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 19.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="120.1px" height="130.2px" viewBox="0 0 120.1 130.2" style="enable-background:new 0 0 120.1 130.2;" xml:space="preserve"
>
<g>
<linearGradient id="XMLID_2_" gradientUnits="userSpaceOnUse" x1="31.8412" y1="120.5578" x2="110.2402" y2="73.24">
<stop offset="0" style="stop-color:#FCEE39"/>
<stop offset="1" style="stop-color:#F37B3D"/>
</linearGradient>
<path id="XMLID_3041_" style="fill:url(#XMLID_2_);" d="M118.6,71.8c0.9-0.8,1.4-1.9,1.5-3.2c0.1-2.6-1.8-4.7-4.4-4.9
c-1.2-0.1-2.4,0.4-3.3,1.1l0,0l-83.8,45.9c-1.9,0.8-3.6,2.2-4.7,4.1c-2.9,4.8-1.3,11,3.6,13.9c3.4,2,7.5,1.8,10.7-0.2l0,0l0,0
c0.2-0.2,0.5-0.3,0.7-0.5l78-54.8C117.3,72.9,118.4,72.1,118.6,71.8L118.6,71.8L118.6,71.8z"/>
<linearGradient id="XMLID_3_" gradientUnits="userSpaceOnUse" x1="48.3607" y1="6.9083" x2="119.9179" y2="69.5546">
<stop offset="0" style="stop-color:#EF5A6B"/>
<stop offset="0.57" style="stop-color:#F26F4E"/>
<stop offset="1" style="stop-color:#F37B3D"/>
</linearGradient>
<path id="XMLID_3049_" style="fill:url(#XMLID_3_);" d="M118.8,65.1L118.8,65.1L55,2.5C53.6,1,51.6,0,49.3,0
c-4.3,0-7.7,3.5-7.7,7.7v0c0,2.1,0.8,3.9,2.1,5.3l0,0l0,0c0.4,0.4,0.8,0.7,1.2,1l67.4,57.7l0,0c0.8,0.7,1.8,1.2,3,1.3
c2.6,0.1,4.7-1.8,4.9-4.4C120.2,67.3,119.7,66,118.8,65.1z"/>
<linearGradient id="XMLID_4_" gradientUnits="userSpaceOnUse" x1="52.9467" y1="63.6407" x2="10.5379" y2="37.1562">
<stop offset="0" style="stop-color:#7C59A4"/>
<stop offset="0.3852" style="stop-color:#AF4C92"/>
<stop offset="0.7654" style="stop-color:#DC4183"/>
<stop offset="0.957" style="stop-color:#ED3D7D"/>
</linearGradient>
<path id="XMLID_3042_" style="fill:url(#XMLID_4_);" d="M57.1,59.5C57,59.5,17.7,28.5,16.9,28l0,0l0,0c-0.6-0.3-1.2-0.6-1.8-0.9
c-5.8-2.2-12.2,0.8-14.4,6.6c-1.9,5.1,0.2,10.7,4.6,13.4l0,0l0,0C6,47.5,6.6,47.8,7.3,48c0.4,0.2,45.4,18.8,45.4,18.8l0,0
c1.8,0.8,3.9,0.3,5.1-1.2C59.3,63.7,59,61,57.1,59.5z"/>
<linearGradient id="XMLID_5_" gradientUnits="userSpaceOnUse" x1="52.1736" y1="3.7019" x2="10.7706" y2="37.8971">
<stop offset="0" style="stop-color:#EF5A6B"/>
<stop offset="0.364" style="stop-color:#EE4E72"/>
<stop offset="1" style="stop-color:#ED3D7D"/>
</linearGradient>
<path id="XMLID_3057_" style="fill:url(#XMLID_5_);" d="M49.3,0c-1.7,0-3.3,0.6-4.6,1.5L4.9,28.3c-0.1,0.1-0.2,0.1-0.2,0.2l-0.1,0
l0,0c-1.7,1.2-3.1,3-3.9,5.1C-1.5,39.4,1.5,45.9,7.3,48c3.6,1.4,7.5,0.7,10.4-1.4l0,0l0,0c0.7-0.5,1.3-1,1.8-1.6l34.6-31.2l0,0
c1.8-1.4,3-3.6,3-6.1v0C57.1,3.5,53.6,0,49.3,0z"/>
<g id="XMLID_3008_">
<rect id="XMLID_3033_" x="34.6" y="37.4" style="fill:#000000;" width="51" height="51"/>
<rect id="XMLID_3032_" x="39" y="78.8" style="fill:#FFFFFF;" width="19.1" height="3.2"/>
<g id="XMLID_3009_">
<path id="XMLID_3030_" style="fill:#FFFFFF;" d="M38.8,50.8l1.5-1.4c0.4,0.5,0.8,0.8,1.3,0.8c0.6,0,0.9-0.4,0.9-1.2l0-5.3l2.3,0
l0,5.3c0,1-0.3,1.8-0.8,2.3c-0.5,0.5-1.3,0.8-2.3,0.8C40.2,52.2,39.4,51.6,38.8,50.8z"/>
<path id="XMLID_3028_" style="fill:#FFFFFF;" d="M45.3,43.8l6.7,0v1.9l-4.4,0V47l4,0l0,1.8l-4,0l0,1.3l4.5,0l0,2l-6.7,0
L45.3,43.8z"/>
<path id="XMLID_3026_" style="fill:#FFFFFF;" d="M55,45.8l-2.5,0l0-2l7.3,0l0,2l-2.5,0l0,6.3l-2.3,0L55,45.8z"/>
<path id="XMLID_3022_" style="fill:#FFFFFF;" d="M39,54l4.3,0c1,0,1.8,0.3,2.3,0.7c0.3,0.3,0.5,0.8,0.5,1.4v0
c0,1-0.5,1.5-1.3,1.9c1,0.3,1.6,0.9,1.6,2v0c0,1.4-1.2,2.3-3.1,2.3l-4.3,0L39,54z M43.8,56.6c0-0.5-0.4-0.7-1-0.7l-1.5,0l0,1.5
l1.4,0C43.4,57.3,43.8,57.1,43.8,56.6L43.8,56.6z M43,59l-1.8,0l0,1.5H43c0.7,0,1.1-0.3,1.1-0.8v0C44.1,59.2,43.7,59,43,59z"/>
<path id="XMLID_3019_" style="fill:#FFFFFF;" d="M46.8,54l3.9,0c1.3,0,2.1,0.3,2.7,0.9c0.5,0.5,0.7,1.1,0.7,1.9v0
c0,1.3-0.7,2.1-1.7,2.6l2,2.9l-2.6,0l-1.7-2.5h-1l0,2.5l-2.3,0L46.8,54z M50.6,58c0.8,0,1.2-0.4,1.2-1v0c0-0.7-0.5-1-1.2-1
l-1.5,0v2H50.6z"/>
<path id="XMLID_3016_" style="fill:#FFFFFF;" d="M56.8,54l2.2,0l3.5,8.4l-2.5,0l-0.6-1.5l-3.2,0l-0.6,1.5l-2.4,0L56.8,54z
M58.8,59l-0.9-2.3L57,59L58.8,59z"/>
<path id="XMLID_3014_" style="fill:#FFFFFF;" d="M62.8,54l2.3,0l0,8.3l-2.3,0L62.8,54z"/>
<path id="XMLID_3012_" style="fill:#FFFFFF;" d="M65.7,54l2.1,0l3.4,4.4l0-4.4l2.3,0l0,8.3l-2,0L68,57.8l0,4.6l-2.3,0L65.7,54z"
/>
<path id="XMLID_3010_" style="fill:#FFFFFF;" d="M73.7,61.1l1.3-1.5c0.8,0.7,1.7,1,2.7,1c0.6,0,1-0.2,1-0.6v0
c0-0.4-0.3-0.5-1.4-0.8c-1.8-0.4-3.1-0.9-3.1-2.6v0c0-1.5,1.2-2.7,3.2-2.7c1.4,0,2.5,0.4,3.4,1.1l-1.2,1.6
c-0.8-0.5-1.6-0.8-2.3-0.8c-0.6,0-0.8,0.2-0.8,0.5v0c0,0.4,0.3,0.5,1.4,0.8c1.9,0.4,3.1,1,3.1,2.6v0c0,1.7-1.3,2.7-3.4,2.7
C76.1,62.5,74.7,62,73.7,61.1z"/>
</g>
</g>
</g>
</svg>


124
Logo/kavita.svg Normal file
View File

@ -0,0 +1,124 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 18.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 64 64" enable-background="new 0 0 64 64" xml:space="preserve">
<g>
<g>
<g>
<path fill="#4AC694" d="M32,0c17.7,0,32,14.3,32,32S49.7,64,32,64S0,49.7,0,32S14.3,0,32,0z"/>
</g>
</g>
<g>
<g>
<path fill="#424C72" d="M52,17H12c-0.6,0-1,0.4-1,1v30c0,0.6,0.4,1,1,1h14.3c1,0,1.9,0.4,2.4,1.2c0.7,1.1,1.9,1.8,3.3,1.8
s2.6-0.7,3.3-1.8c0.5-0.8,1.5-1.2,2.4-1.2H52c0.6,0,1-0.4,1-1V18C53,17.4,52.6,17,52,17z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M14,28v18h16c1.1,0,2,0.9,2,2c0-1.1,0.9-2,2-2h16V28H14z"/>
</g>
</g>
<g>
<g>
<path fill="#FFFFFF" d="M35,13c-1.7,0-3,1.3-3,3c0-1.7-1.3-3-3-3H14v31h16c1.1,0,2,0.9,2,2c0-1.1,0.9-2,2-2h16V13H35z"/>
</g>
</g>
<g>
<g>
<rect x="18" y="16" fill="#57D1F7" width="4" height="7"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M29,26.5H18c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S29.3,26.5,29,26.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M29,23.5h-4.4c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5H29c0.3,0,0.5,0.2,0.5,0.5S29.3,23.5,29,23.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M29,20.5h-4.4c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5H29c0.3,0,0.5,0.2,0.5,0.5S29.3,20.5,29,20.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M29,17.5h-4.4c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5H29c0.3,0,0.5,0.2,0.5,0.5S29.3,17.5,29,17.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M29,29.5H18c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S29.3,29.5,29,29.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M29,32.5H18c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S29.3,32.5,29,32.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M29,35.5H18c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S29.3,35.5,29,35.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M29,38.5H18c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S29.3,38.5,29,38.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M29,41.5H18c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S29.3,41.5,29,41.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M46,26.5H35c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S46.3,26.5,46,26.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M46,29.5H35c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S46.3,29.5,46,29.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M46,20.5H35c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S46.3,20.5,46,20.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M46,17.5H35c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S46.3,17.5,46,17.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M46,23.5H35c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S46.3,23.5,46,23.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M46,32.5H35c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S46.3,32.5,46,32.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M46,35.5H35c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S46.3,35.5,46,35.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M46,38.5H35c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S46.3,38.5,46,38.5z"/>
</g>
</g>
<g>
<g>
<path fill="#E4E7EF" d="M46,41.5H35c-0.3,0-0.5-0.2-0.5-0.5s0.2-0.5,0.5-0.5h11c0.3,0,0.5,0.2,0.5,0.5S46.3,41.5,46,41.5z"/>
</g>
</g>
</g>
</svg>


50
Logo/resharper.svg Normal file
View File

@ -0,0 +1,50 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 19.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="70px" height="70px" viewBox="0 0 70 70" style="enable-background:new 0 0 70 70;" xml:space="preserve">
<g>
<g>
<g>
<linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="22.9451" y1="75.7869" x2="74.7868" y2="20.6415">
<stop offset="1.612903e-002" style="stop-color:#B35BA3"/>
<stop offset="0.4044" style="stop-color:#C41E57"/>
<stop offset="0.4677" style="stop-color:#C41E57"/>
<stop offset="0.6505" style="stop-color:#EB8523"/>
<stop offset="0.9516" style="stop-color:#FEBD11"/>
</linearGradient>
<polygon style="fill:url(#SVGID_1_);" points="49.8,15.2 36,36.7 58.4,70 70,23.1 "/>
<linearGradient id="SVGID_2_" gradientUnits="userSpaceOnUse" x1="17.7187" y1="73.2922" x2="69.5556" y2="18.1519">
<stop offset="1.612903e-002" style="stop-color:#B35BA3"/>
<stop offset="0.4044" style="stop-color:#C41E57"/>
<stop offset="0.4677" style="stop-color:#C41E57"/>
<stop offset="0.7043" style="stop-color:#EB8523"/>
</linearGradient>
<polygon style="fill:url(#SVGID_2_);" points="51.1,15.7 49,0 18.8,33.6 27.6,42.3 20.8,70 58.4,70 "/>
</g>
<linearGradient id="SVGID_3_" gradientUnits="userSpaceOnUse" x1="1.8281" y1="53.4275" x2="48.8245" y2="9.2255">
<stop offset="1.612903e-002" style="stop-color:#B35BA3"/>
<stop offset="0.6613" style="stop-color:#C41E57"/>
</linearGradient>
<polygon style="fill:url(#SVGID_3_);" points="49,0 11.6,0 0,47.1 55.6,47.1 "/>
<linearGradient id="SVGID_4_" gradientUnits="userSpaceOnUse" x1="49.8935" y1="-11.5569" x2="48.8588" y2="24.0352">
<stop offset="0.5" style="stop-color:#C41E57"/>
<stop offset="0.6668" style="stop-color:#D13F48"/>
<stop offset="0.7952" style="stop-color:#D94F39"/>
<stop offset="0.8656" style="stop-color:#DD5433"/>
</linearGradient>
<polygon style="fill:url(#SVGID_4_);" points="55.3,47.1 51.1,15.7 49,0 41.7,23 "/>
</g>
<g>
<rect x="13.4" y="13.5" transform="matrix(-1 2.577289e-003 -2.577289e-003 -1 70.0288 70.081)" style="fill:#000000;" width="43.2" height="43.2"/>
<rect x="17.6" y="48.6" transform="matrix(1 -2.577289e-003 2.577289e-003 1 -0.1287 6.634109e-002)" style="fill:#FFFFFF;" width="16.2" height="2.7"/>
<path style="fill:#FFFFFF;" d="M17.4,19.1l8.2,0c2.3,0,4,0.6,5.2,1.8c1,1,1.5,2.4,1.5,4.1l0,0.1c0,1.5-0.3,2.6-1.1,3.5
c-0.7,0.9-1.6,1.6-2.8,2l4.4,6.4l-4.6,0l-3.7-5.5l-3.3,0l0,5.5l-3.9,0L17.4,19.1z M25.3,27.8c1,0,1.7-0.2,2.2-0.7
c0.5-0.5,0.8-1.1,0.8-1.8l0-0.1c0-0.9-0.3-1.5-0.8-1.9c-0.5-0.4-1.3-0.6-2.3-0.6l-3.9,0l0,5.1L25.3,27.8z"/>
<path style="fill:#FFFFFF;" d="M36,33.2l-1.9,0l0-3.3l2.5,0l0.6-3.8l-2.3,0l0-3.3l2.8,0l0.6-3.7l3.4,0l-0.6,3.7l3.7,0l0.6-3.7
l3.4,0l-0.6,3.7l1.9,0l0,3.3l-2.5,0L47,29.9l2.3,0l0,3.3l-2.8,0L45.8,37l-3.4,0l0.7-3.8l-3.7,0L38.7,37l-3.4,0L36,33.2z
M43.7,29.9l0.6-3.8l-3.7,0L40,29.9L43.7,29.9z"/>
</g>
</g>
</svg>


42
Logo/rider.svg Normal file
View File

@ -0,0 +1,42 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 19.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="70px" height="70px" viewBox="0 0 70 70" style="enable-background:new 0 0 70 70;" xml:space="preserve">
<defs>
<linearGradient id="linear-gradient" x1="70.22612" y1="27.79912" x2="-5.13024" y2="63.12242" gradientTransform="matrix(1, 0, 0, -1, 0, 71.27997)" gradientUnits="userSpaceOnUse">
<stop offset="0" stop-color="#c90f5e"/>
<stop offset="0.22111" stop-color="#c90f5e"/>
<stop offset="0.2356" stop-color="#c90f5e"/>
<stop offset="0.35559" stop-color="#ca135c"/>
<stop offset="0.46633" stop-color="#ce1e57"/>
<stop offset="0.5735" stop-color="#d4314e"/>
<stop offset="0.67844" stop-color="#dc4b41"/>
<stop offset="0.78179" stop-color="#e66d31"/>
<stop offset="0.88253" stop-color="#f3961d"/>
<stop offset="0.94241" stop-color="#fcb20f"/>
</linearGradient>
<linearGradient id="linear-gradient-2" x1="24.65904" y1="61.99608" x2="46.04762" y2="2.93445" gradientTransform="matrix(1, 0, 0, -1, 0, 71.27997)" gradientUnits="userSpaceOnUse">
<stop offset="0.04188" stop-color="#077cfb"/>
<stop offset="0.44503" stop-color="#c90f5e"/>
<stop offset="0.95812" stop-color="#077cfb"/>
</linearGradient>
<linearGradient id="linear-gradient-3" x1="17.39552" y1="63.34592" x2="33.19389" y2="7.20092" gradientTransform="matrix(1, 0, 0, -1, 0, 71.27997)" gradientUnits="userSpaceOnUse">
<stop offset="0.27749" stop-color="#c90f5e"/>
<stop offset="0.97382" stop-color="#fcb20f"/>
</linearGradient>
</defs>
<title>rider</title>
<g>
<polygon points="70 27.237 63.391 23.75 20.926 0 3.827 17.921 21.619 41.068 60.537 44.397 70 27.237" fill="url(#linear-gradient)"/>
<polygon points="50.423 16.132 44.271 1.107 27.643 17.471 11.768 50.194 49.411 70 70 57.98 50.423 16.132" fill="url(#linear-gradient-2)"/>
<polygon points="20.926 0 0 14.095 7.779 62.172 27.848 69.889 53.78 48.823 20.926 0" fill="url(#linear-gradient-3)"/>
</g>
<g>
<rect x="13.30219" y="13.19311" width="43.61371" height="43.61371"/>
<g>
<path d="M17.22741,18.86293h8.39564a7.38416,7.38416,0,0,1,5.34268,1.85358,5.86989,5.86989,0,0,1,1.52648,4.1433h0A5.74339,5.74339,0,0,1,28.567,30.5296l4.47041,6.54206H28.34891L24.42368,31.1838h-3.162v5.88785H17.22741V18.86293h0ZM25.296,27.69471c1.96262,0,3.053-1.09034,3.053-2.61682h0c0-1.74455-1.19938-2.61682-3.162-2.61682H21.15265v5.23365H25.296Z" fill="#fff"/>
<path d="M36.09034,18.86293H43.2866c5.77882,0,9.70405,3.92523,9.70405,9.15888h0c0,5.12461-3.92523,9.15888-9.70405,9.15888H36.09034V18.86293Zm4.03427,3.59813V33.47352h3.162a5.23727,5.23727,0,0,0,5.56075-5.45171h0a5.26493,5.26493,0,0,0-5.56075-5.56075h-3.162Z" fill="#fff"/>
</g>
<rect x="17.22741" y="48.62925" width="16.35514" height="2.72586" fill="#fff"/>
</g>
</svg>


1
Logo/sentry.svg Normal file
View File

@ -0,0 +1 @@
<svg class="css-15xgryy e10nushx5" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 222 66" width="400" height="119"><path d="M29,2.26a4.67,4.67,0,0,0-8,0L14.42,13.53A32.21,32.21,0,0,1,32.17,40.19H27.55A27.68,27.68,0,0,0,12.09,17.47L6,28a15.92,15.92,0,0,1,9.23,12.17H4.62A.76.76,0,0,1,4,39.06l2.94-5a10.74,10.74,0,0,0-3.36-1.9l-2.91,5a4.54,4.54,0,0,0,1.69,6.24A4.66,4.66,0,0,0,4.62,44H19.15a19.4,19.4,0,0,0-8-17.31l2.31-4A23.87,23.87,0,0,1,23.76,44H36.07a35.88,35.88,0,0,0-16.41-31.8l4.67-8a.77.77,0,0,1,1.05-.27c.53.29,20.29,34.77,20.66,35.17a.76.76,0,0,1-.68,1.13H40.6q.09,1.91,0,3.81h4.78A4.59,4.59,0,0,0,50,39.43a4.49,4.49,0,0,0-.62-2.28Z M124.32,28.28,109.56,9.22h-3.68V34.77h3.73V15.19l15.18,19.58h3.26V9.22h-3.73ZM87.15,23.54h13.23V20.22H87.14V12.53h14.93V9.21H83.34V34.77h18.92V31.45H87.14ZM71.59,20.3h0C66.44,19.06,65,18.08,65,15.7c0-2.14,1.89-3.59,4.71-3.59a12.06,12.06,0,0,1,7.07,2.55l2-2.83a14.1,14.1,0,0,0-9-3c-5.06,0-8.59,3-8.59,7.27,0,4.6,3,6.19,8.46,7.52C74.51,24.74,76,25.78,76,28.11s-2,3.77-5.09,3.77a12.34,12.34,0,0,1-8.3-3.26l-2.25,2.69a15.94,15.94,0,0,0,10.42,3.85c5.48,0,9-2.95,9-7.51C79.75,23.79,77.47,21.72,71.59,20.3ZM195.7,9.22l-7.69,12-7.64-12h-4.46L186,24.67V34.78h3.84V24.55L200,9.22Zm-64.63,3.46h8.37v22.1h3.84V12.68h8.37V9.22H131.08ZM169.41,24.8c3.86-1.07,6-3.77,6-7.63,0-4.91-3.59-8-9.38-8H154.67V34.76h3.8V25.58h6.45l6.48,9.2h4.44l-7-9.82Zm-10.95-2.5V12.6h7.17c3.74,0,5.88,1.77,5.88,4.84s-2.29,4.86-5.84,4.86Z" transform="translate(11, 11)" fill="#ffffff"></path></svg>


View File

@ -1,4 +1,4 @@
# Kavita # [<img src="/Logo/kavita.svg" width="32" alt="">]() Kavita
<div align="center"> <div align="center">
![Cover Image](https://github.com/Kareadita/kareadita.github.io/blob/main/img/features/seriesdetail.PNG?raw=true) ![Cover Image](https://github.com/Kareadita/kareadita.github.io/blob/main/img/features/seriesdetail.PNG?raw=true)
@ -9,44 +9,40 @@ your reading collection with your friends and family!
[![Release](https://img.shields.io/github/release/Kareadita/Kavita.svg?style=flat&maxAge=3600)](https://github.com/Kareadita/Kavita/releases) [![Release](https://img.shields.io/github/release/Kareadita/Kavita.svg?style=flat&maxAge=3600)](https://github.com/Kareadita/Kavita/releases)
[![License](https://img.shields.io/badge/license-GPLv3-blue.svg?style=flat)](https://github.com/Kareadita/Kavita/blob/master/LICENSE) [![License](https://img.shields.io/badge/license-GPLv3-blue.svg?style=flat)](https://github.com/Kareadita/Kavita/blob/master/LICENSE)
[![Discord](https://img.shields.io/badge/discord-chat-7289DA.svg?maxAge=60)](https://discord.gg/eczRp9eeem)
[![Downloads](https://img.shields.io/github/downloads/Kareadita/Kavita/total.svg?style=flat)](https://github.com/Kareadita/Kavita/releases) [![Downloads](https://img.shields.io/github/downloads/Kareadita/Kavita/total.svg?style=flat)](https://github.com/Kareadita/Kavita/releases)
[![Docker Pulls](https://img.shields.io/docker/pulls/kizaing/kavita.svg)](https://hub.docker.com/r/kizaing/kavita/) [![Docker Pulls](https://img.shields.io/docker/pulls/kizaing/kavita.svg)](https://hub.docker.com/r/kizaing/kavita/)
[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=Kareadita_Kavita&metric=alert_status)](https://sonarcloud.io/dashboard?id=Kareadita_Kavita)
[![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=Kareadita_Kavita&metric=sqale_rating)](https://sonarcloud.io/dashboard?id=Kareadita_Kavita) [![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=Kareadita_Kavita&metric=sqale_rating)](https://sonarcloud.io/dashboard?id=Kareadita_Kavita)
[![Security Rating](https://sonarcloud.io/api/project_badges/measure?project=Kareadita_Kavita&metric=security_rating)](https://sonarcloud.io/dashboard?id=Kareadita_Kavita) [![Security Rating](https://sonarcloud.io/api/project_badges/measure?project=Kareadita_Kavita&metric=security_rating)](https://sonarcloud.io/dashboard?id=Kareadita_Kavita)
[![Donate via Paypal](https://img.shields.io/badge/donate-paypal-blue.svg?style=popout&logo=paypal)](https://paypal.me/majora2007?locale.x=en_US) [![Backers on Open Collective](https://opencollective.com/kavita/backers/badge.svg)](#backers)
[![Sponsors on Open Collective](https://opencollective.com/kavita/sponsors/badge.svg)](#sponsors)
</div> </div>
## Goals: ## Goals
- [x] Serve up Manga/Webtoons/Comics (cbr, cbz, zip/rar, 7zip, raw images) and Books (epub, mobi, azw, djvu, pdf) - [x] Serve up Manga/Webtoons/Comics (cbr, cbz, zip/rar, 7zip, raw images) and Books (epub, mobi, azw, djvu, pdf)
- [x] First class responsive readers that work great on any device - [x] First class responsive readers that work great on any device (phone, tablet, desktop)
- [x] Provide a dark theme for web app - [x] Dark and Light themes
- [ ] Provide hooks into metadata providers to fetch metadata for Comics, Manga, and Books - [ ] Provide hooks into metadata providers to fetch metadata for Comics, Manga, and Books
- [ ] Metadata should allow for collections, want to read integration from 3rd party services, genres. - [ ] Metadata should allow for collections, want to read integration from 3rd party services, genres.
- [x] Ability to manage users, access, and ratings - [x] Ability to manage users, access, and ratings
- [ ] Ability to sync ratings and reviews to external services - [ ] Ability to sync ratings and reviews to external services
- [x] Fully Accessible - [x] Fully Accessible with active accessibility audits
- [x] Dedicated webtoon reader (in beta testing)
- [ ] And so much [more...](https://github.com/Kareadita/Kavita/projects) - [ ] And so much [more...](https://github.com/Kareadita/Kavita/projects)
## Support
[![Reddit](https://img.shields.io/badge/reddit-discussion-FF4500.svg?maxAge=60)](https://www.reddit.com/r/KavitaManga/)
[![Discord](https://img.shields.io/badge/discord-chat-7289DA.svg?maxAge=60)](https://discord.gg/eczRp9eeem)
[![GitHub - Bugs and Feature Requests Only](https://img.shields.io/badge/github-issues-red.svg?maxAge=60)](https://github.com/Kareadita/Kavita/issues)
# How to contribute ## Setup
- Ensure you've cloned Kavita-webui. You should have Projects/Kavita and Projects/Kavita-webui ### Non-Docker
- In Kavita-webui, run ng serve. This will start the webserver on localhost:4200
- Run API project in Kavita, this will start the backend on localhost:5000
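A rough sketch of that workflow, assuming the Projects/ layout above (the `dotnet run` step is an assumption for how to start the API project, not something documented here):

```bash
# Terminal 1: serve the Angular UI (per the step above, listens on http://localhost:4200)
cd Projects/Kavita-webui
ng serve

# Terminal 2: run the backend API (assumption: dotnet run from the API project; listens on http://localhost:5000)
cd Projects/Kavita/API
dotnet run
```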
## Deploy local build
- Run build.sh and pass the Runtime Identifier for your OS or just build.sh for all supported RIDs.
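For example, using the Runtime Identifiers handled by the packaging scripts in this repo:

```bash
./build.sh linux-x64   # build and package a single runtime
./build.sh             # no RID: build for all supported RIDs
```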
## How to install
- Unzip the archive for your target OS - Unzip the archive for your target OS
- Place in a directory that is writable. If on windows, do not place in Program Files - Place in a directory that is writable. If on windows, do not place in Program Files
- Linux users must ensure the directory & kavita.db is writable by Kavita (might require starting server once) - Linux users must ensure the directory & kavita.db is writable by Kavita (might require starting server once)
- Run Kavita - Run Kavita
- If you are updating, do not copy appsettings.json from the new version over. It will override your TokenKey and you will have to reauthenticate on your devices. - If you are updating, do not copy appsettings.json from the new version over. It will override your TokenKey and you will have to reauthenticate on your devices.
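A minimal sketch of those steps on Linux, assuming the x64 archive produced by the packaging scripts later in this diff (archive name and install path are assumptions):

```bash
# Extract to a writable location (the tarball contains a Kavita/ directory).
mkdir -p ~/apps && cd ~/apps
tar -xzf ~/Downloads/kavita-linux-x64.tar.gz

# Run the server; the published binary is renamed API -> Kavita by the build scripts.
cd Kavita
./Kavita
```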
## Docker ### Docker
Running your Kavita server in docker is super easy! Barely an inconvenience. You can run it with this command: Running your Kavita server in docker is super easy! Barely an inconvenience. You can run it with this command:
``` ```
@ -72,17 +68,49 @@ services:
restart: unless-stopped restart: unless-stopped
``` ```
**Note: Kavita is under heavy development and is being updated all the time, so the tag for current builds is :nightly. The :latest tag will be the latest stable release. There is also the :alpine tag if you want a smaller image, but it is only available for x64 systems.** **Note: Kavita is under heavy development and is being updated all the time, so the tag for current builds is `:nightly`. The `:latest` tag will be the latest stable release. There is also the `:alpine` tag if you want a smaller image, but it is only available for x64 systems.**
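For illustration, a minimal `docker run` sketch (the port mapping and host paths are assumptions, not values documented above; the image tag matches the nightly build published by this repo):

```bash
# Port and host paths below are assumptions for illustration only.
# /kavita/data is the config/db directory referenced by the entrypoint script later in this diff.
docker run -d --name kavita \
  -p 5000:5000 \
  -v /path/to/manga:/manga \
  -v /path/to/kavita-data:/kavita/data \
  --restart unless-stopped \
  kizaing/kavita:nightly
```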
## Got an Idea? ## Feature Requests
Got a great idea? Throw it up on the FeatHub or vote on another persons. Please check the [Project Board](https://github.com/Kareadita/Kavita/projects) first for a list of planned features. Got a great idea? Throw it up on the FeatHub or vote on another idea. Please check the [Project Board](https://github.com/Kareadita/Kavita/projects) first for a list of planned features.
[![Feature Requests](https://feathub.com/Kareadita/Kavita?format=svg)](https://feathub.com/Kareadita/Kavita) [![Feature Requests](https://feathub.com/Kareadita/Kavita?format=svg)](https://feathub.com/Kareadita/Kavita)
## Want to help?
I am looking for developers with a passion for building the next Plex for Reading. Developers with C#/ASP.NET, Angular 11 please reach out on [Discord](https://discord.gg/eczRp9eeem). ## Contributors
This project exists thanks to all the people who contribute. [Contribute](CONTRIBUTING.md).
<a href="https://github.com/Kareadita/Kavita/graphs/contributors"><img src="https://opencollective.com/kavita/contributors.svg?width=890&button=false" /></a>
## Donate ## Donate
If you like Kavita, have gotten good use out of it or feel like you want to say thanks with a few bucks, feel free to donate. Money will If you like Kavita, have gotten good use out of it or feel like you want to say thanks with a few bucks, feel free to donate. Money will go towards
likely go towards beer or hosting. expenses related to Kavita. Back us through [OpenCollective](https://opencollective.com/Kavita#backer).
[![Donate via Paypal](https://img.shields.io/badge/donate-paypal-blue.svg?style=popout&logo=paypal)](https://paypal.me/majora2007?locale.x=en_US)
## Backers
Thank you to all our backers! 🙏 [Become a backer](https://opencollective.com/Kavita#backer)
<img src="https://opencollective.com/Kavita/backers.svg?width=890"></a>
## Sponsors
Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [Become a sponsor](https://opencollective.com/Kavita#sponsor)
<img src="https://opencollective.com/Kavita/sponsors.svg?width=890"></a>
## Mega Sponsors
<img src="https://opencollective.com/Kavita/tiers/mega-sponsor.svg?width=890"></a>
## JetBrains
Thank you to [<img src="/Logo/jetbrains.svg" alt="" width="32"> JetBrains](http://www.jetbrains.com/) for providing us with free licenses to their great tools.
* [<img src="/Logo/rider.svg" alt="" width="32"> Rider](http://www.jetbrains.com/rider/)
* [<img src="/Logo/dottrace.svg" alt="" width="32"> dotTrace](http://www.jetbrains.com/dottrace/)
## Sentry
Thank you to [<img src="/Logo/sentry.svg" alt="" width="32"> Sentry](https://sentry.io/welcome/) for providing us with free license to their software.
### License
* [GNU GPL v3](http://www.gnu.org/licenses/gpl.html)
* Copyright 2010-2021

103
action-build.sh Executable file
View File

@ -0,0 +1,103 @@
#! /bin/bash
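# Builds Kavita for each Linux runtime below and packages each build as a tar.gz.
# Assumes the web UI has already been compiled into API/wwwroot (the Package step copies it into the output).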
set -e
outputFolder='_output'
ProgressStart()
{
echo "Start '$1'"
}
ProgressEnd()
{
echo "Finish '$1'"
}
Build()
{
local RID="$1"
ProgressStart "Build for $RID"
slnFile=Kavita.sln
dotnet clean $slnFile -c Release
dotnet msbuild -restore $slnFile -p:Configuration=Release -p:Platform="Any CPU" -p:RuntimeIdentifiers=$RID
ProgressEnd "Build for $RID"
}
Package()
{
local framework="$1"
local runtime="$2"
local lOutputFolder=../_output/"$runtime"/Kavita
ProgressStart "Creating $runtime Package for $framework"
# TODO: Use no-restore? Because Build should have already done it for us
echo "Building"
cd API
echo dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework
dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework
echo "Renaming API -> Kavita"
mv "$lOutputFolder"/API "$lOutputFolder"/Kavita
echo "Copying webui wwwroot to build"
cp -r wwwroot/* "$lOutputFolder"/wwwroot/
echo "Copying Install information"
cp ../INSTALL.txt "$lOutputFolder"/README.txt
echo "Copying LICENSE"
cp ../LICENSE "$lOutputFolder"/LICENSE.txt
echo "Creating tar"
cd ../$outputFolder/"$runtime"/
tar -czvf ../kavita-$runtime.tar.gz Kavita
ProgressEnd "Creating $runtime Package for $framework"
}
BuildUI()
{
ProgressStart 'Building UI'
echo 'Removing old wwwroot'
rm -rf API/wwwroot/*
cd ../Kavita-webui/ || exit
echo 'Installing web dependencies'
npm install
echo 'Building UI'
npm run prod
ls -l dist
echo 'Copying back to Kavita wwwroot'
cp -r dist/* ../Kavita/API/wwwroot
ls -l ../Kavita/API/wwwroot
cd ../Kavita/ || exit
ProgressEnd 'Building UI'
}
dir=$PWD
if [ -d _output ]
then
rm -r _output/
fi
#Build for x64
Build "linux-x64"
Package "net5.0" "linux-x64"
cd "$dir"
#Build for arm
Build "linux-arm"
Package "net5.0" "linux-arm"
cd "$dir"
#Build for arm64
Build "linux-arm64"
Package "net5.0" "linux-arm64"
cd "$dir"

View File

@ -15,6 +15,7 @@ ProgressEnd()
UpdateVersionNumber() UpdateVersionNumber()
{ {
# TODO: Enhance this to increment version number in KavitaCommon.csproj
if [ "$KAVITAVERSION" != "" ]; then if [ "$KAVITAVERSION" != "" ]; then
echo "Updating Version Info" echo "Updating Version Info"
sed -i'' -e "s/<AssemblyVersion>[0-9.*]\+<\/AssemblyVersion>/<AssemblyVersion>$KAVITAVERSION<\/AssemblyVersion>/g" src/Directory.Build.props sed -i'' -e "s/<AssemblyVersion>[0-9.*]\+<\/AssemblyVersion>/<AssemblyVersion>$KAVITAVERSION<\/AssemblyVersion>/g" src/Directory.Build.props
@ -31,7 +32,6 @@ Build()
slnFile=Kavita.sln slnFile=Kavita.sln
dotnet clean $slnFile -c Debug
dotnet clean $slnFile -c Release dotnet clean $slnFile -c Release
if [[ -z "$RID" ]]; if [[ -z "$RID" ]];
@ -47,9 +47,15 @@ Build()
BuildUI() BuildUI()
{ {
ProgressStart 'Building UI' ProgressStart 'Building UI'
echo 'Removing old wwwroot'
rm -rf API/wwwroot/*
cd ../Kavita-webui/ || exit cd ../Kavita-webui/ || exit
echo 'Installing web dependencies'
npm install npm install
echo 'Building UI'
npm run prod npm run prod
echo 'Copying back to Kavita wwwroot'
cp -r dist/* ../Kavita/API/wwwroot
cd ../Kavita/ || exit cd ../Kavita/ || exit
ProgressEnd 'Building UI' ProgressEnd 'Building UI'
} }
@ -68,6 +74,9 @@ Package()
echo dotnet publish -c Release --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework echo dotnet publish -c Release --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework
dotnet publish -c Release --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework dotnet publish -c Release --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework
echo "Recopying wwwroot due to bug"
cp -r ./wwwroot/* $lOutputFolder/wwwroot
echo "Copying Install information" echo "Copying Install information"
cp ../INSTALL.txt "$lOutputFolder"/README.txt cp ../INSTALL.txt "$lOutputFolder"/README.txt
@ -92,8 +101,8 @@ Package()
RID="$1" RID="$1"
Build
BuildUI BuildUI
Build
dir=$PWD dir=$PWD

View File

@ -1,27 +0,0 @@
#!/bin/bash
mkdir Projects
cd Projects
git clone https://github.com/Kareadita/Kavita.git
git clone https://github.com/Kareadita/Kavita-webui.git
cd Kavita
chmod +x build.sh
#Builds program based on the target platform
if [ "$TARGETPLATFORM" == "linux/amd64" ]
then
./build.sh linux-x64
mv /Projects/Kavita/_output/linux-x64 /Projects/Kavita/_output/build
elif [ "$TARGETPLATFORM" == "linux/arm/v7" ]
then
./build.sh linux-arm
mv /Projects/Kavita/_output/linux-arm /Projects/Kavita/_output/build
elif [ "$TARGETPLATFORM" == "linux/arm64" ]
then
./build.sh linux-arm64
mv /Projects/Kavita/_output/linux-arm64 /Projects/Kavita/_output/build
fi

16
copy_runtime.sh Executable file
View File

@ -0,0 +1,16 @@
#!/bin/bash
#Copies the correct version of Kavita into the image
set -xv
if [ "$TARGETPLATFORM" == "linux/amd64" ]
then
tar xf /files/kavita-linux-x64.tar.gz -C /
elif [ "$TARGETPLATFORM" == "linux/arm/v7" ]
then
tar xf /files/kavita-linux-arm.tar.gz -C /
elif [ "$TARGETPLATFORM" == "linux/arm64" ]
then
tar xf /files/kavita-linux-arm64.tar.gz -C /
fi

111
docker-build.sh Normal file
View File

@ -0,0 +1,111 @@
#! /bin/bash
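# Builds the web UI, packages Kavita for linux-x64, linux-arm and linux-arm64,
# then builds and pushes the multi-arch Docker image via the buildx call at the end.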
set -e
outputFolder='_output'
ProgressStart()
{
echo "Start '$1'"
}
ProgressEnd()
{
echo "Finish '$1'"
}
Build()
{
local RID="$1"
ProgressStart "Build for $RID"
slnFile=Kavita.sln
dotnet clean $slnFile -c Debug
dotnet clean $slnFile -c Release
dotnet msbuild -restore $slnFile -p:Configuration=Release -p:Platform="Any CPU" -p:RuntimeIdentifiers=$RID
ProgressEnd "Build for $RID"
}
BuildUI()
{
ProgressStart 'Building UI'
echo 'Removing old wwwroot'
rm -rf API/wwwroot/*
cd ../Kavita-webui/ || exit
echo 'Installing web dependencies'
npm install
echo 'Building UI'
npm run prod
echo 'Copying back to Kavita wwwroot'
cp -r dist/* ../Kavita/API/wwwroot
cd ../Kavita/ || exit
ProgressEnd 'Building UI'
}
Package()
{
local framework="$1"
local runtime="$2"
local lOutputFolder=../_output/"$runtime"/Kavita
ProgressStart "Creating $runtime Package for $framework"
# TODO: Use no-restore? Because Build should have already done it for us
echo "Building"
cd API
echo dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework
dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework
echo "Copying Install information"
cp ../INSTALL.txt "$lOutputFolder"/README.txt
echo "Copying LICENSE"
cp ../LICENSE "$lOutputFolder"/LICENSE.txt
echo "Renaming API -> Kavita"
mv "$lOutputFolder"/API "$lOutputFolder"/Kavita
echo "Creating tar"
cd ../$outputFolder/"$runtime"/
tar -czvf ../kavita-$runtime.tar.gz Kavita
ProgressEnd "Creating $runtime Package for $framework"
}
dir=$PWD
if [ -d _output ]
then
rm -r _output/
fi
BuildUI
#Build for x64
Build "linux-x64"
Package "net5.0" "linux-x64"
cd "$dir"
#Build for arm
Build "linux-arm"
Package "net5.0" "linux-arm"
cd "$dir"
#Build for arm64
Build "linux-arm64"
Package "net5.0" "linux-arm64"
cd "$dir"
#Builds Docker images
docker buildx build -t kizaing/kavita:nightly --platform linux/amd64,linux/arm/v7,linux/arm64 . --push

View File

@ -13,7 +13,7 @@ then
rm /kavita/appsettings.json rm /kavita/appsettings.json
ln -s /kavita/data/appsettings.json /kavita/ ln -s /kavita/data/appsettings.json /kavita/
else else
mv /kavita/appsettings.json /kavita/data/ mv /kavita/appsettings.json /kavita/data/ || true
ln -s /kavita/data/appsettings.json /kavita/ ln -s /kavita/data/appsettings.json /kavita/
fi fi
@ -55,11 +55,11 @@ then
else else
if [ -d /kavita/data/logs ] if [ -d /kavita/data/logs ]
then then
touch /kavita/data/logs/kavita.log echo "" > /kavita/data/logs/kavita.log || true
ln -s /kavita/data/logs/kavita.log /kavita/ ln -s /kavita/data/logs/kavita.log /kavita/
else else
mkdir /kavita/data/logs mkdir /kavita/data/logs
touch /kavita/data/logs/kavita.log echo "" > /kavita/data/logs/kavita.log || true
ln -s /kavita/data/logs/kavita.log /kavita/ ln -s /kavita/data/logs/kavita.log /kavita/
fi fi