diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 000000000..8bca3c1ef --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] +patreon: # Replace with a single Patreon username +open_collective: kavita # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: ["https://paypal.me/majora2007"] diff --git a/.github/workflows/discord-release-msg.yml b/.github/workflows/discord-release-msg.yml new file mode 100644 index 000000000..f3acd153e --- /dev/null +++ b/.github/workflows/discord-release-msg.yml @@ -0,0 +1,17 @@ +name: Release messages to discord announcement channel + +on: + release: + types: + - created + +jobs: + run_main: + runs-on: ubuntu-18.04 + name: Sends custom message + steps: + - name: Sending message + uses: nhevia/discord-styled-releases@main + with: + webhook_id: ${{ secrets.DISCORD_WEBHOOK_ID }} + webhook_token: ${{ secrets.DISCORD_WEBHOOK_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index c42f0a5eb..c39180b0c 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -1,4 +1,4 @@ -name: CI to Docker Hub +name: Build Nightly Docker on: push: @@ -13,12 +13,46 @@ jobs: - name: Check Out Repo uses: actions/checkout@v2 + - name: Check Out WebUI + uses: actions/checkout@v2 + with: + repository: Kareadita/Kavita-webui + ref: develop + path: Kavita-webui/ + + - name: NodeJS to Compile WebUI + uses: actions/setup-node@v2.1.5 + with: + node-version: '14' + - run: | + + cd Kavita-webui/ || exit + echo 'Installing web dependencies' + npm install + + echo 'Building UI' + npm run prod + + echo 'Copying back to Kavita wwwroot' + rsync -a dist/ ../API/wwwroot/ + + cd ../ || exit + + - name: Compile dotnet app + uses: actions/setup-dotnet@v1 + with: + dotnet-version: '5.0.x' + - run: ./action-build.sh + - name: Login to Docker Hub uses: docker/login-action@v1 with: username: ${{ secrets.DOCKER_HUB_USERNAME }} password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - name: Set up Docker Buildx id: buildx uses: docker/setup-buildx-action@v1 @@ -27,10 +61,19 @@ jobs: id: docker_build uses: docker/build-push-action@v2 with: - context: ./ - file: ./Dockerfile + context: . + platforms: linux/amd64,linux/arm/v7,linux/arm64 push: true - tags: kizaing/kavita:nightly-amd64 + tags: kizaing/kavita:nightly - name: Image digest - run: echo ${{ steps.docker_build.outputs.digest }} \ No newline at end of file + run: echo ${{ steps.docker_build.outputs.digest }} + + - name: Notify Discord + uses: rjstone/discord-webhook-notify@v1 + with: + severity: info + description: + details: 'https://hub.docker.com/r/kizaing/kavita/tags?page=1&ordering=last_updated' + text: A new nightly build has been released for docker. 
+ webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }} diff --git a/.github/workflows/stable-docker.yml b/.github/workflows/stable-docker.yml new file mode 100644 index 000000000..3ac5b818f --- /dev/null +++ b/.github/workflows/stable-docker.yml @@ -0,0 +1,79 @@ +name: Build Stable Docker + +on: + push: + branches: + - 'main' + +jobs: + docker: + runs-on: ubuntu-latest + steps: + + - name: Check Out Repo + uses: actions/checkout@v2 + + - name: Check Out WebUI + uses: actions/checkout@v2 + with: + repository: Kareadita/Kavita-webui + ref: main + path: Kavita-webui/ + + - name: NodeJS to Compile WebUI + uses: actions/setup-node@v2.1.5 + with: + node-version: '14' + - run: | + + cd Kavita-webui/ || exit + echo 'Installing web dependencies' + npm install + + echo 'Building UI' + npm run prod + + echo 'Copying back to Kavita wwwroot' + rsync -a dist/ ../API/wwwroot/ + + cd ../ || exit + + - name: Compile dotnet app + uses: actions/setup-dotnet@v1 + with: + dotnet-version: '5.0.x' + - run: ./action-build.sh + + - name: Login to Docker Hub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKER_HUB_USERNAME }} + password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v1 + + - name: Build and push + id: docker_build + uses: docker/build-push-action@v2 + with: + context: . + platforms: linux/amd64,linux/arm/v7,linux/arm64 + push: true + tags: kizaing/kavita:latest + + - name: Image digest + run: echo ${{ steps.docker_build.outputs.digest }} + + - name: Notify Discord + uses: rjstone/discord-webhook-notify@v1 + with: + severity: info + description: + details: 'https://hub.docker.com/r/kizaing/kavita/tags?page=1&ordering=last_updated' + text: A new stable build has been released for docker. 
+ webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }} \ No newline at end of file diff --git a/.gitignore b/.gitignore index 59b1114f5..8db0960dc 100644 --- a/.gitignore +++ b/.gitignore @@ -453,4 +453,5 @@ cache/ /API/cache/ /API/temp/ _temp/ -_output/ \ No newline at end of file +_output/ +stats/ \ No newline at end of file diff --git a/API.Tests/Comparers/NaturalSortComparerTest.cs b/API.Tests/Comparers/NaturalSortComparerTest.cs index 099da0546..d7c58d45a 100644 --- a/API.Tests/Comparers/NaturalSortComparerTest.cs +++ b/API.Tests/Comparers/NaturalSortComparerTest.cs @@ -38,6 +38,14 @@ namespace API.Tests.Comparers new[] {"Batman - Black white vol 1 #04.cbr", "Batman - Black white vol 1 #03.cbr", "Batman - Black white vol 1 #01.cbr", "Batman - Black white vol 1 #02.cbr"}, new[] {"Batman - Black white vol 1 #01.cbr", "Batman - Black white vol 1 #02.cbr", "Batman - Black white vol 1 #03.cbr", "Batman - Black white vol 1 #04.cbr"} )] + [InlineData( + new[] {"3and4.cbz", "The World God Only Knows - Oneshot.cbz", "5.cbz", "1and2.cbz"}, + new[] {"1and2.cbz", "3and4.cbz", "5.cbz", "The World God Only Knows - Oneshot.cbz"} + )] + [InlineData( + new[] {"Solo Leveling - c000 (v01) - p000 [Cover] [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p001 [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p002 [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p003 [dig] [Yen Press] [LuCaZ].jpg"}, + new[] {"Solo Leveling - c000 (v01) - p000 [Cover] [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p001 [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p002 [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p003 [dig] [Yen Press] [LuCaZ].jpg"} + )] public void TestNaturalSortComparer(string[] input, string[] expected) { Array.Sort(input, _nc); diff --git a/API.Tests/Parser/ComicParserTests.cs b/API.Tests/Parser/ComicParserTests.cs index 9d91a5feb..6e33dd89c 100644 --- a/API.Tests/Parser/ComicParserTests.cs +++ b/API.Tests/Parser/ComicParserTests.cs @@ -20,6 +20,8 @@ namespace API.Tests.Parser [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "Scott Pilgrim")] [InlineData("Wolverine - Origins 003 (2006) (digital) (Minutemen-PhD)", "Wolverine - Origins")] [InlineData("Invincible Vol 01 Family matters (2005) (Digital).cbr", "Invincible")] + [InlineData("Amazing Man Comics chapter 25", "Amazing Man Comics")] + [InlineData("Amazing Man Comics issue #25", "Amazing Man Comics")] public void ParseComicSeriesTest(string filename, string expected) { Assert.Equal(expected, API.Parser.Parser.ParseComicSeries(filename)); @@ -40,6 +42,7 @@ namespace API.Tests.Parser [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005)", "2")] [InlineData("Superman v1 024 (09-10 1943)", "1")] + [InlineData("Amazing Man Comics chapter 25", "0")] public void ParseComicVolumeTest(string filename, string expected) { Assert.Equal(expected, API.Parser.Parser.ParseComicVolume(filename)); @@ -61,6 +64,7 @@ namespace API.Tests.Parser [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] [InlineData("Superman v1 024 (09-10 1943)", "24")] [InlineData("Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr", "70.5")] + [InlineData("Amazing Man Comics chapter 25", "25")] public void ParseComicChapterTest(string filename, string expected) { Assert.Equal(expected, API.Parser.Parser.ParseComicChapter(filename)); diff --git a/API.Tests/Parser/MangaParserTests.cs b/API.Tests/Parser/MangaParserTests.cs index e09166585..1123404b0 100644 --- a/API.Tests/Parser/MangaParserTests.cs +++ b/API.Tests/Parser/MangaParserTests.cs @@ -145,6 +145,7 @@ namespace API.Tests.Parser [InlineData("X-Men v1 #201 (September 2007).cbz", "X-Men")] [InlineData("Kodoja #001 (March 2016)", "Kodoja")] [InlineData("Boku No Kokoro No Yabai Yatsu - Chapter 054 I Prayed At The Shrine (V0).cbz", "Boku No Kokoro No Yabai Yatsu")] + [InlineData("Kiss x Sis - Ch.36 - A Cold Home Visit.cbz", "Kiss x Sis")] public void ParseSeriesTest(string filename, string expected) { Assert.Equal(expected, API.Parser.Parser.ParseSeries(filename)); @@ -241,7 +242,9 @@ namespace API.Tests.Parser [InlineData("Ani-Hina Art Collection.cbz", true)] [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", true)] [InlineData("A Town Where You Live - Bonus Chapter.zip", true)] - [InlineData("Yuki Merry - 4-Komga Anthology", true)] + [InlineData("Yuki Merry - 4-Komga Anthology", false)] + [InlineData("Beastars - SP01", false)] + [InlineData("Beastars SP01", false)] public void ParseMangaSpecialTest(string input, bool expected) { Assert.Equal(expected, !string.IsNullOrEmpty(API.Parser.Parser.ParseMangaSpecial(input))); diff --git a/API.Tests/Parser/ParserTest.cs b/API.Tests/Parser/ParserTest.cs index 314c7cd11..4a1a1babd 100644 --- a/API.Tests/Parser/ParserTest.cs +++ b/API.Tests/Parser/ParserTest.cs @@ -5,6 +5,16 @@ namespace API.Tests.Parser { public class ParserTests { + + [Theory] + [InlineData("Beastars - SP01", true)] + [InlineData("Beastars SP01", true)] + [InlineData("Beastars Special 01", false)] + [InlineData("Beastars Extra 01", false)] + public void HasSpecialTest(string input, bool expected) + { + Assert.Equal(expected, HasSpecialMarker(input)); + } [Theory] [InlineData("0001", "1")] diff --git a/API.Tests/Services/ArchiveServiceTests.cs b/API.Tests/Services/ArchiveServiceTests.cs index d907ab75a..50d2d0673 100644 --- a/API.Tests/Services/ArchiveServiceTests.cs +++ b/API.Tests/Services/ArchiveServiceTests.cs @@ -16,11 +16,12 @@ namespace API.Tests.Services private readonly ITestOutputHelper _testOutputHelper; private readonly ArchiveService _archiveService; private readonly ILogger _logger = Substitute.For>(); + private readonly ILogger _directoryServiceLogger = Substitute.For>(); public ArchiveServiceTests(ITestOutputHelper testOutputHelper) { _testOutputHelper = testOutputHelper; - _archiveService = new ArchiveService(_logger); + _archiveService = new ArchiveService(_logger, new DirectoryService(_directoryServiceLogger)); } [Theory] @@ -154,7 +155,7 @@ namespace API.Tests.Services [InlineData("sorting.zip", "sorting.expected.jpg")] public void 
GetCoverImage_Default_Test(string inputFile, string expectedOutputFile) { - var archiveService = Substitute.For(_logger); + var archiveService = Substitute.For(_logger, new DirectoryService(_directoryServiceLogger)); var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"); var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile)); archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.Default); @@ -174,7 +175,7 @@ namespace API.Tests.Services [InlineData("sorting.zip", "sorting.expected.jpg")] public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile) { - var archiveService = Substitute.For(_logger); + var archiveService = Substitute.For(_logger, new DirectoryService(_directoryServiceLogger)); var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"); var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile)); diff --git a/API/API.csproj b/API/API.csproj index 458830ca1..50a464d0b 100644 --- a/API/API.csproj +++ b/API/API.csproj @@ -64,23 +64,147 @@ + + + + <_ContentIncludedByDefault Remove="logs\kavita.json" /> + <_ContentIncludedByDefault Remove="wwwroot\3rdpartylicenses.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\6.d9925ea83359bb4c7278.js" /> + <_ContentIncludedByDefault Remove="wwwroot\6.d9925ea83359bb4c7278.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\7.860cdd6fd9d758e6c210.js" /> + <_ContentIncludedByDefault Remove="wwwroot\7.860cdd6fd9d758e6c210.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\8.028f6737a2f0621d40c7.js" /> + <_ContentIncludedByDefault Remove="wwwroot\8.028f6737a2f0621d40c7.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\EBGarmond\EBGaramond-Italic-VariableFont_wght.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\EBGarmond\EBGaramond-VariableFont_wght.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\EBGarmond\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Black.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-BlackItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-BoldItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ExtraBold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ExtraBoldItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ExtraLight.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ExtraLightItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Italic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Light.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-LightItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Medium.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-MediumItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-SemiBold.ttf" /> + 
<_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-SemiBoldItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Thin.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ThinItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Black.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-BlackItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-BoldItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Italic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Light.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-LightItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Thin.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-ThinItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Baskerville\LibreBaskerville-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Baskerville\LibreBaskerville-Italic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Baskerville\LibreBaskerville-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Baskerville\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Caslon\LibreCaslonText-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Caslon\LibreCaslonText-Italic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Caslon\LibreCaslonText-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Caslon\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Black.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-BlackItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-BoldItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Italic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Light.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-LightItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Nanum_Gothic\NanumGothic-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Nanum_Gothic\NanumGothic-ExtraBold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Nanum_Gothic\NanumGothic-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Nanum_Gothic\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\Oswald-VariableFont_wght.ttf" /> + <_ContentIncludedByDefault 
Remove="wwwroot\assets\fonts\Oswald\README.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-ExtraLight.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-Light.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-Medium.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-SemiBold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\RocknRoll_One\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\RocknRoll_One\RocknRollOne-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder-min.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder2-min.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder2.dark-min.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder2.dark.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder2.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\image-placeholder-min.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\image-placeholder.dark-min.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\image-placeholder.dark.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\image-placeholder.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\preset-light.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\themes\dark.scss" /> + <_ContentIncludedByDefault Remove="wwwroot\common.ad975892146299f80adb.js" /> + <_ContentIncludedByDefault Remove="wwwroot\common.ad975892146299f80adb.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\EBGaramond-VariableFont_wght.2a1da2dbe7a28d63f8cb.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.0fea24969112a781acd2.eot" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.c967a94cfbe2b06627ff.woff2" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.dc2cbadd690e1d4b2c9c.woff" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.e33e2cf6e02cac2ccb77.svg" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.ec82f282c7f54b637098.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.06b9d19ced8d17f3d5cb.svg" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.08f9891a6f44d9546678.eot" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.1008b5226941c24f4468.woff2" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.1069ea55beaa01060302.woff" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.1495f578452eb676f730.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.10ecefc282f2761808bf.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.371dbce0dd46bd4d2033.svg" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.3a24a60e7f9c6574864a.eot" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.3ceb50e7bcafb577367c.woff2" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.46fdbd2d897f8824e63c.woff" /> + <_ContentIncludedByDefault Remove="wwwroot\favicon.ico" /> + <_ContentIncludedByDefault 
Remove="wwwroot\FiraSans-Regular.1c0bf0728b51cb9f2ddc.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\index.html" /> + <_ContentIncludedByDefault Remove="wwwroot\Lato-Regular.9919edff6283018571ad.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\LibreBaskerville-Regular.a27f99ca45522bb3d56d.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\main.44f5c0973044295d8be0.js" /> + <_ContentIncludedByDefault Remove="wwwroot\main.44f5c0973044295d8be0.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\Merriweather-Regular.55c73e48e04ec926ebfe.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\NanumGothic-Regular.6c84540de7730f833d6c.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\polyfills.348e08e9d0e910a15938.js" /> + <_ContentIncludedByDefault Remove="wwwroot\polyfills.348e08e9d0e910a15938.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\RocknRollOne-Regular.c75da4712d1e65ed1f69.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\runtime.ea545c6916f85411478f.js" /> + <_ContentIncludedByDefault Remove="wwwroot\runtime.ea545c6916f85411478f.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\styles.4bd902bb3037f36f2c64.css" /> + <_ContentIncludedByDefault Remove="wwwroot\styles.4bd902bb3037f36f2c64.css.map" /> + <_ContentIncludedByDefault Remove="wwwroot\vendor.6b2a0912ae80e6fd297f.js" /> + <_ContentIncludedByDefault Remove="wwwroot\vendor.6b2a0912ae80e6fd297f.js.map" /> diff --git a/API/Comparators/StringLogicalComparer.cs b/API/Comparators/StringLogicalComparer.cs index fe930c45c..67aa72225 100644 --- a/API/Comparators/StringLogicalComparer.cs +++ b/API/Comparators/StringLogicalComparer.cs @@ -2,7 +2,7 @@ // Version 2 // Taken from: https://www.codeproject.com/Articles/11016/Numeric-String-Sort-in-C -using System; +using static System.Char; namespace API.Comparators { @@ -20,26 +20,26 @@ namespace API.Comparators if (string.IsNullOrEmpty(s2)) return -1; //WE style, special case - var sp1 = Char.IsLetterOrDigit(s1, 0); - var sp2 = Char.IsLetterOrDigit(s2, 0); + var sp1 = IsLetterOrDigit(s1, 0); + var sp2 = IsLetterOrDigit(s2, 0); if(sp1 && !sp2) return 1; if(!sp1 && sp2) return -1; int i1 = 0, i2 = 0; //current index while(true) { - var c1 = Char.IsDigit(s1, i1); - var c2 = Char.IsDigit(s2, i2); + var c1 = IsDigit(s1, i1); + var c2 = IsDigit(s2, i2); int r; // temp result if(!c1 && !c2) { - bool letter1 = Char.IsLetter(s1, i1); - bool letter2 = Char.IsLetter(s2, i2); + bool letter1 = IsLetter(s1, i1); + bool letter2 = IsLetter(s2, i2); if((letter1 && letter2) || (!letter1 && !letter2)) { if(letter1 && letter2) { - r = Char.ToLower(s1[i1]).CompareTo(Char.ToLower(s2[i2])); + r = ToLower(s1[i1]).CompareTo(ToLower(s2[i2])); } else { @@ -114,8 +114,8 @@ namespace API.Comparators { nzStart = start; end = start; - bool countZeros = true; - while(Char.IsDigit(s, end)) + var countZeros = true; + while(IsDigit(s, end)) { if(countZeros && s[end].Equals('0')) { diff --git a/API/Configurations/CustomOptions/StatsOptions.cs b/API/Configurations/CustomOptions/StatsOptions.cs new file mode 100644 index 000000000..ac0cd0ac5 --- /dev/null +++ b/API/Configurations/CustomOptions/StatsOptions.cs @@ -0,0 +1,30 @@ +using System; + +namespace API.Configurations.CustomOptions +{ + public class StatsOptions + { + public string ServerUrl { get; set; } + public string ServerSecret { get; set; } + public string SendDataAt { get; set; } + + private const char Separator = ':'; + + public short SendDataHour => GetValueFromSendAt(0); + public short SendDataMinute => GetValueFromSendAt(1); + + // 
The expected SendDataAt format is: Hour:Minute. Ex: 19:45 + private short GetValueFromSendAt(int index) + { + var key = $"{nameof(StatsOptions)}:{nameof(SendDataAt)}"; + + if (string.IsNullOrEmpty(SendDataAt)) + throw new InvalidOperationException($"{key} is invalid. Check the app settings file"); + + if (short.TryParse(SendDataAt.Split(Separator)[index], out var parsedValue)) + return parsedValue; + + throw new InvalidOperationException($"Could not parse {key}. Check the app settings file"); + } + } +} \ No newline at end of file diff --git a/API/Constants/PolicyConstants.cs b/API/Constants/PolicyConstants.cs index 6b6d93ae0..c76d71926 100644 --- a/API/Constants/PolicyConstants.cs +++ b/API/Constants/PolicyConstants.cs @@ -4,5 +4,9 @@ { public const string AdminRole = "Admin"; public const string PlebRole = "Pleb"; + /// + /// Used to give a user ability to download files from the server + /// + public const string DownloadRole = "Download"; } } \ No newline at end of file diff --git a/API/Controllers/AccountController.cs b/API/Controllers/AccountController.cs index 8c3c05c85..876cecf84 100644 --- a/API/Controllers/AccountController.cs +++ b/API/Controllers/AccountController.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Reflection; using System.Threading.Tasks; using API.Constants; using API.DTOs; @@ -82,42 +83,55 @@ namespace API.Controllers [HttpPost("register")] public async Task> Register(RegisterDto registerDto) { - if (await _userManager.Users.AnyAsync(x => x.NormalizedUserName == registerDto.Username.ToUpper())) + try { - return BadRequest("Username is taken."); - } - - var user = _mapper.Map(registerDto); - user.UserPreferences ??= new AppUserPreferences(); - - var result = await _userManager.CreateAsync(user, registerDto.Password); - - if (!result.Succeeded) return BadRequest(result.Errors); - - var role = registerDto.IsAdmin ? PolicyConstants.AdminRole : PolicyConstants.PlebRole; - var roleResult = await _userManager.AddToRoleAsync(user, role); - - if (!roleResult.Succeeded) return BadRequest(result.Errors); - - // When we register an admin, we need to grant them access to all Libraries. - if (registerDto.IsAdmin) - { - _logger.LogInformation("{UserName} is being registered as admin. Granting access to all libraries", user.UserName); - var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesAsync()).ToList(); - foreach (var lib in libraries) + if (await _userManager.Users.AnyAsync(x => x.NormalizedUserName == registerDto.Username.ToUpper())) { - lib.AppUsers ??= new List(); - lib.AppUsers.Add(user); + return BadRequest("Username is taken."); } - if (libraries.Any() && !await _unitOfWork.Complete()) _logger.LogError("There was an issue granting library access. Please do this manually"); + + var user = _mapper.Map(registerDto); + user.UserPreferences ??= new AppUserPreferences(); + + var result = await _userManager.CreateAsync(user, registerDto.Password); + + if (!result.Succeeded) return BadRequest(result.Errors); + + var role = registerDto.IsAdmin ? PolicyConstants.AdminRole : PolicyConstants.PlebRole; + var roleResult = await _userManager.AddToRoleAsync(user, role); + + if (!roleResult.Succeeded) return BadRequest(result.Errors); + + // When we register an admin, we need to grant them access to all Libraries. + if (registerDto.IsAdmin) + { + _logger.LogInformation("{UserName} is being registered as admin. 
Granting access to all libraries", + user.UserName); + var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesAsync()).ToList(); + foreach (var lib in libraries) + { + lib.AppUsers ??= new List(); + lib.AppUsers.Add(user); + } + + if (libraries.Any() && !await _unitOfWork.CommitAsync()) + _logger.LogError("There was an issue granting library access. Please do this manually"); + } + + return new UserDto + { + Username = user.UserName, + Token = await _tokenService.CreateToken(user), + Preferences = _mapper.Map(user.UserPreferences) + }; + } + catch (Exception ex) + { + _logger.LogError(ex, "Something went wrong when registering user"); + await _unitOfWork.RollbackAsync(); } - return new UserDto - { - Username = user.UserName, - Token = await _tokenService.CreateToken(user), - Preferences = _mapper.Map(user.UserPreferences) - }; + return BadRequest("Something went wrong when registering user"); } [HttpPost("login")] @@ -139,7 +153,7 @@ namespace API.Controllers user.UserPreferences ??= new AppUserPreferences(); _unitOfWork.UserRepository.Update(user); - await _unitOfWork.Complete(); + await _unitOfWork.CommitAsync(); _logger.LogInformation("{UserName} logged in at {Time}", user.UserName, user.LastActive); @@ -150,5 +164,50 @@ namespace API.Controllers Preferences = _mapper.Map(user.UserPreferences) }; } + + [HttpGet("roles")] + public ActionResult> GetRoles() + { + return typeof(PolicyConstants) + .GetFields(BindingFlags.Public | BindingFlags.Static) + .Where(f => f.FieldType == typeof(string)) + .ToDictionary(f => f.Name, + f => (string) f.GetValue(null)).Values.ToList(); + } + + [HttpPost("update-rbs")] + public async Task UpdateRoles(UpdateRbsDto updateRbsDto) + { + var user = await _userManager.Users + .Include(u => u.UserPreferences) + .SingleOrDefaultAsync(x => x.NormalizedUserName == updateRbsDto.Username.ToUpper()); + if (updateRbsDto.Roles.Contains(PolicyConstants.AdminRole) || + updateRbsDto.Roles.Contains(PolicyConstants.PlebRole)) + { + return BadRequest("Invalid Roles"); + } + + var existingRoles = (await _userManager.GetRolesAsync(user)) + .Where(s => s != PolicyConstants.AdminRole && s != PolicyConstants.PlebRole) + .ToList(); + + // Find what needs to be added and what needs to be removed + var rolesToRemove = existingRoles.Except(updateRbsDto.Roles); + var result = await _userManager.AddToRolesAsync(user, updateRbsDto.Roles); + + if (!result.Succeeded) + { + await _unitOfWork.RollbackAsync(); + return BadRequest("Something went wrong, unable to update user's roles"); + } + if ((await _userManager.RemoveFromRolesAsync(user, rolesToRemove)).Succeeded) + { + return Ok(); + } + + await _unitOfWork.RollbackAsync(); + return BadRequest("Something went wrong, unable to update user's roles"); + + } } } \ No newline at end of file diff --git a/API/Controllers/BookController.cs b/API/Controllers/BookController.cs index a2af28ab6..e5a980467 100644 --- a/API/Controllers/BookController.cs +++ b/API/Controllers/BookController.cs @@ -186,6 +186,9 @@ namespace API.Controllers var content = await contentFileRef.ReadContentAsync(); if (contentFileRef.ContentType != EpubContentType.XHTML_1_1) return Ok(content); + // In more cases than not, due to this being XML not HTML, we need to escape the script tags. 
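The escape step added on the next line calls BookService.EscapeTags, whose implementation is not shown in this section. A minimal sketch of what such a step can look like, assuming a simple regex pass over the content; the class and method below are hypothetical stand-ins for illustration, not the method from this PR:

using System.Net;
using System.Text.RegularExpressions;

public static class ScriptTagEscaper
{
    // Turn raw <script>/<style> tags into encoded text so the XHTML content survives
    // being re-parsed as HTML without those nodes becoming active markup.
    public static string EscapeTags(string content) =>
        Regex.Replace(content, @"</?(script|style)[^>]*>",
            m => WebUtility.HtmlEncode(m.Value), RegexOptions.IgnoreCase);
}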
+ content = BookService.EscapeTags(content); + doc.LoadHtml(content); var body = doc.DocumentNode.SelectSingleNode("//body"); diff --git a/API/Controllers/CollectionController.cs b/API/Controllers/CollectionController.cs index 27455a283..e09f8592a 100644 --- a/API/Controllers/CollectionController.cs +++ b/API/Controllers/CollectionController.cs @@ -1,4 +1,5 @@ -using System.Collections.Generic; +using System; +using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using API.Constants; @@ -9,7 +10,6 @@ using API.Interfaces; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Mvc; -using Microsoft.Extensions.Logging; namespace API.Controllers { @@ -33,11 +33,7 @@ namespace API.Controllers { return await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync(); } - else - { - return await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(); - } - + return await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(); } [Authorize(Policy = "RequireAdminRole")] @@ -64,7 +60,7 @@ namespace API.Controllers if (_unitOfWork.HasChanges()) { - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok("Tag updated successfully"); } @@ -81,38 +77,42 @@ namespace API.Controllers [HttpPost("update-series")] public async Task UpdateSeriesForTag(UpdateSeriesForTagDto updateSeriesForTagDto) { - var tag = await _unitOfWork.CollectionTagRepository.GetFullTagAsync(updateSeriesForTagDto.Tag.Id); - if (tag == null) return BadRequest("Not a valid Tag"); - tag.SeriesMetadatas ??= new List(); - - // Check if Tag has updated (Summary) - if (tag.Summary == null || !tag.Summary.Equals(updateSeriesForTagDto.Tag.Summary)) + try { - tag.Summary = updateSeriesForTagDto.Tag.Summary; - _unitOfWork.CollectionTagRepository.Update(tag); - } + var tag = await _unitOfWork.CollectionTagRepository.GetFullTagAsync(updateSeriesForTagDto.Tag.Id); + if (tag == null) return BadRequest("Not a valid Tag"); + tag.SeriesMetadatas ??= new List(); - foreach (var seriesIdToRemove in updateSeriesForTagDto.SeriesIdsToRemove) - { - tag.SeriesMetadatas.Remove(tag.SeriesMetadatas.Single(sm => sm.SeriesId == seriesIdToRemove)); - } - + // Check if Tag has updated (Summary) + if (tag.Summary == null || !tag.Summary.Equals(updateSeriesForTagDto.Tag.Summary)) + { + tag.Summary = updateSeriesForTagDto.Tag.Summary; + _unitOfWork.CollectionTagRepository.Update(tag); + } - if (tag.SeriesMetadatas.Count == 0) - { - _unitOfWork.CollectionTagRepository.Remove(tag); - } + foreach (var seriesIdToRemove in updateSeriesForTagDto.SeriesIdsToRemove) + { + tag.SeriesMetadatas.Remove(tag.SeriesMetadatas.Single(sm => sm.SeriesId == seriesIdToRemove)); + } - if (_unitOfWork.HasChanges() && await _unitOfWork.Complete()) + + if (tag.SeriesMetadatas.Count == 0) + { + _unitOfWork.CollectionTagRepository.Remove(tag); + } + + if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync()) + { + return Ok("Tag updated"); + } + } + catch (Exception) { - return Ok("Tag updated"); + await _unitOfWork.RollbackAsync(); } return BadRequest("Something went wrong. 
Please try again."); } - - - } } \ No newline at end of file diff --git a/API/Controllers/DownloadController.cs b/API/Controllers/DownloadController.cs new file mode 100644 index 000000000..6e7408fa4 --- /dev/null +++ b/API/Controllers/DownloadController.cs @@ -0,0 +1,135 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using API.Entities; +using API.Extensions; +using API.Interfaces; +using API.Interfaces.Services; +using API.Services; +using Kavita.Common; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.StaticFiles; + +namespace API.Controllers +{ + [Authorize(Policy = "RequireDownloadRole")] + public class DownloadController : BaseApiController + { + private readonly IUnitOfWork _unitOfWork; + private readonly IArchiveService _archiveService; + private readonly IDirectoryService _directoryService; + + public DownloadController(IUnitOfWork unitOfWork, IArchiveService archiveService, IDirectoryService directoryService) + { + _unitOfWork = unitOfWork; + _archiveService = archiveService; + _directoryService = directoryService; + } + + [HttpGet("volume-size")] + public async Task> GetVolumeSize(int volumeId) + { + var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId); + return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath))); + } + + [HttpGet("chapter-size")] + public async Task> GetChapterSize(int chapterId) + { + var files = await _unitOfWork.VolumeRepository.GetFilesForChapter(chapterId); + return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath))); + } + + [HttpGet("series-size")] + public async Task> GetSeriesSize(int seriesId) + { + var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId); + return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath))); + } + + [HttpGet("volume")] + public async Task DownloadVolume(int volumeId) + { + var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId); + try + { + if (files.Count == 1) + { + return await GetFirstFileDownload(files); + } + var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files.Select(c => c.FilePath), + $"download_{User.GetUsername()}_v{volumeId}"); + return File(fileBytes, "application/zip", Path.GetFileNameWithoutExtension(zipPath) + ".zip"); + } + catch (KavitaException ex) + { + return BadRequest(ex.Message); + } + } + + private async Task GetFirstFileDownload(IEnumerable files) + { + var firstFile = files.Select(c => c.FilePath).First(); + var fileProvider = new FileExtensionContentTypeProvider(); + // Figures out what the content type should be based on the file name. 
+ if (!fileProvider.TryGetContentType(firstFile, out var contentType)) + { + contentType = Path.GetExtension(firstFile).ToLowerInvariant() switch + { + ".cbz" => "application/zip", + ".cbr" => "application/vnd.rar", + ".cb7" => "application/x-compressed", + ".epub" => "application/epub+zip", + ".7z" => "application/x-7z-compressed", + ".7zip" => "application/x-7z-compressed", + _ => contentType + }; + } + + return File(await _directoryService.ReadFileAsync(firstFile), contentType, Path.GetFileNameWithoutExtension(firstFile)); + } + + [HttpGet("chapter")] + public async Task DownloadChapter(int chapterId) + { + var files = await _unitOfWork.VolumeRepository.GetFilesForChapter(chapterId); + try + { + if (files.Count == 1) + { + return await GetFirstFileDownload(files); + } + var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files.Select(c => c.FilePath), + $"download_{User.GetUsername()}_c{chapterId}"); + return File(fileBytes, "application/zip", Path.GetFileNameWithoutExtension(zipPath) + ".zip"); + } + catch (KavitaException ex) + { + return BadRequest(ex.Message); + } + } + + [HttpGet("series")] + public async Task DownloadSeries(int seriesId) + { + var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId); + try + { + if (files.Count == 1) + { + return await GetFirstFileDownload(files); + } + var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files.Select(c => c.FilePath), + $"download_{User.GetUsername()}_s{seriesId}"); + return File(fileBytes, "application/zip", Path.GetFileNameWithoutExtension(zipPath) + ".zip"); + } + catch (KavitaException ex) + { + return BadRequest(ex.Message); + } + } + } +} \ No newline at end of file diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs index 72a91f1fb..352b9f873 100644 --- a/API/Controllers/LibraryController.cs +++ b/API/Controllers/LibraryController.cs @@ -67,7 +67,7 @@ namespace API.Controllers } - if (!await _unitOfWork.Complete()) return BadRequest("There was a critical issue. Please try again."); + if (!await _unitOfWork.CommitAsync()) return BadRequest("There was a critical issue. 
Please try again."); _logger.LogInformation("Created a new library: {LibraryName}", library.Name); _taskScheduler.ScanLibrary(library.Id); @@ -133,7 +133,7 @@ namespace API.Controllers return Ok(_mapper.Map(user)); } - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { _logger.LogInformation("Added: {SelectedLibraries} to {Username}",libraryString, updateLibraryForUserDto.Username); return Ok(_mapper.Map(user)); @@ -199,7 +199,7 @@ namespace API.Controllers _unitOfWork.LibraryRepository.Update(library); - if (!await _unitOfWork.Complete()) return BadRequest("There was a critical issue updating the library."); + if (!await _unitOfWork.CommitAsync()) return BadRequest("There was a critical issue updating the library."); if (differenceBetweenFolders.Any()) { _taskScheduler.ScanLibrary(library.Id, true); diff --git a/API/Controllers/ReaderController.cs b/API/Controllers/ReaderController.cs index 5a39f354a..b9bc15fb7 100644 --- a/API/Controllers/ReaderController.cs +++ b/API/Controllers/ReaderController.cs @@ -5,6 +5,7 @@ using System.Linq; using System.Threading.Tasks; using API.Comparators; using API.DTOs; +using API.DTOs.Reader; using API.Entities; using API.Extensions; using API.Interfaces; @@ -49,15 +50,27 @@ namespace API.Controllers return File(content, "image/" + format); } - - [HttpGet("chapter-path")] - public async Task> GetImagePath(int chapterId) + + [HttpGet("chapter-info")] + public async Task> GetChapterInfo(int chapterId) { var chapter = await _cacheService.Ensure(chapterId); - if (chapter == null) return BadRequest("There was an issue finding image file for reading"); - + if (chapter == null) return BadRequest("Could not find Chapter"); + var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(chapter.VolumeId); + if (volume == null) return BadRequest("Could not find Volume"); var (_, mangaFile) = await _cacheService.GetCachedPagePath(chapter, 0); - return Ok(mangaFile.FilePath); + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId); + + return Ok(new ChapterInfoDto() + { + ChapterNumber = chapter.Range, + VolumeNumber = volume.Number + string.Empty, + VolumeId = volume.Id, + FileName = Path.GetFileName(mangaFile.FilePath), + SeriesName = series?.Name, + IsSpecial = chapter.IsSpecial, + Pages = chapter.Pages, + }); } [HttpGet("get-bookmark")] @@ -116,7 +129,7 @@ namespace API.Controllers _unitOfWork.UserRepository.Update(user); - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok(); } @@ -157,7 +170,7 @@ namespace API.Controllers _unitOfWork.UserRepository.Update(user); - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok(); } @@ -198,7 +211,7 @@ namespace API.Controllers _unitOfWork.UserRepository.Update(user); - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok(); } @@ -251,7 +264,7 @@ namespace API.Controllers _unitOfWork.UserRepository.Update(user); - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok(); } @@ -272,20 +285,10 @@ namespace API.Controllers var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); var volumes = await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, user.Id); var currentVolume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId); - + var currentChapter = await _unitOfWork.VolumeRepository.GetChapterAsync(currentChapterId); if (currentVolume.Number == 0) { - var next = false; - foreach (var chapter in 
currentVolume.Chapters) - { - if (next) - { - return Ok(chapter.Id); - } - if (currentChapterId == chapter.Id) next = true; - } - - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapterId); + var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapter.Number); if (chapterId > 0) return Ok(chapterId); } @@ -293,7 +296,7 @@ namespace API.Controllers { if (volume.Number == currentVolume.Number && volume.Chapters.Count > 1) { - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapterId); + var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapter.Number); if (chapterId > 0) return Ok(chapterId); } @@ -305,7 +308,7 @@ namespace API.Controllers return Ok(-1); } - private int GetNextChapterId(IEnumerable chapters, int currentChapterId) + private static int GetNextChapterId(IEnumerable chapters, string currentChapterNumber) { var next = false; foreach (var chapter in chapters) @@ -314,7 +317,7 @@ namespace API.Controllers { return chapter.Id; } - if (currentChapterId == chapter.Id) next = true; + if (currentChapterNumber.Equals(chapter.Number)) next = true; } return -1; @@ -333,11 +336,11 @@ namespace API.Controllers var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); var volumes = await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, user.Id); var currentVolume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId); - + var currentChapter = await _unitOfWork.VolumeRepository.GetChapterAsync(currentChapterId); if (currentVolume.Number == 0) { - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapterId); + var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapter.Number); if (chapterId > 0) return Ok(chapterId); } @@ -345,7 +348,7 @@ namespace API.Controllers { if (volume.Number == currentVolume.Number) { - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapterId); + var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapter.Number); if (chapterId > 0) return Ok(chapterId); } if (volume.Number == currentVolume.Number - 1) diff --git a/API/Controllers/SeriesController.cs b/API/Controllers/SeriesController.cs index caa55b229..3780538ad 100644 --- a/API/Controllers/SeriesController.cs +++ b/API/Controllers/SeriesController.cs @@ -114,7 +114,7 @@ namespace API.Controllers _unitOfWork.UserRepository.Update(user); - if (!await _unitOfWork.Complete()) return BadRequest("There was a critical error."); + if (!await _unitOfWork.CommitAsync()) return BadRequest("There was a critical error."); return Ok(); } @@ -139,7 +139,7 @@ namespace API.Controllers _unitOfWork.SeriesRepository.Update(series); - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok(); } @@ -190,61 +190,68 @@ namespace API.Controllers [HttpPost("metadata")] public async Task UpdateSeriesMetadata(UpdateSeriesMetadataDto updateSeriesMetadataDto) { - var seriesId = updateSeriesMetadataDto.SeriesMetadata.SeriesId; - var series = await 
_unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId); - if (series.Metadata == null) + try { - series.Metadata = DbFactory.SeriesMetadata(updateSeriesMetadataDto.Tags - .Select(dto => DbFactory.CollectionTag(dto.Id, dto.Title, dto.Summary, dto.Promoted)).ToList()); - } - else - { - series.Metadata.CollectionTags ??= new List(); - var newTags = new List(); - - // I want a union of these 2 lists. Return only elements that are in both lists, but the list types are different - var existingTags = series.Metadata.CollectionTags.ToList(); - foreach (var existing in existingTags) + var seriesId = updateSeriesMetadataDto.SeriesMetadata.SeriesId; + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId); + if (series.Metadata == null) { - if (updateSeriesMetadataDto.Tags.SingleOrDefault(t => t.Id == existing.Id) == null) + series.Metadata = DbFactory.SeriesMetadata(updateSeriesMetadataDto.Tags + .Select(dto => DbFactory.CollectionTag(dto.Id, dto.Title, dto.Summary, dto.Promoted)).ToList()); + } + else + { + series.Metadata.CollectionTags ??= new List(); + var newTags = new List(); + + // I want a union of these 2 lists. Return only elements that are in both lists, but the list types are different + var existingTags = series.Metadata.CollectionTags.ToList(); + foreach (var existing in existingTags) { - // Remove tag - series.Metadata.CollectionTags.Remove(existing); + if (updateSeriesMetadataDto.Tags.SingleOrDefault(t => t.Id == existing.Id) == null) + { + // Remove tag + series.Metadata.CollectionTags.Remove(existing); + } + } + + // At this point, all tags that aren't in dto have been removed. + foreach (var tag in updateSeriesMetadataDto.Tags) + { + var existingTag = series.Metadata.CollectionTags.SingleOrDefault(t => t.Title == tag.Title); + if (existingTag != null) + { + // Update existingTag + existingTag.Promoted = tag.Promoted; + existingTag.Title = tag.Title; + existingTag.NormalizedTitle = Parser.Parser.Normalize(tag.Title).ToUpper(); + } + else + { + // Add new tag + newTags.Add(DbFactory.CollectionTag(tag.Id, tag.Title, tag.Summary, tag.Promoted)); + } + } + + foreach (var tag in newTags) + { + series.Metadata.CollectionTags.Add(tag); } } - // At this point, all tags that aren't in dto have been removed. 
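The tag handling added in the try block above follows a remove-missing / update-matching / add-new reconciliation pattern. A condensed sketch of that pattern, assuming the CollectionTag, CollectionTagDto, DbFactory and Parser types from this diff are in scope; the helper name is hypothetical:

using System.Collections.Generic;
using System.Linq;

static void ReconcileTags(ICollection<CollectionTag> existing, IReadOnlyList<CollectionTagDto> incoming)
{
    // Remove tags the DTO no longer references (ToList avoids mutating while enumerating)
    foreach (var gone in existing.Where(e => incoming.All(i => i.Id != e.Id)).ToList())
        existing.Remove(gone);

    foreach (var dto in incoming)
    {
        var match = existing.SingleOrDefault(e => e.Title == dto.Title);
        if (match != null)
        {
            // Update the existing tag in place
            match.Promoted = dto.Promoted;
            match.Title = dto.Title;
            match.NormalizedTitle = Parser.Parser.Normalize(dto.Title).ToUpper();
        }
        else
        {
            // Add a brand new tag
            existing.Add(DbFactory.CollectionTag(dto.Id, dto.Title, dto.Summary, dto.Promoted));
        }
    }
}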
- foreach (var tag in updateSeriesMetadataDto.Tags) + if (!_unitOfWork.HasChanges()) { - var existingTag = series.Metadata.CollectionTags.SingleOrDefault(t => t.Title == tag.Title); - if (existingTag != null) - { - // Update existingTag - existingTag.Promoted = tag.Promoted; - existingTag.Title = tag.Title; - existingTag.NormalizedTitle = Parser.Parser.Normalize(tag.Title).ToUpper(); - } - else - { - // Add new tag - newTags.Add(DbFactory.CollectionTag(tag.Id, tag.Title, tag.Summary, tag.Promoted)); - } + return Ok("No changes to save"); } - foreach (var tag in newTags) + if (await _unitOfWork.CommitAsync()) { - series.Metadata.CollectionTags.Add(tag); + return Ok("Successfully updated"); } } - - if (!_unitOfWork.HasChanges()) + catch (Exception) { - return Ok("No changes to save"); - } - - if (await _unitOfWork.Complete()) - { - return Ok("Successfully updated"); + await _unitOfWork.RollbackAsync(); } return BadRequest("Could not update metadata"); diff --git a/API/Controllers/ServerController.cs b/API/Controllers/ServerController.cs index 475323e07..398de3efc 100644 --- a/API/Controllers/ServerController.cs +++ b/API/Controllers/ServerController.cs @@ -1,10 +1,9 @@ using System; using System.IO; -using System.IO.Compression; using System.Threading.Tasks; using API.Extensions; using API.Interfaces.Services; -using API.Services; +using Kavita.Common; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Configuration; @@ -19,19 +18,19 @@ namespace API.Controllers private readonly IHostApplicationLifetime _applicationLifetime; private readonly ILogger _logger; private readonly IConfiguration _config; - private readonly IDirectoryService _directoryService; private readonly IBackupService _backupService; + private readonly IArchiveService _archiveService; public ServerController(IHostApplicationLifetime applicationLifetime, ILogger logger, IConfiguration config, - IDirectoryService directoryService, IBackupService backupService) + IBackupService backupService, IArchiveService archiveService) { _applicationLifetime = applicationLifetime; _logger = logger; _config = config; - _directoryService = directoryService; _backupService = backupService; + _archiveService = archiveService; } - + [HttpPost("restart")] public ActionResult RestartServer() { @@ -45,33 +44,17 @@ namespace API.Controllers public async Task GetLogs() { var files = _backupService.LogFiles(_config.GetMaxRollingFiles(), _config.GetLoggingFileName()); - - var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp"); - var dateString = DateTime.Now.ToShortDateString().Replace("/", "_"); - - var tempLocation = Path.Join(tempDirectory, "logs_" + dateString); - DirectoryService.ExistOrCreate(tempLocation); - if (!_directoryService.CopyFilesToDirectory(files, tempLocation)) - { - return BadRequest("Unable to copy files to temp directory for log download."); - } - - var zipPath = Path.Join(tempDirectory, $"kavita_logs_{dateString}.zip"); try { - ZipFile.CreateFromDirectory(tempLocation, zipPath); + var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files, "logs"); + return File(fileBytes, "application/zip", Path.GetFileName(zipPath)); } - catch (AggregateException ex) + catch (KavitaException ex) { - _logger.LogError(ex, "There was an issue when archiving library backup"); - return BadRequest("There was an issue when archiving library backup"); + return BadRequest(ex.Message); } - var fileBytes = await _directoryService.ReadFileAsync(zipPath); - - 
DirectoryService.ClearAndDeleteDirectory(tempLocation); - (new FileInfo(zipPath)).Delete(); - - return File(fileBytes, "application/zip", Path.GetFileName(zipPath)); } + + } } \ No newline at end of file diff --git a/API/Controllers/SettingsController.cs b/API/Controllers/SettingsController.cs index 33565af56..8677074ab 100644 --- a/API/Controllers/SettingsController.cs +++ b/API/Controllers/SettingsController.cs @@ -8,6 +8,7 @@ using API.Entities.Enums; using API.Extensions; using API.Helpers.Converters; using API.Interfaces; +using Kavita.Common; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Configuration; @@ -15,7 +16,7 @@ using Microsoft.Extensions.Logging; namespace API.Controllers { - [Authorize] + [Authorize(Policy = "RequireAdminRole")] public class SettingsController : BaseApiController { private readonly ILogger _logger; @@ -30,14 +31,16 @@ namespace API.Controllers _taskScheduler = taskScheduler; _configuration = configuration; } - + [HttpGet("")] public async Task> GetSettings() { - return Ok(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()); + var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync(); + settingsDto.Port = Configuration.GetPort(Program.GetAppSettingFilename()); + settingsDto.LoggingLevel = Configuration.GetLogLevel(Program.GetAppSettingFilename()); + return Ok(settingsDto); } - - [Authorize(Policy = "RequireAdminRole")] + [HttpPost("")] public async Task> UpdateSettings(ServerSettingDto updateSettingsDto) { @@ -76,47 +79,63 @@ namespace API.Controllers if (setting.Key == ServerSettingKey.Port && updateSettingsDto.Port + "" != setting.Value) { setting.Value = updateSettingsDto.Port + ""; - Environment.SetEnvironmentVariable("KAVITA_PORT", setting.Value); + // Port is managed in appSetting.json + Configuration.UpdatePort(Program.GetAppSettingFilename(), updateSettingsDto.Port); _unitOfWork.SettingsRepository.Update(setting); } if (setting.Key == ServerSettingKey.LoggingLevel && updateSettingsDto.LoggingLevel + "" != setting.Value) { setting.Value = updateSettingsDto.LoggingLevel + ""; + Configuration.UpdateLogLevel(Program.GetAppSettingFilename(), updateSettingsDto.LoggingLevel); _unitOfWork.SettingsRepository.Update(setting); } + + if (setting.Key == ServerSettingKey.AllowStatCollection && updateSettingsDto.AllowStatCollection + "" != setting.Value) + { + setting.Value = updateSettingsDto.AllowStatCollection + ""; + _unitOfWork.SettingsRepository.Update(setting); + if (!updateSettingsDto.AllowStatCollection) + { + _taskScheduler.CancelStatsTasks(); + } + else + { + _taskScheduler.ScheduleStatsTasks(); + } + } } _configuration.GetSection("Logging:LogLevel:Default").Value = updateSettingsDto.LoggingLevel + ""; if (!_unitOfWork.HasChanges()) return Ok("Nothing was updated"); - if (!_unitOfWork.HasChanges() || !await _unitOfWork.Complete()) + if (!_unitOfWork.HasChanges() || !await _unitOfWork.CommitAsync()) + { + await _unitOfWork.RollbackAsync(); return BadRequest("There was a critical issue. 
Please try again."); - + } + _logger.LogInformation("Server Settings updated"); _taskScheduler.ScheduleTasks(); return Ok(updateSettingsDto); } - - [Authorize(Policy = "RequireAdminRole")] + [HttpGet("task-frequencies")] public ActionResult> GetTaskFrequencies() { return Ok(CronConverter.Options); } - [Authorize(Policy = "RequireAdminRole")] [HttpGet("library-types")] public ActionResult> GetLibraryTypes() { return Ok(Enum.GetNames(typeof(LibraryType))); } - [Authorize(Policy = "RequireAdminRole")] [HttpGet("log-levels")] public ActionResult> GetLogLevels() { - return Ok(new [] {"Trace", "Debug", "Information", "Warning", "Critical", "None"}); + return Ok(new [] {"Trace", "Debug", "Information", "Warning", "Critical"}); } } } \ No newline at end of file diff --git a/API/Controllers/StatsController.cs b/API/Controllers/StatsController.cs new file mode 100644 index 000000000..f35552eec --- /dev/null +++ b/API/Controllers/StatsController.cs @@ -0,0 +1,40 @@ +using System; +using System.Threading.Tasks; +using API.DTOs; +using API.Interfaces.Services; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; + +namespace API.Controllers +{ + public class StatsController : BaseApiController + { + private readonly ILogger _logger; + private readonly IStatsService _statsService; + + public StatsController(ILogger logger, IStatsService statsService) + { + _logger = logger; + _statsService = statsService; + } + + [AllowAnonymous] + [HttpPost("client-info")] + public async Task AddClientInfo([FromBody] ClientInfoDto clientInfoDto) + { + try + { + await _statsService.PathData(clientInfoDto); + + return Ok(); + } + catch (Exception e) + { + _logger.LogError(e, "Error updating the usage statistics"); + Console.WriteLine(e); + throw; + } + } + } +} \ No newline at end of file diff --git a/API/Controllers/UsersController.cs b/API/Controllers/UsersController.cs index 10d6d3e07..ee4c9ac66 100644 --- a/API/Controllers/UsersController.cs +++ b/API/Controllers/UsersController.cs @@ -26,7 +26,7 @@ namespace API.Controllers var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(username); _unitOfWork.UserRepository.Delete(user); - if (await _unitOfWork.Complete()) return Ok(); + if (await _unitOfWork.CommitAsync()) return Ok(); return BadRequest("Could not delete the user."); } @@ -61,6 +61,8 @@ namespace API.Controllers existingPreferences.ReadingDirection = preferencesDto.ReadingDirection; existingPreferences.ScalingOption = preferencesDto.ScalingOption; existingPreferences.PageSplitOption = preferencesDto.PageSplitOption; + existingPreferences.AutoCloseMenu = preferencesDto.AutoCloseMenu; + existingPreferences.ReaderMode = preferencesDto.ReaderMode; existingPreferences.BookReaderMargin = preferencesDto.BookReaderMargin; existingPreferences.BookReaderLineSpacing = preferencesDto.BookReaderLineSpacing; existingPreferences.BookReaderFontFamily = preferencesDto.BookReaderFontFamily; @@ -71,7 +73,7 @@ namespace API.Controllers _unitOfWork.UserRepository.Update(existingPreferences); - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok(preferencesDto); } diff --git a/API/DTOs/ClientInfoDto.cs b/API/DTOs/ClientInfoDto.cs new file mode 100644 index 000000000..7070e64d7 --- /dev/null +++ b/API/DTOs/ClientInfoDto.cs @@ -0,0 +1,36 @@ +using System; + +namespace API.DTOs +{ + public class ClientInfoDto + { + public ClientInfoDto() + { + CollectedAt = DateTime.UtcNow; + } + + public string KavitaUiVersion { get; set; } + 
public string ScreenResolution { get; set; } + public string PlatformType { get; set; } + public DetailsVersion Browser { get; set; } + public DetailsVersion Os { get; set; } + + public DateTime? CollectedAt { get; set; } + + public bool IsTheSameDevice(ClientInfoDto clientInfoDto) + { + return (clientInfoDto.ScreenResolution ?? "").Equals(ScreenResolution) && + (clientInfoDto.PlatformType ?? "").Equals(PlatformType) && + (clientInfoDto.Browser?.Name ?? "").Equals(Browser?.Name) && + (clientInfoDto.Os?.Name ?? "").Equals(Os?.Name) && + clientInfoDto.CollectedAt.GetValueOrDefault().ToString("yyyy-MM-dd") + .Equals(CollectedAt.GetValueOrDefault().ToString("yyyy-MM-dd")); + } + } + + public class DetailsVersion + { + public string Name { get; set; } + public string Version { get; set; } + } +} \ No newline at end of file diff --git a/API/DTOs/CollectionTagDto.cs b/API/DTOs/CollectionTagDto.cs index 72027e84a..26f256562 100644 --- a/API/DTOs/CollectionTagDto.cs +++ b/API/DTOs/CollectionTagDto.cs @@ -1,6 +1,4 @@ -using System.Collections.Generic; - -namespace API.DTOs +namespace API.DTOs { public class CollectionTagDto { diff --git a/API/DTOs/Reader/ChapterInfoDto.cs b/API/DTOs/Reader/ChapterInfoDto.cs new file mode 100644 index 000000000..850149016 --- /dev/null +++ b/API/DTOs/Reader/ChapterInfoDto.cs @@ -0,0 +1,16 @@ +namespace API.DTOs.Reader +{ + public class ChapterInfoDto + { + + public string ChapterNumber { get; set; } + public string VolumeNumber { get; set; } + public int VolumeId { get; set; } + public string SeriesName { get; set; } + public string ChapterTitle { get; set; } = ""; + public int Pages { get; set; } + public string FileName { get; set; } + public bool IsSpecial { get; set; } + + } +} \ No newline at end of file diff --git a/API/DTOs/ServerInfoDto.cs b/API/DTOs/ServerInfoDto.cs new file mode 100644 index 000000000..0f4a86d64 --- /dev/null +++ b/API/DTOs/ServerInfoDto.cs @@ -0,0 +1,12 @@ +namespace API.DTOs +{ + public class ServerInfoDto + { + public string Os { get; set; } + public string DotNetVersion { get; set; } + public string RunTimeVersion { get; set; } + public string KavitaVersion { get; set; } + public string BuildBranch { get; set; } + public string Culture { get; set; } + } +} \ No newline at end of file diff --git a/API/DTOs/ServerSettingDTO.cs b/API/DTOs/ServerSettingDTO.cs index a1617ff11..9a52f9c09 100644 --- a/API/DTOs/ServerSettingDTO.cs +++ b/API/DTOs/ServerSettingDTO.cs @@ -7,5 +7,6 @@ public string LoggingLevel { get; set; } public string TaskBackup { get; set; } public int Port { get; set; } + public bool AllowStatCollection { get; set; } } } \ No newline at end of file diff --git a/API/DTOs/UpdateRBSDto.cs b/API/DTOs/UpdateRBSDto.cs new file mode 100644 index 000000000..8bf37d314 --- /dev/null +++ b/API/DTOs/UpdateRBSDto.cs @@ -0,0 +1,10 @@ +using System.Collections.Generic; + +namespace API.DTOs +{ + public class UpdateRbsDto + { + public string Username { get; init; } + public IList Roles { get; init; } + } +} \ No newline at end of file diff --git a/API/DTOs/UpdateSeriesMetadataDto.cs b/API/DTOs/UpdateSeriesMetadataDto.cs index fd71526b7..a9c852632 100644 --- a/API/DTOs/UpdateSeriesMetadataDto.cs +++ b/API/DTOs/UpdateSeriesMetadataDto.cs @@ -1,5 +1,4 @@ using System.Collections.Generic; -using API.Entities; namespace API.DTOs { diff --git a/API/DTOs/UsageInfoDto.cs b/API/DTOs/UsageInfoDto.cs new file mode 100644 index 000000000..ba4b06b41 --- /dev/null +++ b/API/DTOs/UsageInfoDto.cs @@ -0,0 +1,24 @@ +using System.Collections.Generic; +using 
API.Entities.Enums; + +namespace API.DTOs +{ + public class UsageInfoDto + { + public UsageInfoDto() + { + FileTypes = new HashSet(); + LibraryTypesCreated = new HashSet(); + } + + public int UsersCount { get; set; } + public IEnumerable FileTypes { get; set; } + public IEnumerable LibraryTypesCreated { get; set; } + } + + public class LibInfo + { + public LibraryType Type { get; set; } + public int Count { get; set; } + } +} \ No newline at end of file diff --git a/API/DTOs/UsageStatisticsDto.cs b/API/DTOs/UsageStatisticsDto.cs new file mode 100644 index 000000000..1180401c3 --- /dev/null +++ b/API/DTOs/UsageStatisticsDto.cs @@ -0,0 +1,33 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace API.DTOs +{ + public class UsageStatisticsDto + { + public UsageStatisticsDto() + { + MarkAsUpdatedNow(); + ClientsInfo = new List(); + } + + public string InstallId { get; set; } + public DateTime LastUpdate { get; set; } + public UsageInfoDto UsageInfo { get; set; } + public ServerInfoDto ServerInfo { get; set; } + public List ClientsInfo { get; set; } + + public void MarkAsUpdatedNow() + { + LastUpdate = DateTime.UtcNow; + } + + public void AddClientInfo(ClientInfoDto clientInfoDto) + { + if (ClientsInfo.Any(x => x.IsTheSameDevice(clientInfoDto))) return; + + ClientsInfo.Add(clientInfoDto); + } + } +} \ No newline at end of file diff --git a/API/DTOs/UserPreferencesDto.cs b/API/DTOs/UserPreferencesDto.cs index 0d8f3ae68..03dbeaa5e 100644 --- a/API/DTOs/UserPreferencesDto.cs +++ b/API/DTOs/UserPreferencesDto.cs @@ -7,6 +7,8 @@ namespace API.DTOs public ReadingDirection ReadingDirection { get; set; } public ScalingOption ScalingOption { get; set; } public PageSplitOption PageSplitOption { get; set; } + public ReaderMode ReaderMode { get; set; } + public bool AutoCloseMenu { get; set; } public bool BookReaderDarkMode { get; set; } = false; public int BookReaderMargin { get; set; } public int BookReaderLineSpacing { get; set; } diff --git a/API/Data/FileRepository.cs b/API/Data/FileRepository.cs new file mode 100644 index 000000000..a90ff4df5 --- /dev/null +++ b/API/Data/FileRepository.cs @@ -0,0 +1,35 @@ +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using API.Interfaces; +using Microsoft.EntityFrameworkCore; + +namespace API.Data +{ + public class FileRepository : IFileRepository + { + private readonly DataContext _dbContext; + + public FileRepository(DataContext context) + { + _dbContext = context; + } + + public async Task> GetFileExtensions() + { + var fileExtensions = await _dbContext.MangaFile + .AsNoTracking() + .Select(x => x.FilePath) + .Distinct() + .ToArrayAsync(); + + var uniqueFileTypes = fileExtensions + .Select(Path.GetExtension) + .Where(x => x is not null) + .Distinct(); + + return uniqueFileTypes; + } + } +} \ No newline at end of file diff --git a/API/Data/LibraryRepository.cs b/API/Data/LibraryRepository.cs index c065bface..0af1f6760 100644 --- a/API/Data/LibraryRepository.cs +++ b/API/Data/LibraryRepository.cs @@ -106,6 +106,8 @@ namespace API.Data .Where(x => x.Id == libraryId) .Include(f => f.Folders) .Include(l => l.Series) + .ThenInclude(s => s.Metadata) + .Include(l => l.Series) .ThenInclude(s => s.Volumes) .ThenInclude(v => v.Chapters) .ThenInclude(c => c.Files) diff --git a/API/Data/Migrations/20210622164318_NewUserPreferences.Designer.cs b/API/Data/Migrations/20210622164318_NewUserPreferences.Designer.cs new file mode 100644 index 000000000..2797f05ab --- /dev/null +++ 
b/API/Data/Migrations/20210622164318_NewUserPreferences.Designer.cs @@ -0,0 +1,869 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20210622164318_NewUserPreferences")] + partial class NewUserPreferences + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "5.0.4"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AutoCloseMenu") + .HasColumnType("INTEGER"); + + b.Property("BookReaderDarkMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("BookReaderReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("BookReaderTapToPaginate") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("ReaderMode") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + 
b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.Property("SiteDarkMode") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookScrollId") + .HasColumnType("TEXT"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.CollectionTag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("Id", "Promoted") + .IsUnique(); + + b.ToTable("CollectionTag"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + 
.HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId") + .IsUnique(); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.SeriesMetadata", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId") + .IsUnique(); + + b.HasIndex("Id", "SeriesId") + .IsUnique(); + + b.ToTable("SeriesMetadata"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.Property("CollectionTagsId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("CollectionTagsId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("CollectionTagSeriesMetadata"); + }); + + 
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + 
b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.SeriesMetadata", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithOne("Metadata") + .HasForeignKey("API.Entities.SeriesMetadata", "SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.HasOne("API.Entities.CollectionTag", null) + .WithMany() + .HasForeignKey("CollectionTagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Metadata"); + + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20210622164318_NewUserPreferences.cs b/API/Data/Migrations/20210622164318_NewUserPreferences.cs new file mode 100644 index 000000000..bd75d5b2c --- /dev/null +++ b/API/Data/Migrations/20210622164318_NewUserPreferences.cs @@ -0,0 +1,35 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +namespace API.Data.Migrations +{ + public partial class NewUserPreferences : 
Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.AddColumn( + name: "AutoCloseMenu", + table: "AppUserPreferences", + type: "INTEGER", + nullable: false, + defaultValue: false); + + migrationBuilder.AddColumn( + name: "ReaderMode", + table: "AppUserPreferences", + type: "INTEGER", + nullable: false, + defaultValue: 0); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "AutoCloseMenu", + table: "AppUserPreferences"); + + migrationBuilder.DropColumn( + name: "ReaderMode", + table: "AppUserPreferences"); + } + } +} diff --git a/API/Data/Migrations/DataContextModelSnapshot.cs b/API/Data/Migrations/DataContextModelSnapshot.cs index f14402ece..c6d49fc2a 100644 --- a/API/Data/Migrations/DataContextModelSnapshot.cs +++ b/API/Data/Migrations/DataContextModelSnapshot.cs @@ -127,6 +127,9 @@ namespace API.Data.Migrations b.Property("AppUserId") .HasColumnType("INTEGER"); + b.Property("AutoCloseMenu") + .HasColumnType("INTEGER"); + b.Property("BookReaderDarkMode") .HasColumnType("INTEGER"); @@ -151,6 +154,9 @@ namespace API.Data.Migrations b.Property("PageSplitOption") .HasColumnType("INTEGER"); + b.Property("ReaderMode") + .HasColumnType("INTEGER"); + b.Property("ReadingDirection") .HasColumnType("INTEGER"); diff --git a/API/Data/Seed.cs b/API/Data/Seed.cs index 2dfeb1c0a..2c7eb373b 100644 --- a/API/Data/Seed.cs +++ b/API/Data/Seed.cs @@ -1,13 +1,14 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Reflection; using System.Threading.Tasks; using API.Constants; using API.Entities; using API.Entities.Enums; using API.Services; +using Kavita.Common; using Microsoft.AspNetCore.Identity; -using Microsoft.EntityFrameworkCore; namespace API.Data { @@ -15,11 +16,13 @@ namespace API.Data { public static async Task SeedRoles(RoleManager roleManager) { - var roles = new List - { - new() {Name = PolicyConstants.AdminRole}, - new() {Name = PolicyConstants.PlebRole} - }; + var roles = typeof(PolicyConstants) + .GetFields(BindingFlags.Public | BindingFlags.Static) + .Where(f => f.FieldType == typeof(string)) + .ToDictionary(f => f.Name, + f => (string) f.GetValue(null)).Values + .Select(policyName => new AppRole() {Name = policyName}) + .ToList(); foreach (var role in roles) { @@ -39,12 +42,13 @@ namespace API.Data { new() {Key = ServerSettingKey.CacheDirectory, Value = CacheService.CacheDirectory}, new () {Key = ServerSettingKey.TaskScan, Value = "daily"}, - //new () {Key = ServerSettingKey.LoggingLevel, Value = "Information"}, + new () {Key = ServerSettingKey.LoggingLevel, Value = "Information"}, // Not used from DB, but DB is sync with appSettings.json new () {Key = ServerSettingKey.TaskBackup, Value = "weekly"}, new () {Key = ServerSettingKey.BackupDirectory, Value = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "backups/"))}, - new () {Key = ServerSettingKey.Port, Value = "5000"}, + new () {Key = ServerSettingKey.Port, Value = "5000"}, // Not used from DB, but DB is sync with appSettings.json + new () {Key = ServerSettingKey.AllowStatCollection, Value = "true"}, }; - + foreach (var defaultSetting in defaultSettings) { var existing = context.ServerSetting.FirstOrDefault(s => s.Key == defaultSetting.Key); @@ -55,22 +59,16 @@ namespace API.Data } await context.SaveChangesAsync(); - } - - public static async Task SeedSeriesMetadata(DataContext context) - { - await context.Database.EnsureCreatedAsync(); - context.Database.EnsureCreated(); - var series 
= await context.Series - .Include(s => s.Metadata).ToListAsync(); - - foreach (var s in series) - { - s.Metadata ??= new SeriesMetadata(); - } - + // Port and LoggingLevel are managed in appSettings.json. Update the DB values to match + var configFile = Program.GetAppSettingFilename(); + context.ServerSetting.FirstOrDefault(s => s.Key == ServerSettingKey.Port).Value = + Configuration.GetPort(configFile) + ""; + context.ServerSetting.FirstOrDefault(s => s.Key == ServerSettingKey.LoggingLevel).Value = + Configuration.GetLogLevel(configFile); + await context.SaveChangesAsync(); + } } } \ No newline at end of file diff --git a/API/Data/SeriesRepository.cs b/API/Data/SeriesRepository.cs index c6575126b..07d7102e1 100644 --- a/API/Data/SeriesRepository.cs +++ b/API/Data/SeriesRepository.cs @@ -289,7 +289,7 @@ namespace API.Data /// /// /// Library to restrict to, if 0, will apply to all libraries - /// How many series to pick. + /// Contains pagination information /// public async Task> GetRecentlyAdded(int libraryId, int userId, UserParams userParams) { @@ -411,5 +411,16 @@ namespace API.Data return await PagedList.CreateAsync(query, userParams.PageNumber, userParams.PageSize); } + + public async Task> GetFilesForSeries(int seriesId) + { + return await _context.Volume + .Where(v => v.SeriesId == seriesId) + .Include(v => v.Chapters) + .ThenInclude(c => c.Files) + .SelectMany(v => v.Chapters.SelectMany(c => c.Files)) + .AsNoTracking() + .ToListAsync(); + } } } \ No newline at end of file diff --git a/API/Data/UnitOfWork.cs b/API/Data/UnitOfWork.cs index 178136e3a..394e6fed1 100644 --- a/API/Data/UnitOfWork.cs +++ b/API/Data/UnitOfWork.cs @@ -29,8 +29,13 @@ namespace API.Data public IAppUserProgressRepository AppUserProgressRepository => new AppUserProgressRepository(_context); public ICollectionTagRepository CollectionTagRepository => new CollectionTagRepository(_context, _mapper); - - public async Task Complete() + public IFileRepository FileRepository => new FileRepository(_context); + + public bool Commit() + { + return _context.SaveChanges() > 0; + } + public async Task CommitAsync() { return await _context.SaveChangesAsync() > 0; } @@ -39,5 +44,16 @@ namespace API.Data { return _context.ChangeTracker.HasChanges(); } + + public async Task RollbackAsync() + { + await _context.DisposeAsync(); + return true; + } + public bool Rollback() + { + _context.Dispose(); + return true; + } } } \ No newline at end of file diff --git a/API/Data/VolumeRepository.cs b/API/Data/VolumeRepository.cs index 6b9e541ea..78a078e03 100644 --- a/API/Data/VolumeRepository.cs +++ b/API/Data/VolumeRepository.cs @@ -65,6 +65,8 @@ namespace API.Data .SingleOrDefaultAsync(); } + + public async Task GetChapterDtoAsync(int chapterId) { @@ -84,5 +86,15 @@ namespace API.Data .AsNoTracking() .ToListAsync(); } + + public async Task> GetFilesForVolume(int volumeId) + { + return await _context.Chapter + .Where(c => volumeId == c.VolumeId) + .Include(c => c.Files) + .SelectMany(c => c.Files) + .AsNoTracking() + .ToListAsync(); + } } } \ No newline at end of file diff --git a/API/Entities/AppUserPreferences.cs b/API/Entities/AppUserPreferences.cs index fb5fe9bc2..e78c4b015 100644 --- a/API/Entities/AppUserPreferences.cs +++ b/API/Entities/AppUserPreferences.cs @@ -17,7 +17,18 @@ namespace API.Entities /// Manga Reader Option: Which side of a split image should we show first /// public PageSplitOption PageSplitOption { get; set; } = PageSplitOption.SplitRightToLeft; - + /// + /// Manga Reader Option: How the manga reader should 
perform paging or reading of the file + /// + /// Webtoon uses scrolling to page, MANGA_LR uses paging by clicking left/right side of reader, MANGA_UD uses paging + /// by clicking top/bottom sides of reader. + /// + /// + public ReaderMode ReaderMode { get; set; } + /// + /// Manga Reader Option: Allow the menu to close after 6 seconds without interaction + /// + public bool AutoCloseMenu { get; set; } /// /// Book Reader Option: Should the background color be dark /// @@ -46,10 +57,11 @@ namespace API.Entities /// Book Reader Option: What direction should the next/prev page buttons go /// public ReadingDirection BookReaderReadingDirection { get; set; } = ReadingDirection.LeftToRight; + /// /// UI Site Global Setting: Whether the UI should render in Dark mode or not. /// - public bool SiteDarkMode { get; set; } + public bool SiteDarkMode { get; set; } = true; diff --git a/API/Entities/Enums/ReaderMode.cs b/API/Entities/Enums/ReaderMode.cs new file mode 100644 index 000000000..04156df24 --- /dev/null +++ b/API/Entities/Enums/ReaderMode.cs @@ -0,0 +1,14 @@ +using System.ComponentModel; + +namespace API.Entities.Enums +{ + public enum ReaderMode + { + [Description("Left and Right")] + MANGA_LR = 0, + [Description("Up and Down")] + MANGA_UP = 1, + [Description("Webtoon")] + WEBTOON = 2 + } +} \ No newline at end of file diff --git a/API/Entities/Enums/ServerSettingKey.cs b/API/Entities/Enums/ServerSettingKey.cs index 0aa5563f2..28378d4d1 100644 --- a/API/Entities/Enums/ServerSettingKey.cs +++ b/API/Entities/Enums/ServerSettingKey.cs @@ -15,6 +15,9 @@ namespace API.Entities.Enums [Description("Port")] Port = 4, [Description("BackupDirectory")] - BackupDirectory = 5 + BackupDirectory = 5, + [Description("AllowStatCollection")] + AllowStatCollection = 6, + } } \ No newline at end of file diff --git a/API/Entities/Series.cs b/API/Entities/Series.cs index 4ea8f1cf4..4d8a48be4 100644 --- a/API/Entities/Series.cs +++ b/API/Entities/Series.cs @@ -32,7 +32,7 @@ namespace API.Entities /// /// Summary information related to the Series /// - public string Summary { get; set; } // TODO: Migrate into SeriesMetdata + public string Summary { get; set; } // TODO: Migrate into SeriesMetdata (with Metadata update) public DateTime Created { get; set; } public DateTime LastModified { get; set; } public byte[] CoverImage { get; set; } diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs index a3406ae27..e713acbe1 100644 --- a/API/Extensions/ApplicationServiceExtensions.cs +++ b/API/Extensions/ApplicationServiceExtensions.cs @@ -4,18 +4,22 @@ using API.Interfaces; using API.Interfaces.Services; using API.Services; using API.Services.Tasks; +using Kavita.Common; +using Microsoft.AspNetCore.Hosting; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; namespace API.Extensions { public static class ApplicationServiceExtensions { - public static IServiceCollection AddApplicationServices(this IServiceCollection services, IConfiguration config) + public static IServiceCollection AddApplicationServices(this IServiceCollection services, IConfiguration config, IWebHostEnvironment env) { services.AddAutoMapper(typeof(AutoMapperProfiles).Assembly); + services.AddScoped(); services.AddScoped(); services.AddScoped(); services.AddScoped(); @@ -27,12 +31,8 @@ namespace API.Extensions services.AddScoped(); services.AddScoped(); 
services.AddScoped(); - - services.AddDbContext(options => - { - options.UseSqlite(config.GetConnectionString("DefaultConnection")); - }); + services.AddSqLite(config, env); services.AddLogging(loggingBuilder => { @@ -42,9 +42,17 @@ namespace API.Extensions return services; } - - public static IServiceCollection AddStartupTask(this IServiceCollection services) - where T : class, IStartupTask - => services.AddTransient(); + + private static IServiceCollection AddSqLite(this IServiceCollection services, IConfiguration config, + IWebHostEnvironment env) + { + services.AddDbContext(options => + { + options.UseSqlite(config.GetConnectionString("DefaultConnection")); + options.EnableSensitiveDataLogging(env.IsDevelopment() || Configuration.GetLogLevel(Program.GetAppSettingFilename()).Equals("Debug")); + }); + + return services; + } } } \ No newline at end of file diff --git a/API/Extensions/DirectoryInfoExtensions.cs b/API/Extensions/DirectoryInfoExtensions.cs index c41ca9f8b..0eaf6c00a 100644 --- a/API/Extensions/DirectoryInfoExtensions.cs +++ b/API/Extensions/DirectoryInfoExtensions.cs @@ -1,12 +1,12 @@ -using System; -using System.IO; +using System.IO; using System.Linq; -using API.Services; +using API.Comparators; namespace API.Extensions { public static class DirectoryInfoExtensions { + private static readonly NaturalSortComparer Comparer = new NaturalSortComparer(); public static void Empty(this DirectoryInfo directory) { foreach(FileInfo file in directory.EnumerateFiles()) file.Delete(); @@ -49,12 +49,13 @@ namespace API.Extensions if (!root.FullName.Equals(directory.FullName)) { var fileIndex = 1; - foreach (var file in directory.EnumerateFiles()) + + foreach (var file in directory.EnumerateFiles().OrderBy(file => file.FullName, Comparer)) { if (file.Directory == null) continue; var paddedIndex = Parser.Parser.PadZeros(directoryIndex + ""); // We need to rename the files so that after flattening, they are in the order we found them - var newName = $"{paddedIndex}_{fileIndex}.{file.Extension}"; + var newName = $"{paddedIndex}_{Parser.Parser.PadZeros(fileIndex + "")}{file.Extension}"; var newPath = Path.Join(root.FullName, newName); if (!File.Exists(newPath)) file.MoveTo(newPath); fileIndex++; diff --git a/API/Extensions/IdentityServiceExtensions.cs b/API/Extensions/IdentityServiceExtensions.cs index 2d2a235f5..5310cf2ef 100644 --- a/API/Extensions/IdentityServiceExtensions.cs +++ b/API/Extensions/IdentityServiceExtensions.cs @@ -39,6 +39,7 @@ namespace API.Extensions services.AddAuthorization(opt => { opt.AddPolicy("RequireAdminRole", policy => policy.RequireRole(PolicyConstants.AdminRole)); + opt.AddPolicy("RequireDownloadRole", policy => policy.RequireRole(PolicyConstants.DownloadRole, PolicyConstants.AdminRole)); }); return services; diff --git a/API/Extensions/ServiceCollectionExtensions.cs b/API/Extensions/ServiceCollectionExtensions.cs index d3cae4191..3559f3856 100644 --- a/API/Extensions/ServiceCollectionExtensions.cs +++ b/API/Extensions/ServiceCollectionExtensions.cs @@ -1,4 +1,7 @@ -using API.Interfaces.Services; +using System; +using API.Interfaces.Services; +using API.Services.Clients; +using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; namespace API.Extensions @@ -8,5 +11,15 @@ namespace API.Extensions public static IServiceCollection AddStartupTask(this IServiceCollection services) where T : class, IStartupTask => services.AddTransient(); + + public static IServiceCollection AddStatsClient(this IServiceCollection services, 
IConfiguration configuration) + { + services.AddHttpClient(client => + { + client.DefaultRequestHeaders.Add("api-key", "MsnvA2DfQqxSK5jh"); + }); + + return services; + } } } \ No newline at end of file diff --git a/API/Helpers/Converters/ServerSettingConverter.cs b/API/Helpers/Converters/ServerSettingConverter.cs index 27d1cbbae..261c1bff1 100644 --- a/API/Helpers/Converters/ServerSettingConverter.cs +++ b/API/Helpers/Converters/ServerSettingConverter.cs @@ -30,6 +30,9 @@ namespace API.Helpers.Converters case ServerSettingKey.Port: destination.Port = int.Parse(row.Value); break; + case ServerSettingKey.AllowStatCollection: + destination.AllowStatCollection = bool.Parse(row.Value); + break; } } diff --git a/API/Interfaces/IFileRepository.cs b/API/Interfaces/IFileRepository.cs new file mode 100644 index 000000000..cde587855 --- /dev/null +++ b/API/Interfaces/IFileRepository.cs @@ -0,0 +1,10 @@ +using System.Collections.Generic; +using System.Threading.Tasks; + +namespace API.Interfaces +{ + public interface IFileRepository + { + Task> GetFileExtensions(); + } +} \ No newline at end of file diff --git a/API/Interfaces/ISeriesRepository.cs b/API/Interfaces/ISeriesRepository.cs index 0b89d16b6..166ab05c3 100644 --- a/API/Interfaces/ISeriesRepository.cs +++ b/API/Interfaces/ISeriesRepository.cs @@ -61,5 +61,6 @@ namespace API.Interfaces Task> GetRecentlyAdded(int libraryId, int userId, UserParams userParams); Task GetSeriesMetadata(int seriesId); Task> GetSeriesDtoForCollectionAsync(int collectionId, int userId, UserParams userParams); + Task> GetFilesForSeries(int seriesId); } } \ No newline at end of file diff --git a/API/Interfaces/ITaskScheduler.cs b/API/Interfaces/ITaskScheduler.cs index 75f70c1fa..4f3aba6f8 100644 --- a/API/Interfaces/ITaskScheduler.cs +++ b/API/Interfaces/ITaskScheduler.cs @@ -11,5 +11,7 @@ void RefreshMetadata(int libraryId, bool forceUpdate = true); void CleanupTemp(); void RefreshSeriesMetadata(int libraryId, int seriesId); + void ScheduleStatsTasks(); + void CancelStatsTasks(); } } \ No newline at end of file diff --git a/API/Interfaces/IUnitOfWork.cs b/API/Interfaces/IUnitOfWork.cs index 8f4b53c8f..63051d2e3 100644 --- a/API/Interfaces/IUnitOfWork.cs +++ b/API/Interfaces/IUnitOfWork.cs @@ -11,7 +11,11 @@ namespace API.Interfaces ISettingsRepository SettingsRepository { get; } IAppUserProgressRepository AppUserProgressRepository { get; } ICollectionTagRepository CollectionTagRepository { get; } - Task Complete(); + IFileRepository FileRepository { get; } + bool Commit(); + Task CommitAsync(); bool HasChanges(); + bool Rollback(); + Task RollbackAsync(); } } \ No newline at end of file diff --git a/API/Interfaces/IVolumeRepository.cs b/API/Interfaces/IVolumeRepository.cs index faf18abb8..b5ac06087 100644 --- a/API/Interfaces/IVolumeRepository.cs +++ b/API/Interfaces/IVolumeRepository.cs @@ -13,5 +13,6 @@ namespace API.Interfaces Task> GetFilesForChapter(int chapterId); Task> GetChaptersAsync(int volumeId); Task GetChapterCoverImageAsync(int chapterId); + Task> GetFilesForVolume(int volumeId); } } \ No newline at end of file diff --git a/API/Interfaces/Services/IArchiveService.cs b/API/Interfaces/Services/IArchiveService.cs index aa5df49e2..18869b7cd 100644 --- a/API/Interfaces/Services/IArchiveService.cs +++ b/API/Interfaces/Services/IArchiveService.cs @@ -1,4 +1,7 @@ -using System.IO.Compression; +using System; +using System.Collections.Generic; +using System.IO.Compression; +using System.Threading.Tasks; using API.Archive; namespace API.Interfaces.Services @@ -12,5 
+15,6 @@ namespace API.Interfaces.Services string GetSummaryInfo(string archivePath); ArchiveLibrary CanOpen(string archivePath); bool ArchiveNeedsFlattening(ZipArchive archive); + Task> CreateZipForDownload(IEnumerable files, string tempFolder); } } \ No newline at end of file diff --git a/API/Interfaces/Services/IStatsService.cs b/API/Interfaces/Services/IStatsService.cs new file mode 100644 index 000000000..f91a4e522 --- /dev/null +++ b/API/Interfaces/Services/IStatsService.cs @@ -0,0 +1,13 @@ +using System.Threading.Tasks; +using API.DTOs; + +namespace API.Interfaces.Services +{ + public interface IStatsService + { + Task PathData(ClientInfoDto clientInfoDto); + Task FinalizeStats(); + Task CollectRelevantData(); + Task CollectAndSendStatsData(); + } +} \ No newline at end of file diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index e5c9226b3..5bfe954e8 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -9,10 +9,13 @@ namespace API.Parser { public static class Parser { - public static readonly string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|.cb7"; - public static readonly string BookFileExtensions = @"\.epub"; - public static readonly string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg)"; - public static readonly Regex FontSrcUrlRegex = new Regex("(src:url\\(\"?'?)([a-z0-9/\\._]+)(\"?'?\\))", RegexOptions.IgnoreCase | RegexOptions.Compiled); + public const string DefaultChapter = "0"; + public const string DefaultVolume = "0"; + + public const string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|.cb7"; + public const string BookFileExtensions = @"\.epub"; + public const string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg)"; + public static readonly Regex FontSrcUrlRegex = new Regex(@"(src:url\(.{1})" + "([^\"']*)" + @"(.{1}\))", RegexOptions.IgnoreCase | RegexOptions.Compiled); public static readonly Regex CssImportUrlRegex = new Regex("(@import\\s[\"|'])(?[\\w\\d/\\._-]+)([\"|'];?)", RegexOptions.IgnoreCase | RegexOptions.Compiled); private static readonly string XmlRegexExtensions = @"\.xml"; @@ -92,7 +95,7 @@ namespace API.Parser RegexOptions.IgnoreCase | RegexOptions.Compiled), // Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb) new Regex( - @"(?.*) (\b|_|-)v", + @"(?.*) (\b|_|-)(v|ch\.?|c)\d+", RegexOptions.IgnoreCase | RegexOptions.Compiled), //Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip must be before [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip // due to duplicate version identifiers in file. @@ -197,6 +200,14 @@ namespace API.Parser new Regex( @"^(?.*)(?: |_)v\d+", RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Amazing Man Comics chapter 25 + new Regex( + @"^(?.*)(?: |_)c(hapter) \d+", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Amazing Man Comics issue #25 + new Regex( + @"^(?.*)(?: |_)i(ssue) #\d+", + RegexOptions.IgnoreCase | RegexOptions.Compiled), // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) new Regex( @"^(?.*)(?: \d+)", @@ -239,11 +250,11 @@ namespace API.Parser RegexOptions.IgnoreCase | RegexOptions.Compiled), // Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005) new Regex( - @"^(?.*)(?: |_)(?\d+)", + @"^(?.*)(?\d+)", RegexOptions.IgnoreCase | RegexOptions.Compiled), // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) new Regex( - @"^(?.*)(?\d+))", + @"^(?.*)(?\d+))", RegexOptions.IgnoreCase | RegexOptions.Compiled), // Batman & Robin the Teen Wonder #0 new Regex( @@ -281,6 +292,14 @@ namespace API.Parser new Regex( @"^(?.*)(?: |_)(c? ?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)-", RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Amazing Man Comics chapter 25 + new Regex( + @"^(?!Vol)(?.*)( |_)c(hapter)( |_)(?\d*)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Amazing Man Comics issue #25 + new Regex( + @"^(?!Vol)(?.*)( |_)i(ssue)( |_) #(?\d*)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), }; private static readonly Regex[] ReleaseGroupRegex = new[] @@ -372,10 +391,16 @@ namespace API.Parser { // All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle. new Regex( - @"(?Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories|(?Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories|Bonus)", RegexOptions.IgnoreCase | RegexOptions.Compiled), }; + // If SP\d+ is in the filename, we force treat it as a special regardless if volume or chapter might have been found. + private static readonly Regex SpecialMarkerRegex = new Regex( + @"(?SP\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled + ); + /// /// Parses information out of a file path. Will fallback to using directory name if Series couldn't be parsed @@ -424,7 +449,7 @@ namespace API.Parser { var folder = fallbackFolders[i]; if (!string.IsNullOrEmpty(ParseMangaSpecial(folder))) continue; - if (ParseVolume(folder) != "0" || ParseChapter(folder) != "0") continue; + if (ParseVolume(folder) != DefaultVolume || ParseChapter(folder) != DefaultChapter) continue; var series = ParseSeries(folder); @@ -453,12 +478,22 @@ namespace API.Parser var isSpecial = ParseMangaSpecial(fileName); // We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that // could cause a problem as Omake is a special term, but there is valid volume/chapter information. - if (ret.Chapters == "0" && ret.Volumes == "0" && !string.IsNullOrEmpty(isSpecial)) + if (ret.Chapters == DefaultChapter && ret.Volumes == DefaultVolume && !string.IsNullOrEmpty(isSpecial)) { ret.IsSpecial = true; } - - + + if (HasSpecialMarker(fileName)) + { + ret.IsSpecial = true; + ret.Chapters = DefaultChapter; + ret.Volumes = DefaultVolume; + } + + if (string.IsNullOrEmpty(ret.Series)) + { + ret.Series = CleanTitle(fileName); + } return ret.Series == string.Empty ? null : ret; } @@ -491,6 +526,25 @@ namespace API.Parser return string.Empty; } + /// + /// If the file has SP marker. 
+ /// + /// + /// + public static bool HasSpecialMarker(string filePath) + { + var matches = SpecialMarkerRegex.Matches(filePath); + foreach (Match match in matches) + { + if (match.Groups["Special"].Success && match.Groups["Special"].Value != string.Empty) + { + return true; + } + } + + return false; + } + public static string ParseMangaSpecial(string filePath) { foreach (var regex in MangaSpecialRegex) @@ -560,7 +614,7 @@ namespace API.Parser } } - return "0"; + return DefaultVolume; } public static string ParseComicVolume(string filename) @@ -582,7 +636,7 @@ namespace API.Parser } } - return "0"; + return DefaultVolume; } public static string ParseChapter(string filename) @@ -610,7 +664,7 @@ namespace API.Parser } } - return "0"; + return DefaultChapter; } private static string AddChapterPart(string value) @@ -648,7 +702,7 @@ namespace API.Parser } } - return "0"; + return DefaultChapter; } private static string RemoveEditionTagHolders(string title) @@ -795,12 +849,20 @@ namespace API.Parser public static float MinimumNumberFromRange(string range) { - if (!Regex.IsMatch(range, @"^[\d-.]+$")) + try + { + if (!Regex.IsMatch(range, @"^[\d-.]+$")) + { + return (float) 0.0; + } + + var tokens = range.Replace("_", string.Empty).Split("-"); + return tokens.Min(float.Parse); + } + catch { return (float) 0.0; } - var tokens = range.Replace("_", string.Empty).Split("-"); - return tokens.Min(float.Parse); } public static string Normalize(string name) diff --git a/API/Parser/ParserInfo.cs b/API/Parser/ParserInfo.cs index e49d87e74..a2c4a9c51 100644 --- a/API/Parser/ParserInfo.cs +++ b/API/Parser/ParserInfo.cs @@ -3,7 +3,7 @@ namespace API.Parser { /// - /// This represents a single file + /// This represents all parsed information from a single file /// public class ParserInfo { diff --git a/API/Program.cs b/API/Program.cs index b2612efbe..fc906cca1 100644 --- a/API/Program.cs +++ b/API/Program.cs @@ -5,6 +5,7 @@ using System.Threading; using System.Threading.Tasks; using API.Data; using API.Entities; +using API.Services.HostedServices; using Kavita.Common; using Kavita.Common.EnvironmentInfo; using Microsoft.AspNetCore.Hosting; @@ -20,13 +21,13 @@ namespace API { public class Program { - private static readonly int HttpPort = 5000; + private static int _httpPort; protected Program() { } - private static string GetAppSettingFilename() + public static string GetAppSettingFilename() { var environment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT"); var isDevelopment = environment == Environments.Development; @@ -46,6 +47,9 @@ namespace API var base64 = Convert.ToBase64String(rBytes).Replace("/", ""); Configuration.UpdateJwtToken(GetAppSettingFilename(), base64); } + + // Get HttpPort from Config + _httpPort = Configuration.GetPort(GetAppSettingFilename()); var host = CreateHostBuilder(args).Build(); @@ -61,8 +65,6 @@ namespace API await context.Database.MigrateAsync(); await Seed.SeedRoles(roleManager); await Seed.SeedSettings(context); - // TODO: Remove this in v0.4.2 - await Seed.SeedSeriesMetadata(context); } catch (Exception ex) { @@ -79,7 +81,7 @@ namespace API { webBuilder.UseKestrel((opts) => { - opts.ListenAnyIP(HttpPort, options => + opts.ListenAnyIP(_httpPort, options => { options.Protocols = HttpProtocols.Http1AndHttp2; }); @@ -106,8 +108,16 @@ namespace API options.BeforeSend = sentryEvent => { if (sentryEvent.Exception != null - && sentryEvent.Exception.Message.Contains("[GetCoverImage] This archive cannot be read:") - && 
sentryEvent.Exception.Message.Contains("[BookService] ")) + && sentryEvent.Exception.Message.StartsWith("[GetCoverImage]") + && sentryEvent.Exception.Message.StartsWith("[BookService]") + && sentryEvent.Exception.Message.StartsWith("[ExtractArchive]") + && sentryEvent.Exception.Message.StartsWith("[GetSummaryInfo]") + && sentryEvent.Exception.Message.StartsWith("[GetSummaryInfo]") + && sentryEvent.Exception.Message.StartsWith("[GetNumberOfPagesFromArchive]") + && sentryEvent.Exception.Message.Contains("EPUB parsing error") + && sentryEvent.Exception.Message.Contains("Unsupported EPUB version") + && sentryEvent.Exception.Message.Contains("Incorrect EPUB") + && sentryEvent.Exception.Message.Contains("Access is Denied")) { return null; // Don't send this event to Sentry } diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs index 9adb19c0c..a90d429ed 100644 --- a/API/Services/ArchiveService.cs +++ b/API/Services/ArchiveService.cs @@ -4,12 +4,14 @@ using System.Diagnostics; using System.IO; using System.IO.Compression; using System.Linq; +using System.Threading.Tasks; using System.Xml.Serialization; using API.Archive; using API.Comparators; using API.Extensions; using API.Interfaces.Services; using API.Services.Tasks; +using Kavita.Common; using Microsoft.Extensions.Logging; using Microsoft.IO; using SharpCompress.Archives; @@ -25,13 +27,15 @@ namespace API.Services public class ArchiveService : IArchiveService { private readonly ILogger _logger; + private readonly IDirectoryService _directoryService; private const int ThumbnailWidth = 320; // 153w x 230h private static readonly RecyclableMemoryStreamManager StreamManager = new(); private readonly NaturalSortComparer _comparer; - public ArchiveService(ILogger logger) + public ArchiveService(ILogger logger, IDirectoryService directoryService) { _logger = logger; + _directoryService = directoryService; _comparer = new NaturalSortComparer(); } @@ -216,7 +220,39 @@ namespace API.Services !Path.HasExtension(archive.Entries.ElementAt(0).FullName) || archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !Parser.Parser.HasBlacklistedFolderInPath(e.FullName)); } - + + public async Task> CreateZipForDownload(IEnumerable files, string tempFolder) + { + var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp"); + var dateString = DateTime.Now.ToShortDateString().Replace("/", "_"); + + var tempLocation = Path.Join(tempDirectory, $"{tempFolder}_{dateString}"); + DirectoryService.ExistOrCreate(tempLocation); + if (!_directoryService.CopyFilesToDirectory(files, tempLocation)) + { + throw new KavitaException("Unable to copy files to temp directory archive download."); + } + + var zipPath = Path.Join(tempDirectory, $"kavita_{tempFolder}_{dateString}.zip"); + try + { + ZipFile.CreateFromDirectory(tempLocation, zipPath); + } + catch (AggregateException ex) + { + _logger.LogError(ex, "There was an issue creating temp archive"); + throw new KavitaException("There was an issue creating temp archive"); + } + + + var fileBytes = await _directoryService.ReadFileAsync(zipPath); + + DirectoryService.ClearAndDeleteDirectory(tempLocation); + (new FileInfo(zipPath)).Delete(); + + return Tuple.Create(fileBytes, zipPath); + } + private byte[] CreateThumbnail(string entryName, Stream stream, string formatExtension = ".jpg") { if (!formatExtension.StartsWith(".")) @@ -230,7 +266,7 @@ namespace API.Services } catch (Exception ex) { - _logger.LogWarning(ex, "There was an error and prevented thumbnail generation on 
{EntryName}. Defaulting to no cover image", entryName); + _logger.LogWarning(ex, "[GetCoverImage] There was an error and prevented thumbnail generation on {EntryName}. Defaulting to no cover image", entryName); } return Array.Empty(); @@ -245,13 +281,13 @@ namespace API.Services { if (!File.Exists(archivePath)) { - _logger.LogError("Archive {ArchivePath} could not be found", archivePath); + _logger.LogWarning("Archive {ArchivePath} could not be found", archivePath); return false; } if (Parser.Parser.IsArchive(archivePath) || Parser.Parser.IsEpub(archivePath)) return true; - _logger.LogError("Archive {ArchivePath} is not a valid archive", archivePath); + _logger.LogWarning("Archive {ArchivePath} is not a valid archive", archivePath); return false; } @@ -407,7 +443,7 @@ namespace API.Services } catch (Exception e) { - _logger.LogWarning(e, "There was a problem extracting {ArchivePath} to {ExtractPath}",archivePath, extractPath); + _logger.LogWarning(e, "[ExtractArchive] There was a problem extracting {ArchivePath} to {ExtractPath}",archivePath, extractPath); return; } _logger.LogDebug("Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds); diff --git a/API/Services/BookService.cs b/API/Services/BookService.cs index 08c4e2209..f36455ff6 100644 --- a/API/Services/BookService.cs +++ b/API/Services/BookService.cs @@ -23,7 +23,7 @@ namespace API.Services private const int ThumbnailWidth = 320; // 153w x 230h private readonly StylesheetParser _cssParser = new (); - + public BookService(ILogger logger) { _logger = logger; @@ -89,7 +89,8 @@ namespace API.Services } else { - anchor.Attributes.Add("target", "_blank"); + anchor.Attributes.Add("target", "_blank"); + anchor.Attributes.Add("rel", "noreferrer noopener"); } return; @@ -167,7 +168,7 @@ namespace API.Services } catch (Exception ex) { - _logger.LogError(ex, "[BookService] There was an exception getting summary, defaulting to empty string"); + _logger.LogWarning(ex, "[BookService] There was an exception getting summary, defaulting to empty string"); } return string.Empty; @@ -177,13 +178,13 @@ namespace API.Services { if (!File.Exists(filePath)) { - _logger.LogError("[BookService] Book {EpubFile} could not be found", filePath); + _logger.LogWarning("[BookService] Book {EpubFile} could not be found", filePath); return false; } if (Parser.Parser.IsBook(filePath)) return true; - _logger.LogError("[BookService] Book {EpubFile} is not a valid EPUB", filePath); + _logger.LogWarning("[BookService] Book {EpubFile} is not a valid EPUB", filePath); return false; } @@ -198,12 +199,19 @@ namespace API.Services } catch (Exception ex) { - _logger.LogError(ex, "[BookService] There was an exception getting number of pages, defaulting to 0"); + _logger.LogWarning(ex, "[BookService] There was an exception getting number of pages, defaulting to 0"); } return 0; } + public static string EscapeTags(string content) + { + content = Regex.Replace(content, @")", ""); + content = Regex.Replace(content, @")", ""); + return content; + } + public static string CleanContentKeys(string key) { return key.Replace("../", string.Empty); @@ -234,6 +242,83 @@ namespace API.Services try { using var epubBook = EpubReader.OpenBook(filePath); + + // If the epub has the following tags, we can group the books as Volumes + // + // + // + // If all three are present, we can take that over dc:title and format as: + // Series = The Dark Tower, Volume = 5, Filename as "Wolves of the Calla" + // In addition, the following can exist and 
should parse as a series (EPUB 3.2 spec) + // + // The Lord of the Rings + // + // set + // 2 + try + { + var seriesIndex = string.Empty; + var series = string.Empty; + var specialName = string.Empty; + var groupPosition = string.Empty; + + + foreach (var metadataItem in epubBook.Schema.Package.Metadata.MetaItems) + { + // EPUB 2 and 3 + switch (metadataItem.Name) + { + case "calibre:series_index": + seriesIndex = metadataItem.Content; + break; + case "calibre:series": + series = metadataItem.Content; + break; + case "calibre:title_sort": + specialName = metadataItem.Content; + break; + } + + // EPUB 3.2+ only + switch (metadataItem.Property) + { + case "group-position": + seriesIndex = metadataItem.Content; + break; + case "belongs-to-collection": + series = metadataItem.Content; + break; + case "collection-type": + groupPosition = metadataItem.Content; + break; + } + } + + if (!string.IsNullOrEmpty(series) && !string.IsNullOrEmpty(seriesIndex) && + (!string.IsNullOrEmpty(specialName) || groupPosition.Equals("series") || groupPosition.Equals("set"))) + { + if (string.IsNullOrEmpty(specialName)) + { + specialName = epubBook.Title; + } + return new ParserInfo() + { + Chapters = "0", + Edition = "", + Format = MangaFormat.Book, + Filename = Path.GetFileName(filePath), + Title = specialName, + FullFilePath = filePath, + IsSpecial = false, + Series = series, + Volumes = seriesIndex.Split(".")[0] + }; + } + } + catch (Exception) + { + // Swallow exception + } return new ParserInfo() { @@ -250,7 +335,7 @@ namespace API.Services } catch (Exception ex) { - _logger.LogError(ex, "[BookService] There was an exception when opening epub book: {FileName}", filePath); + _logger.LogWarning(ex, "[BookService] There was an exception when opening epub book: {FileName}", filePath); } return null; @@ -285,7 +370,7 @@ namespace API.Services } catch (Exception ex) { - _logger.LogError(ex, "[BookService] There was a critical error and prevented thumbnail generation on {BookFile}. Defaulting to no cover image", fileFilePath); + _logger.LogWarning(ex, "[BookService] There was a critical error and prevented thumbnail generation on {BookFile}. Defaulting to no cover image", fileFilePath); } return Array.Empty(); diff --git a/API/Services/CacheService.cs b/API/Services/CacheService.cs index 2ce9b375b..73c279657 100644 --- a/API/Services/CacheService.cs +++ b/API/Services/CacheService.cs @@ -63,10 +63,6 @@ namespace API.Services } new DirectoryInfo(extractPath).Flatten(); - // if (fileCount > 1) - // { - // new DirectoryInfo(extractPath).Flatten(); - // } return chapter; } diff --git a/API/Services/Clients/StatsApiClient.cs b/API/Services/Clients/StatsApiClient.cs new file mode 100644 index 000000000..d56f53707 --- /dev/null +++ b/API/Services/Clients/StatsApiClient.cs @@ -0,0 +1,62 @@ +using System; +using System.Net.Http; +using System.Net.Http.Json; +using System.Threading; +using System.Threading.Tasks; + +using API.Configurations.CustomOptions; +using API.DTOs; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; + +namespace API.Services.Clients +{ + public class StatsApiClient + { + private readonly HttpClient _client; + private readonly StatsOptions _options; + private readonly ILogger _logger; + private const string ApiUrl = "https://stats.kavitareader.com"; + + public StatsApiClient(HttpClient client, IOptions options, ILogger logger) + { + _client = client; + _logger = logger; + _options = options.Value ?? 
throw new ArgumentNullException(nameof(options)); + } + + public async Task SendDataToStatsServer(UsageStatisticsDto data) + { + var responseContent = string.Empty; + + try + { + using var response = await _client.PostAsJsonAsync(ApiUrl + "/api/InstallationStats", data); + + responseContent = await response.Content.ReadAsStringAsync(); + + response.EnsureSuccessStatusCode(); + } + catch (HttpRequestException e) + { + var info = new + { + dataSent = data, + response = responseContent + }; + + _logger.LogError(e, "The StatsServer did not respond successfully. {Content}", info); + + Console.WriteLine(e); + throw; + } + catch (Exception e) + { + _logger.LogError(e, "An error happened during the request to the Stats Server"); + + Console.WriteLine(e); + throw; + } + } + } +} \ No newline at end of file diff --git a/API/Services/ComicInfo.cs b/API/Services/ComicInfo.cs index 8277cfb35..55e823ee4 100644 --- a/API/Services/ComicInfo.cs +++ b/API/Services/ComicInfo.cs @@ -9,6 +9,7 @@ public string Publisher { get; set; } public string Genre { get; set; } public int PageCount { get; set; } + // ReSharper disable once InconsistentNaming public string LanguageISO { get; set; } public string Web { get; set; } } diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs index 40271ccd0..f823d2066 100644 --- a/API/Services/DirectoryService.cs +++ b/API/Services/DirectoryService.cs @@ -13,6 +13,9 @@ namespace API.Services public class DirectoryService : IDirectoryService { private readonly ILogger _logger; + private static readonly Regex ExcludeDirectories = new Regex( + @"@eaDir|\.DS_Store", + RegexOptions.Compiled | RegexOptions.IgnoreCase); public DirectoryService(ILogger logger) { @@ -102,6 +105,16 @@ namespace API.Services return !Directory.Exists(path) ? Array.Empty() : Directory.GetFiles(path); } + /// + /// Returns the total number of bytes for a given set of full file paths + /// + /// + /// Total bytes + public static long GetTotalSize(IEnumerable paths) + { + return paths.Sum(path => new FileInfo(path).Length); + } + /// /// Returns true if the path exists and is a directory. If path does not exist, this will create it. Returns false in all fail cases. /// @@ -212,6 +225,7 @@ namespace API.Services /// Directory to scan /// Action to apply on file path /// Regex pattern to search against + /// /// public static int TraverseTreeParallelForEach(string root, Action action, string searchPattern, ILogger logger) { @@ -231,11 +245,11 @@ namespace API.Services while (dirs.Count > 0) { var currentDir = dirs.Pop(); - string[] subDirs; + IEnumerable subDirs; string[] files; try { - subDirs = Directory.GetDirectories(currentDir); + subDirs = Directory.GetDirectories(currentDir).Where(path => ExcludeDirectories.Matches(path).Count == 0); } // Thrown if we do not have discovery permission on the directory. catch (UnauthorizedAccessException e) { @@ -306,7 +320,7 @@ namespace API.Services // Push the subdirectories onto the stack for traversal. // This could also be done before handing the files. 
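For reference, the two series conventions that the new BookService.ParseInfo logic above checks (calibre's EPUB 2 `calibre:series`/`calibre:series_index`/`calibre:title_sort` meta tags, and the EPUB 3.2 `belongs-to-collection`/`collection-type`/`group-position` properties) can be exercised with a small, self-contained sketch. The OPF values below reuse the titles mentioned in the code comments; the XML shape and the System.Xml.Linq parsing are illustrative only and are not the VersOne.Epub calls the PR actually uses.

```csharp
using System;
using System.Linq;
using System.Xml.Linq;

public static class EpubSeriesMetadataSketch
{
    // Illustrative OPF metadata only; the titles reuse the examples from the code comments above.
    private const string SampleOpfMetadata = @"
<metadata xmlns=""http://www.idpf.org/2007/opf"">
  <!-- Calibre-style (EPUB 2) series tags -->
  <meta name=""calibre:series"" content=""The Dark Tower""/>
  <meta name=""calibre:series_index"" content=""5.0""/>
  <meta name=""calibre:title_sort"" content=""Wolves of the Calla""/>
  <!-- EPUB 3.2 collection tags -->
  <meta property=""belongs-to-collection"" id=""c01"">The Lord of the Rings</meta>
  <meta refines=""#c01"" property=""collection-type"">set</meta>
  <meta refines=""#c01"" property=""group-position"">2</meta>
</metadata>";

    public static void Main()
    {
        XNamespace opf = "http://www.idpf.org/2007/opf";
        var metas = XElement.Parse(SampleOpfMetadata).Elements(opf + "meta").ToList();

        // Calibre convention: series name and numeric index live in name/content attribute pairs.
        string Calibre(string name) => metas
            .FirstOrDefault(m => (string) m.Attribute("name") == name)
            ?.Attribute("content")?.Value;

        // EPUB 3.2 convention: the collection name is the element text, refined by
        // collection-type (series/set) and group-position metas.
        string Epub3(string property) => metas
            .FirstOrDefault(m => (string) m.Attribute("property") == property)
            ?.Value.Trim();

        Console.WriteLine($"calibre: {Calibre("calibre:series")} #{Calibre("calibre:series_index")}");
        Console.WriteLine($"epub3:   {Epub3("belongs-to-collection")} ({Epub3("collection-type")}) #{Epub3("group-position")}");
    }
}
```

In the PR itself these values come from `epubBook.Schema.Package.Metadata.MetaItems`, keyed on `Name` for the calibre tags and `Property` for the EPUB 3 ones, with the volume taken from the integer part of the series index.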
- foreach (string str in subDirs) + foreach (var str in subDirs) dirs.Push(str); } diff --git a/API/Services/HostedServices/StartupTasksHostedService.cs b/API/Services/HostedServices/StartupTasksHostedService.cs new file mode 100644 index 000000000..95f87006e --- /dev/null +++ b/API/Services/HostedServices/StartupTasksHostedService.cs @@ -0,0 +1,54 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using API.Interfaces; +using API.Interfaces.Services; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + +namespace API.Services.HostedServices +{ + public class StartupTasksHostedService : IHostedService + { + private readonly IServiceProvider _provider; + + public StartupTasksHostedService(IServiceProvider serviceProvider) + { + _provider = serviceProvider; + } + + public async Task StartAsync(CancellationToken cancellationToken) + { + using var scope = _provider.CreateScope(); + + var taskScheduler = scope.ServiceProvider.GetRequiredService(); + taskScheduler.ScheduleTasks(); + + try + { + await ManageStartupStatsTasks(scope, taskScheduler); + } + catch (Exception) + { + //If stats startup fail the user can keep using the app + } + } + + private async Task ManageStartupStatsTasks(IServiceScope serviceScope, ITaskScheduler taskScheduler) + { + var unitOfWork = serviceScope.ServiceProvider.GetRequiredService(); + + var settingsDto = await unitOfWork.SettingsRepository.GetSettingsDtoAsync(); + + if (!settingsDto.AllowStatCollection) return; + + taskScheduler.ScheduleStatsTasks(); + + var statsService = serviceScope.ServiceProvider.GetRequiredService(); + + await statsService.CollectAndSendStatsData(); + } + + public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; + } +} \ No newline at end of file diff --git a/API/Services/MetadataService.cs b/API/Services/MetadataService.cs index 122fc90c6..6ee2856ab 100644 --- a/API/Services/MetadataService.cs +++ b/API/Services/MetadataService.cs @@ -158,7 +158,7 @@ namespace API.Services } - if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.Complete()).Result) + if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result) { _logger.LogInformation("Updated metadata for {LibraryName} in {ElapsedMilliseconds} milliseconds", library.Name, sw.ElapsedMilliseconds); } @@ -191,7 +191,7 @@ namespace API.Services _unitOfWork.SeriesRepository.Update(series); - if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.Complete()).Result) + if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result) { _logger.LogInformation("Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds); } diff --git a/API/Services/StatsService.cs b/API/Services/StatsService.cs new file mode 100644 index 000000000..2c315c99d --- /dev/null +++ b/API/Services/StatsService.cs @@ -0,0 +1,186 @@ +using System; +using System.IO; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using API.Data; +using API.DTOs; +using API.Interfaces; +using API.Interfaces.Services; +using API.Services.Clients; +using Kavita.Common; +using Kavita.Common.EnvironmentInfo; +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Logging; + +namespace API.Services +{ + public class StatsService : IStatsService + { + private const string TempFilePath = "stats/"; + private const string TempFileName = "app_stats.json"; + + private 
readonly StatsApiClient _client; + private readonly DataContext _dbContext; + private readonly ILogger _logger; + private readonly IUnitOfWork _unitOfWork; + + public StatsService(StatsApiClient client, DataContext dbContext, ILogger logger, + IUnitOfWork unitOfWork) + { + _client = client; + _dbContext = dbContext; + _logger = logger; + _unitOfWork = unitOfWork; + } + + private static string FinalPath => Path.Combine(Directory.GetCurrentDirectory(), TempFilePath, TempFileName); + private static bool FileExists => File.Exists(FinalPath); + + public async Task PathData(ClientInfoDto clientInfoDto) + { + _logger.LogInformation("Pathing client data to the file"); + + var statisticsDto = await GetData(); + + statisticsDto.AddClientInfo(clientInfoDto); + + await SaveFile(statisticsDto); + } + + public async Task CollectRelevantData() + { + _logger.LogInformation("Collecting data from the server and database"); + + _logger.LogInformation("Collecting usage info"); + var usageInfo = await GetUsageInfo(); + + _logger.LogInformation("Collecting server info"); + var serverInfo = GetServerInfo(); + + await PathData(serverInfo, usageInfo); + } + + public async Task FinalizeStats() + { + try + { + _logger.LogInformation("Finalizing Stats collection flow"); + + var data = await GetExistingData(); + + _logger.LogInformation("Sending data to the Stats server"); + await _client.SendDataToStatsServer(data); + + _logger.LogInformation("Deleting the file from disk"); + if (FileExists) File.Delete(FinalPath); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error Finalizing Stats collection flow"); + throw; + } + } + + public async Task CollectAndSendStatsData() + { + await CollectRelevantData(); + await FinalizeStats(); + } + + private async Task PathData(ServerInfoDto serverInfoDto, UsageInfoDto usageInfoDto) + { + _logger.LogInformation("Pathing server and usage info to the file"); + + var data = await GetData(); + + data.ServerInfo = serverInfoDto; + data.UsageInfo = usageInfoDto; + + data.MarkAsUpdatedNow(); + + await SaveFile(data); + } + + private async ValueTask GetData() + { + if (!FileExists) return new UsageStatisticsDto {InstallId = HashUtil.AnonymousToken()}; + + return await GetExistingData(); + } + + private async Task GetUsageInfo() + { + var usersCount = await _dbContext.Users.CountAsync(); + + var libsCountByType = await _dbContext.Library + .AsNoTracking() + .GroupBy(x => x.Type) + .Select(x => new LibInfo {Type = x.Key, Count = x.Count()}) + .ToArrayAsync(); + + var uniqueFileTypes = await _unitOfWork.FileRepository.GetFileExtensions(); + + var usageInfo = new UsageInfoDto + { + UsersCount = usersCount, + LibraryTypesCreated = libsCountByType, + FileTypes = uniqueFileTypes + }; + + return usageInfo; + } + + private static ServerInfoDto GetServerInfo() + { + var serverInfo = new ServerInfoDto + { + Os = RuntimeInformation.OSDescription, + DotNetVersion = Environment.Version.ToString(), + RunTimeVersion = RuntimeInformation.FrameworkDescription, + KavitaVersion = BuildInfo.Version.ToString(), + Culture = Thread.CurrentThread.CurrentCulture.Name, + BuildBranch = BuildInfo.Branch + }; + + return serverInfo; + } + + private async Task GetExistingData() + { + _logger.LogInformation("Fetching existing data from file"); + var existingDataJson = await GetFileDataAsString(); + + _logger.LogInformation("Deserializing data from file to object"); + var existingData = JsonSerializer.Deserialize(existingDataJson); + + return existingData; + } + + private async Task GetFileDataAsString() + { + 
_logger.LogInformation("Reading file from disk"); + return await File.ReadAllTextAsync(FinalPath); + } + + private async Task SaveFile(UsageStatisticsDto statisticsDto) + { + _logger.LogInformation("Saving file"); + + var finalDirectory = FinalPath.Replace(TempFileName, string.Empty); + if (!Directory.Exists(finalDirectory)) + { + _logger.LogInformation("Creating tmp directory"); + Directory.CreateDirectory(finalDirectory); + } + + _logger.LogInformation("Serializing data to write"); + var dataJson = JsonSerializer.Serialize(statisticsDto); + + _logger.LogInformation("Writing file to the disk"); + await File.WriteAllTextAsync(FinalPath, dataJson); + } + } +} \ No newline at end of file diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs index b284fd9f7..61ee114b3 100644 --- a/API/Services/TaskScheduler.cs +++ b/API/Services/TaskScheduler.cs @@ -19,11 +19,14 @@ namespace API.Services private readonly IBackupService _backupService; private readonly ICleanupService _cleanupService; + private readonly IStatsService _statsService; + public static BackgroundJobServer Client => new BackgroundJobServer(); public TaskScheduler(ICacheService cacheService, ILogger logger, IScannerService scannerService, - IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService) + IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, + ICleanupService cleanupService, IStatsService statsService) { _cacheService = cacheService; _logger = logger; @@ -32,6 +35,7 @@ namespace API.Services _metadataService = metadataService; _backupService = backupService; _cleanupService = cleanupService; + _statsService = statsService; } public void ScheduleTasks() @@ -65,6 +69,33 @@ namespace API.Services RecurringJob.AddOrUpdate("cleanup", () => _cleanupService.Cleanup(), Cron.Daily); } + #region StatsTasks + + private const string SendDataTask = "finalize-stats"; + public void ScheduleStatsTasks() + { + var allowStatCollection = bool.Parse(Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.AllowStatCollection)).GetAwaiter().GetResult().Value); + if (!allowStatCollection) + { + _logger.LogDebug("User has opted out of stat collection, not registering tasks"); + return; + } + + _logger.LogDebug("Adding StatsTasks"); + + _logger.LogDebug("Scheduling Send data to the Stats server {Setting}", nameof(Cron.Daily)); + RecurringJob.AddOrUpdate(SendDataTask, () => _statsService.CollectAndSendStatsData(), Cron.Daily); + } + + public void CancelStatsTasks() + { + _logger.LogDebug("Cancelling/Removing StatsTasks"); + + RecurringJob.RemoveIfExists(SendDataTask); + } + + #endregion + public void ScanLibrary(int libraryId, bool forceUpdate = false) { _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId); diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index e22803c4b..232e8fce0 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -89,7 +89,7 @@ namespace API.Services.Tasks UpdateLibrary(library, series); _unitOfWork.LibraryRepository.Update(library); - if (Task.Run(() => _unitOfWork.Complete()).Result) + if (Task.Run(() => _unitOfWork.CommitAsync()).Result) { _logger.LogInformation("Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, series.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, library.Name); } @@ -466,7 +466,7 @@ 
namespace API.Services.Tasks return; } - if (type == LibraryType.Book && Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != "0") + if (type == LibraryType.Book && Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != Parser.Parser.DefaultVolume) { info = Parser.Parser.Parse(path, rootPath, type); var info2 = _bookService.ParseInfo(path); diff --git a/API/Startup.cs b/API/Startup.cs index 82fd667a3..f2b648d24 100644 --- a/API/Startup.cs +++ b/API/Startup.cs @@ -2,9 +2,9 @@ using System; using System.IO.Compression; using System.Linq; using API.Extensions; -using API.Interfaces; using API.Middleware; using API.Services; +using API.Services.HostedServices; using Hangfire; using Hangfire.MemoryStorage; using Kavita.Common.EnvironmentInfo; @@ -24,16 +24,18 @@ namespace API public class Startup { private readonly IConfiguration _config; + private readonly IWebHostEnvironment _env; - public Startup(IConfiguration config) + public Startup(IConfiguration config, IWebHostEnvironment env) { _config = config; + _env = env; } // This method gets called by the runtime. Use this method to add services to the container. public void ConfigureServices(IServiceCollection services) { - services.AddApplicationServices(_config); + services.AddApplicationServices(_config, _env); services.AddControllers(); services.Configure(options => { @@ -62,6 +64,8 @@ namespace API services.AddResponseCaching(); + services.AddStatsClient(_config); + services.AddHangfire(configuration => configuration .UseSimpleAssemblyNameTypeSerializer() .UseRecommendedSerializerSettings() @@ -69,11 +73,15 @@ namespace API // Add the processing server as IHostedService services.AddHangfireServer(); + + // Add IHostedService for startup tasks + // Any services that should be bootstrapped go here + services.AddHostedService(); } // This method gets called by the runtime. Use this method to configure the HTTP request pipeline. public void Configure(IApplicationBuilder app, IBackgroundJobClient backgroundJobs, IWebHostEnvironment env, - IHostApplicationLifetime applicationLifetime, ITaskScheduler taskScheduler) + IHostApplicationLifetime applicationLifetime) { app.UseMiddleware(); @@ -135,9 +143,6 @@ namespace API { Console.WriteLine($"Kavita - v{BuildInfo.Version}"); }); - - // Any services that should be bootstrapped go here - taskScheduler.ScheduleTasks(); } private void OnShutdown() diff --git a/API/appsettings.Development.json b/API/appsettings.Development.json index 4dd015431..35e9218b9 100644 --- a/API/appsettings.Development.json +++ b/API/appsettings.Development.json @@ -3,6 +3,11 @@ "DefaultConnection": "Data source=kavita.db" }, "TokenKey": "super secret unguessable key", + "StatsOptions": { + "ServerUrl": "http://localhost:5002", + "ServerSecret": "here's where the api key goes", + "SendDataAt": "23:50" + }, "Logging": { "LogLevel": { "Default": "Debug", @@ -17,5 +22,6 @@ "FileSizeLimitBytes": 0, "MaxRollingFiles": 0 } - } + }, + "Port": 5000 } diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..ab4a8a30e --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,56 @@ +# How to Contribute # + +We're always looking for people to help make Kavita even better, there are a number of ways to contribute. + +## Documentation ## +Setup guides, FAQ, the more information we have on the [wiki](https://github.com/Kareadita/Kavita/wiki) the better. + +## Development ## + +### Tools required ### +- Visual Studio 2019 or higher (https://www.visualstudio.com/vs/). 
The community version is free and works fine. [Download it here](https://www.visualstudio.com/downloads/). +- Rider (optional to Visual Studio) (https://www.jetbrains.com/rider/) +- HTML/Javascript editor of choice (VS Code/Sublime Text/Webstorm/Atom/etc) +- [Git](https://git-scm.com/downloads) +- [NodeJS](https://nodejs.org/en/download/) (Node 14.X.X or higher) +- .NET 5.0+ + +### Getting started ### + +1. Fork Kavita +2. Clone the repository into your development machine. [*info*](https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/cloning-a-repository-from-github) + - Kavita as of v0.4.2 requires Kavita-webui to be cloned next to the Kavita. Fork and clone this as well. +3. Install the required Node Packages + - cd kavita-webui + - `npm install` + - `npm install -g @angular/cli` +4. Start webui server `ng serve` +5. Build the project in Visual Studio/Rider, Setting startup project to `API` +6. Debug the project in Visual Studio/Rider +7. Open http://localhost:4200 +8. (Deployment only) Run build.sh and pass the Runtime Identifier for your OS or just build.sh for all supported RIDs. + + +### Contributing Code ### +- If you're adding a new, already requested feature, please comment on [Github Issues](https://github.com/Kareadita/Kavita/issues "Github Issues") so work is not duplicated (If you want to add something not already on there, please talk to us first) +- Rebase from Kavita's develop branch, don't merge +- Make meaningful commits, or squash them +- Feel free to make a pull request before work is complete, this will let us see where its at and make comments/suggest improvements +- Reach out to us on the discord if you have any questions +- Add tests (unit/integration) +- Commit with *nix line endings for consistency (We checkout Windows and commit *nix) +- One feature/bug fix per pull request to keep things clean and easy to understand +- Use 4 spaces instead of tabs, this is the default for VS 2019 and WebStorm (to my knowledge) + - Use 2 spaces for Kavita-webui files + +### Pull Requesting ### +- Only make pull requests to develop, never master, if you make a PR to master we'll comment on it and close it +- You're probably going to get some comments or questions from us, they will be to ensure consistency and maintainability +- We'll try to respond to pull requests as soon as possible, if its been a day or two, please reach out to us, we may have missed it +- Each PR should come from its own [feature branch](http://martinfowler.com/bliki/FeatureBranch.html) not develop in your fork, it should have a meaningful branch name (what is being added/fixed) + - new-feature (Good) + - fix-bug (Good) + - patch (Bad) + - develop (Bad) + +If you have any questions about any of this, please let us know. 
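On the server side, the Startup.cs change above stops calling `taskScheduler.ScheduleTasks()` from `Configure` and instead registers `StartupTasksHostedService`, which runs once the host has started. A trimmed sketch of that pattern follows; `ITaskSchedulerSketch` stands in for whatever scoped services the startup work needs.

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;

// Placeholder for the scheduling service the startup tasks resolve; in the PR this is API.Interfaces.ITaskScheduler.
public interface ITaskSchedulerSketch
{
    void ScheduleTasks();
}

public class StartupTasksHostedServiceSketch : IHostedService
{
    private readonly IServiceProvider _provider;

    public StartupTasksHostedServiceSketch(IServiceProvider provider) => _provider = provider;

    public Task StartAsync(CancellationToken cancellationToken)
    {
        // The hosted service itself is a singleton, so open a scope to resolve scoped services.
        using var scope = _provider.CreateScope();
        scope.ServiceProvider.GetRequiredService<ITaskSchedulerSketch>().ScheduleTasks();
        return Task.CompletedTask;
    }

    public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask;
}

// Registered in ConfigureServices, mirroring the Startup.cs change above:
//   services.AddHostedService<StartupTasksHostedServiceSketch>();
```

Because a hosted service is resolved from the root provider, opening a scope in StartAsync is what allows it to use scoped services, such as the unit of work the real implementation consults for the AllowStatCollection setting.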
diff --git a/Dockerfile b/Dockerfile index 7f4e8ac71..4b1fb47b7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,35 +1,30 @@ -#This Dockerfile pulls the latest git commit and builds Kavita from source -FROM mcr.microsoft.com/dotnet/sdk:5.0-focal AS builder +#This Dockerfile creates a build for all architectures -MAINTAINER Chris P +#Image that copies in the files and passes them to the main image +FROM ubuntu:focal AS copytask -ENV DEBIAN_FRONTEND=noninteractive ARG TARGETPLATFORM -#Installs nodejs and npm -RUN curl -fsSL https://deb.nodesource.com/setup_14.x | bash - \ - && apt-get install -y nodejs \ - && rm -rf /var/lib/apt/lists/* - -#Builds app based on platform -COPY build_target.sh /build_target.sh -RUN /build_target.sh +#Move the output files to where they need to be +RUN mkdir /files +COPY _output/*.tar.gz /files/ +COPY Kavita-webui/dist /files/wwwroot +COPY copy_runtime.sh /copy_runtime.sh +RUN /copy_runtime.sh #Production image FROM ubuntu:focal -MAINTAINER Chris P - -#Move the output files to where they need to be -COPY --from=builder /Projects/Kavita/_output/build/Kavita /kavita +COPY --from=copytask /Kavita /kavita +COPY --from=copytask /files/wwwroot /kavita/wwwroot #Installs program dependencies RUN apt-get update \ && apt-get install -y libicu-dev libssl1.1 pwgen \ && rm -rf /var/lib/apt/lists/* -#Creates the manga storage directory -RUN mkdir /manga /kavita/data +#Creates the data directory +RUN mkdir /kavita/data RUN cp /kavita/appsettings.Development.json /kavita/appsettings.json \ && sed -i 's/Data source=kavita.db/Data source=data\/kavita.db/g' /kavita/appsettings.json diff --git a/Dockerfile.alpine b/Dockerfile.alpine deleted file mode 100644 index faacfa823..000000000 --- a/Dockerfile.alpine +++ /dev/null @@ -1,28 +0,0 @@ -#This Dockerfile is for the musl alpine build of Kavita. 
-FROM alpine:latest - -MAINTAINER Chris P - -#Installs the needed dependencies -RUN apk update && apk add --no-cache wget curl pwgen icu-dev bash - -#Downloads Kavita, unzips and moves the folders to where they need to be -RUN wget https://github.com/Kareadita/Kavita/releases/download/v0.3.7/kavita-linux-musl-x64.tar.gz \ - && tar -xzf kavita*.tar.gz \ - && mv Kavita/ /kavita/ \ - && rm kavita*.gz \ - && chmod +x /kavita/Kavita - -#Creates the needed folders -RUN mkdir /manga /kavita/data /kavita/temp /kavita/cache - -RUN sed -i 's/Data source=kavita.db/Data source=data\/kavita.db/g' /kavita/appsettings.json - -COPY entrypoint.sh /entrypoint.sh - -EXPOSE 5000 - -WORKDIR /kavita - -ENTRYPOINT ["/bin/bash"] -CMD ["/entrypoint.sh"] diff --git a/Dockerfile.arm b/Dockerfile.arm deleted file mode 100644 index e28430a38..000000000 --- a/Dockerfile.arm +++ /dev/null @@ -1,27 +0,0 @@ -#This Dockerfile pulls the latest git commit and builds Kavita from source - -#Production image -FROM ubuntu:focal - -#Move the output files to where they need to be -COPY Kavita /kavita - -#Installs program dependencies -RUN apt-get update \ - && apt-get install -y libicu-dev libssl1.1 pwgen \ - && rm -rf /var/lib/apt/lists/* - -#Creates the manga storage directory -RUN mkdir /kavita/data - -RUN cp /kavita/appsettings.Development.json /kavita/appsettings.json \ - && sed -i 's/Data source=kavita.db/Data source=data\/kavita.db/g' /kavita/appsettings.json - -COPY entrypoint.sh /entrypoint.sh - -EXPOSE 5000 - -WORKDIR /kavita - -ENTRYPOINT ["/bin/bash"] -CMD ["/entrypoint.sh"] diff --git a/FUNDING.yml b/FUNDING.yml new file mode 100644 index 000000000..a5717dec4 --- /dev/null +++ b/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: ["https://paypal.me/majora2007"] diff --git a/Kavita.Common/Configuration.cs b/Kavita.Common/Configuration.cs index 02a01c9d8..755e57743 100644 --- a/Kavita.Common/Configuration.cs +++ b/Kavita.Common/Configuration.cs @@ -1,12 +1,13 @@ using System; using System.IO; using System.Text.Json; +using Kavita.Common.EnvironmentInfo; namespace Kavita.Common { public static class Configuration { - + #region JWT Token public static bool CheckIfJwtTokenSet(string filePath) { try { @@ -28,7 +29,6 @@ namespace Kavita.Common return false; } - public static bool UpdateJwtToken(string filePath, string token) { try @@ -42,5 +42,93 @@ namespace Kavita.Common return false; } } + #endregion + #region Port + public static bool UpdatePort(string filePath, int port) + { + if (new OsInfo(Array.Empty()).IsDocker) + { + return true; + } + + try + { + var currentPort = GetPort(filePath); + var json = File.ReadAllText(filePath).Replace("\"Port\": " + currentPort, "\"Port\": " + port); + File.WriteAllText(filePath, json); + return true; + } + catch (Exception) + { + return false; + } + } + public static int GetPort(string filePath) + { + const int defaultPort = 5000; + if (new 
OsInfo(Array.Empty()).IsDocker) + { + return defaultPort; + } + + try { + var json = File.ReadAllText(filePath); + var jsonObj = JsonSerializer.Deserialize(json); + const string key = "Port"; + + if (jsonObj.TryGetProperty(key, out JsonElement tokenElement)) + { + return tokenElement.GetInt32(); + } + } + catch (Exception ex) { + Console.WriteLine("Error writing app settings: " + ex.Message); + } + + return defaultPort; + } + #endregion + #region LogLevel + public static bool UpdateLogLevel(string filePath, string logLevel) + { + try + { + var currentLevel = GetLogLevel(filePath); + var json = File.ReadAllText(filePath).Replace($"\"Default\": \"{currentLevel}\"", $"\"Default\": \"{logLevel}\""); + File.WriteAllText(filePath, json); + return true; + } + catch (Exception) + { + return false; + } + } + public static string GetLogLevel(string filePath) + { + try { + var json = File.ReadAllText(filePath); + var jsonObj = JsonSerializer.Deserialize(json); + if (jsonObj.TryGetProperty("Logging", out JsonElement tokenElement)) + { + foreach (var property in tokenElement.EnumerateObject()) + { + if (!property.Name.Equals("LogLevel")) continue; + foreach (var logProperty in property.Value.EnumerateObject()) + { + if (logProperty.Name.Equals("Default")) + { + return logProperty.Value.GetString(); + } + } + } + } + } + catch (Exception ex) { + Console.WriteLine("Error writing app settings: " + ex.Message); + } + + return "Information"; + } + #endregion } } \ No newline at end of file diff --git a/Kavita.Common/Kavita.Common.csproj b/Kavita.Common/Kavita.Common.csproj index 43fe7f53d..3b92d09f8 100644 --- a/Kavita.Common/Kavita.Common.csproj +++ b/Kavita.Common/Kavita.Common.csproj @@ -4,7 +4,7 @@ net5.0 kareadita.github.io Kavita - 0.4.1.0 + 0.4.2.0 en diff --git a/Logo/dottrace.svg b/Logo/dottrace.svg new file mode 100644 index 000000000..b879517cd --- /dev/null +++ b/Logo/dottrace.svg @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Logo/jetbrains.svg b/Logo/jetbrains.svg new file mode 100644 index 000000000..75d4d2177 --- /dev/null +++ b/Logo/jetbrains.svg @@ -0,0 +1,66 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Logo/kavita.svg b/Logo/kavita.svg new file mode 100644 index 000000000..f56f8a7c5 --- /dev/null +++ b/Logo/kavita.svg @@ -0,0 +1,124 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Logo/resharper.svg b/Logo/resharper.svg new file mode 100644 index 000000000..24c987a78 --- /dev/null +++ b/Logo/resharper.svg @@ -0,0 +1,50 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Logo/rider.svg b/Logo/rider.svg new file mode 100644 index 000000000..82da35b0b --- /dev/null +++ b/Logo/rider.svg @@ -0,0 +1,42 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rider + + + + + + + + + + + + + + diff --git a/Logo/sentry.svg b/Logo/sentry.svg new file mode 100644 index 000000000..40bd18594 --- /dev/null +++ b/Logo/sentry.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/README.md b/README.md index a3fd09193..60db687a1 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Kavita +# []() Kavita
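Alongside the new `"Port": 5000` entry in appsettings.Development.json, Kavita.Common's Configuration class above gains GetPort/UpdatePort helpers (short-circuited when running under Docker). A condensed sketch of the read path with System.Text.Json, assuming the same default of 5000:

```csharp
using System;
using System.IO;
using System.Text.Json;

public static class PortConfigSketch
{
    private const int DefaultPort = 5000;

    // Reads the top-level "Port" value from an appsettings.json-style file, falling back to 5000.
    public static int GetPort(string filePath)
    {
        try
        {
            var json = File.ReadAllText(filePath);
            var root = JsonSerializer.Deserialize<JsonElement>(json);

            if (root.TryGetProperty("Port", out var port))
            {
                return port.GetInt32();
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine("Error reading app settings: " + ex.Message);
        }

        return DefaultPort;
    }
}
```

UpdatePort in the PR takes the simpler route of a string replace on the raw file contents rather than re-serializing the whole document.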
![Cover Image](https://github.com/Kareadita/kareadita.github.io/blob/main/img/features/seriesdetail.PNG?raw=true) @@ -9,44 +9,40 @@ your reading collection with your friends and family! [![Release](https://img.shields.io/github/release/Kareadita/Kavita.svg?style=flat&maxAge=3600)](https://github.com/Kareadita/Kavita/releases) [![License](https://img.shields.io/badge/license-GPLv3-blue.svg?style=flat)](https://github.com/Kareadita/Kavita/blob/master/LICENSE) -[![Discord](https://img.shields.io/badge/discord-chat-7289DA.svg?maxAge=60)](https://discord.gg/eczRp9eeem) [![Downloads](https://img.shields.io/github/downloads/Kareadita/Kavita/total.svg?style=flat)](https://github.com/Kareadita/Kavita/releases) [![Docker Pulls](https://img.shields.io/docker/pulls/kizaing/kavita.svg)](https://hub.docker.com/r/kizaing/kavita/) -[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=Kareadita_Kavita&metric=alert_status)](https://sonarcloud.io/dashboard?id=Kareadita_Kavita) [![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=Kareadita_Kavita&metric=sqale_rating)](https://sonarcloud.io/dashboard?id=Kareadita_Kavita) [![Security Rating](https://sonarcloud.io/api/project_badges/measure?project=Kareadita_Kavita&metric=security_rating)](https://sonarcloud.io/dashboard?id=Kareadita_Kavita) -[![Donate via Paypal](https://img.shields.io/badge/donate-paypal-blue.svg?style=popout&logo=paypal)](https://paypal.me/majora2007?locale.x=en_US) +[![Backers on Open Collective](https://opencollective.com/kavita/backers/badge.svg)](#backers) +[![Sponsors on Open Collective](https://opencollective.com/kavita/sponsors/badge.svg)](#sponsors)
-## Goals: +## Goals - [x] Serve up Manga/Webtoons/Comics (cbr, cbz, zip/rar, 7zip, raw images) and Books (epub, mobi, azw, djvu, pdf) -- [x] First class responsive readers that work great on any device -- [x] Provide a dark theme for web app +- [x] First class responsive readers that work great on any device (phone, tablet, desktop) +- [x] Dark and Light themes - [ ] Provide hooks into metadata providers to fetch metadata for Comics, Manga, and Books - [ ] Metadata should allow for collections, want to read integration from 3rd party services, genres. - [x] Ability to manage users, access, and ratings - [ ] Ability to sync ratings and reviews to external services -- [x] Fully Accessible +- [x] Fully Accessible with active accessibility audits +- [x] Dedicated webtoon reader (in beta testing) - [ ] And so much [more...](https://github.com/Kareadita/Kavita/projects) +## Support +[![Reddit](https://img.shields.io/badge/reddit-discussion-FF4500.svg?maxAge=60)](https://www.reddit.com/r/KavitaManga/) +[![Discord](https://img.shields.io/badge/discord-chat-7289DA.svg?maxAge=60)](https://discord.gg/eczRp9eeem) +[![GitHub - Bugs and Feature Requests Only](https://img.shields.io/badge/github-issues-red.svg?maxAge=60)](https://github.com/Kareadita/Kavita/issues) -# How to contribute -- Ensure you've cloned Kavita-webui. You should have Projects/Kavita and Projects/Kavita-webui -- In Kavita-webui, run ng serve. This will start the webserver on localhost:4200 -- Run API project in Kavita, this will start the backend on localhost:5000 - - -## Deploy local build -- Run build.sh and pass the Runtime Identifier for your OS or just build.sh for all supported RIDs. - -## How to install +## Setup +### Non-Docker - Unzip the archive for your target OS - Place in a directory that is writable. If on windows, do not place in Program Files - Linux users must ensure the directory & kavita.db is writable by Kavita (might require starting server once) - Run Kavita - If you are updating, do not copy appsettings.json from the new version over. It will override your TokenKey and you will have to reauthenticate on your devices. -## Docker +### Docker Running your Kavita server in docker is super easy! Barely an inconvenience. You can run it with this command: ``` @@ -72,17 +68,49 @@ services: restart: unless-stopped ``` -**Note: Kavita is under heavy development and is being updated all the time, so the tag for current builds is :nightly. The :latest tag will be the latest stable release. There is also the :alpine tag if you want a smaller image, but it is only available for x64 systems.** +**Note: Kavita is under heavy development and is being updated all the time, so the tag for current builds is `:nightly`. The `:latest` tag will be the latest stable release. There is also the `:alpine` tag if you want a smaller image, but it is only available for x64 systems.** -## Got an Idea? -Got a great idea? Throw it up on the FeatHub or vote on another persons. Please check the [Project Board](https://github.com/Kareadita/Kavita/projects) first for a list of planned features. +## Feature Requests +Got a great idea? Throw it up on the FeatHub or vote on another idea. Please check the [Project Board](https://github.com/Kareadita/Kavita/projects) first for a list of planned features. [![Feature Requests](https://feathub.com/Kareadita/Kavita?format=svg)](https://feathub.com/Kareadita/Kavita) -## Want to help? -I am looking for developers with a passion for building the next Plex for Reading. 
Developers with C#/ASP.NET, Angular 11 please reach out on [Discord](https://discord.gg/eczRp9eeem). + +## Contributors + +This project exists thanks to all the people who contribute. [Contribute](CONTRIBUTING.md). + + ## Donate -If you like Kavita, have gotten good use out of it or feel like you want to say thanks with a few bucks, feel free to donate. Money will -likely go towards beer or hosting. -[![Donate via Paypal](https://img.shields.io/badge/donate-paypal-blue.svg?style=popout&logo=paypal)](https://paypal.me/majora2007?locale.x=en_US) +If you like Kavita, have gotten good use out of it or feel like you want to say thanks with a few bucks, feel free to donate. Money will go towards +expenses related to Kavita. Back us through [OpenCollective](https://opencollective.com/Kavita#backer). + +## Backers + +Thank you to all our backers! 🙏 [Become a backer](https://opencollective.com/Kavita#backer) + + + +## Sponsors + +Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [Become a sponsor](https://opencollective.com/Kavita#sponsor) + + + +## Mega Sponsors + + +## JetBrains +Thank you to [ JetBrains](http://www.jetbrains.com/) for providing us with free licenses to their great tools. + +* [ Rider](http://www.jetbrains.com/rider/) +* [ dotTrace](http://www.jetbrains.com/dottrace/) + +## Sentry +Thank you to [ Sentry](https://sentry.io/welcome/) for providing us with free license to their software. + +### License + +* [GNU GPL v3](http://www.gnu.org/licenses/gpl.html) +* Copyright 2010-2021 \ No newline at end of file diff --git a/action-build.sh b/action-build.sh new file mode 100755 index 000000000..b4c82b6d0 --- /dev/null +++ b/action-build.sh @@ -0,0 +1,103 @@ +#! /bin/bash +set -e + +outputFolder='_output' + +ProgressStart() +{ + echo "Start '$1'" +} + +ProgressEnd() +{ + echo "Finish '$1'" +} + +Build() +{ + local RID="$1" + + ProgressStart "Build for $RID" + + slnFile=Kavita.sln + + dotnet clean $slnFile -c Release + + dotnet msbuild -restore $slnFile -p:Configuration=Release -p:Platform="Any CPU" -p:RuntimeIdentifiers=$RID + + ProgressEnd "Build for $RID" +} + +Package() +{ + local framework="$1" + local runtime="$2" + local lOutputFolder=../_output/"$runtime"/Kavita + + ProgressStart "Creating $runtime Package for $framework" + + # TODO: Use no-restore? 
Because Build should have already done it for us + echo "Building" + cd API + echo dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + + echo "Renaming API -> Kavita" + mv "$lOutputFolder"/API "$lOutputFolder"/Kavita + + echo "Copying webui wwwroot to build" + cp -r wwwroot/* "$lOutputFolder"/wwwroot/ + + echo "Copying Install information" + cp ../INSTALL.txt "$lOutputFolder"/README.txt + + echo "Copying LICENSE" + cp ../LICENSE "$lOutputFolder"/LICENSE.txt + + echo "Creating tar" + cd ../$outputFolder/"$runtime"/ + tar -czvf ../kavita-$runtime.tar.gz Kavita + + ProgressEnd "Creating $runtime Package for $framework" + +} + +BuildUI() +{ + ProgressStart 'Building UI' + echo 'Removing old wwwroot' + rm -rf API/wwwroot/* + cd ../Kavita-webui/ || exit + echo 'Installing web dependencies' + npm install + echo 'Building UI' + npm run prod + ls -l dist + echo 'Copying back to Kavita wwwroot' + cp -r dist/* ../Kavita/API/wwwroot + ls -l ../Kavita/API/wwwroot + cd ../Kavita/ || exit + ProgressEnd 'Building UI' +} + +dir=$PWD + +if [ -d _output ] +then + rm -r _output/ +fi + +#Build for x64 +Build "linux-x64" +Package "net5.0" "linux-x64" +cd "$dir" + +#Build for arm +Build "linux-arm" +Package "net5.0" "linux-arm" +cd "$dir" + +#Build for arm64 +Build "linux-arm64" +Package "net5.0" "linux-arm64" +cd "$dir" \ No newline at end of file diff --git a/build.sh b/build.sh index 043cb559f..7e137a790 100644 --- a/build.sh +++ b/build.sh @@ -15,6 +15,7 @@ ProgressEnd() UpdateVersionNumber() { + # TODO: Enhance this to increment version number in KavitaCommon.csproj if [ "$KAVITAVERSION" != "" ]; then echo "Updating Version Info" sed -i'' -e "s/[0-9.*]\+<\/AssemblyVersion>/$KAVITAVERSION<\/AssemblyVersion>/g" src/Directory.Build.props @@ -31,7 +32,6 @@ Build() slnFile=Kavita.sln - dotnet clean $slnFile -c Debug dotnet clean $slnFile -c Release if [[ -z "$RID" ]]; @@ -47,9 +47,15 @@ Build() BuildUI() { ProgressStart 'Building UI' + echo 'Removing old wwwroot' + rm -rf API/wwwroot/* cd ../Kavita-webui/ || exit + echo 'Installing web dependencies' npm install + echo 'Building UI' npm run prod + echo 'Copying back to Kavita wwwroot' + cp -r dist/* ../Kavita/API/wwwroot cd ../Kavita/ || exit ProgressEnd 'Building UI' } @@ -67,6 +73,9 @@ Package() cd API echo dotnet publish -c Release --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework dotnet publish -c Release --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + + echo "Recopying wwwroot due to bug" + cp -r ./wwwroot/* $lOutputFolder/wwwroot echo "Copying Install information" cp ../INSTALL.txt "$lOutputFolder"/README.txt @@ -92,8 +101,8 @@ Package() RID="$1" -Build BuildUI +Build dir=$PWD diff --git a/build_target.sh b/build_target.sh deleted file mode 100644 index 56c54ba79..000000000 --- a/build_target.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash - -mkdir Projects - -cd Projects - -git clone https://github.com/Kareadita/Kavita.git -git clone https://github.com/Kareadita/Kavita-webui.git - -cd Kavita -chmod +x build.sh - -#Builds program based on the target platform - -if [ "$TARGETPLATFORM" == "linux/amd64" ] -then - ./build.sh linux-x64 - mv /Projects/Kavita/_output/linux-x64 /Projects/Kavita/_output/build -elif [ "$TARGETPLATFORM" == "linux/arm/v7" ] -then - ./build.sh linux-arm - mv /Projects/Kavita/_output/linux-arm 
/Projects/Kavita/_output/build -elif [ "$TARGETPLATFORM" == "linux/arm64" ] -then - ./build.sh linux-arm64 - mv /Projects/Kavita/_output/linux-arm64 /Projects/Kavita/_output/build -fi diff --git a/copy_runtime.sh b/copy_runtime.sh new file mode 100755 index 000000000..c7f2ccabd --- /dev/null +++ b/copy_runtime.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +#Copies the correct version of Kavita into the image + +set -xv + +if [ "$TARGETPLATFORM" == "linux/amd64" ] +then + tar xf /files/kavita-linux-x64.tar.gz -C / +elif [ "$TARGETPLATFORM" == "linux/arm/v7" ] +then + tar xf /files/kavita-linux-arm.tar.gz -C / +elif [ "$TARGETPLATFORM" == "linux/arm64" ] +then + tar xf /files/kavita-linux-arm64.tar.gz -C / +fi diff --git a/docker-build.sh b/docker-build.sh new file mode 100644 index 000000000..a0adc4bbf --- /dev/null +++ b/docker-build.sh @@ -0,0 +1,111 @@ +#! /bin/bash +set -e + +outputFolder='_output' + +ProgressStart() +{ + echo "Start '$1'" +} + +ProgressEnd() +{ + echo "Finish '$1'" +} + +Build() +{ + local RID="$1" + + ProgressStart 'Build for $RID' + + slnFile=Kavita.sln + + dotnet clean $slnFile -c Debug + dotnet clean $slnFile -c Release + + dotnet msbuild -restore $slnFile -p:Configuration=Release -p:Platform="Any CPU" -p:RuntimeIdentifiers=$RID + + ProgressEnd 'Build for $RID' +} + +BuildUI() +{ + ProgressStart 'Building UI' + cd ../Kavita-webui/ || exit + npm install + npm run prod + cd ../Kavita/ || exit + ProgressEnd 'Building UI' + + ProgressStart 'Building UI' + echo 'Removing old wwwroot' + rm -rf API/wwwroot/* + cd ../Kavita-webui/ || exit + echo 'Installing web dependencies' + npm install + echo 'Building UI' + npm run prod + echo 'Copying back to Kavita wwwroot' + cp -r dist/* ../Kavita/API/wwwroot + cd ../Kavita/ || exit + ProgressEnd 'Building UI' +} + +Package() +{ + local framework="$1" + local runtime="$2" + local lOutputFolder=../_output/"$runtime"/Kavita + + ProgressStart "Creating $runtime Package for $framework" + + # TODO: Use no-restore? Because Build should have already done it for us + echo "Building" + cd API + echo dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + + echo "Copying Install information" + cp ../INSTALL.txt "$lOutputFolder"/README.txt + + echo "Copying LICENSE" + cp ../LICENSE "$lOutputFolder"/LICENSE.txt + + echo "Renaming API -> Kavita" + mv "$lOutputFolder"/API "$lOutputFolder"/Kavita + + echo "Creating tar" + cd ../$outputFolder/"$runtime"/ + tar -czvf ../kavita-$runtime.tar.gz Kavita + + ProgressEnd "Creating $runtime Package for $framework" + +} + +dir=$PWD + +if [ -d _output ] +then + rm -r _output/ +fi + +BuildUI + +#Build for x64 +Build "linux-x64" +Package "net5.0" "linux-x64" +cd "$dir" + +#Build for arm +Build "linux-arm" +Package "net5.0" "linux-arm" +cd "$dir" + +#Build for arm64 +Build "linux-arm64" +Package "net5.0" "linux-arm64" +cd "$dir" + +#Builds Docker images +docker buildx build -t kizaing/kavita:nightly --platform linux/amd64,linux/arm/v7,linux/arm64 . 
--push \ No newline at end of file diff --git a/entrypoint.sh b/entrypoint.sh index 87d10d6ec..aaa898a7c 100644 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -13,7 +13,7 @@ then rm /kavita/appsettings.json ln -s /kavita/data/appsettings.json /kavita/ else - mv /kavita/appsettings.json /kavita/data/ + mv /kavita/appsettings.json /kavita/data/ || true ln -s /kavita/data/appsettings.json /kavita/ fi @@ -55,11 +55,11 @@ then else if [ -d /kavita/data/logs ] then - touch /kavita/data/logs/kavita.log + echo "" > /kavita/data/logs/kavita.log || true ln -s /kavita/data/logs/kavita.log /kavita/ else mkdir /kavita/data/logs - touch /kavita/data/logs/kavita.log + echo "" > /kavita/data/logs/kavita.log || true ln -s /kavita/data/logs/kavita.log /kavita/ fi