Merge branch 'develop'

Joseph Milazzo 2021-07-25 11:07:56 -05:00
commit dbcec66b10
409 changed files with 55545 additions and 1598 deletions

.browserslistrc Normal file

@@ -0,0 +1,17 @@
# This file is used by the build system to adjust CSS and JS output to support the specified browsers below.
# For additional information regarding the format and rule options, please see:
# https://github.com/browserslist/browserslist#queries
# For the full list of supported browsers by the Angular framework, please see:
# https://angular.io/guide/browser-support
# You can see what browsers were selected by your queries by running:
# npx browserslist
last 1 Chrome version
last 1 Firefox version
last 2 Edge major versions
last 2 Safari major versions
last 2 iOS major versions
Firefox ESR
not IE 11 # Angular supports IE 11 only as an opt-in. To opt-in, remove the 'not' prefix on this line.

.editorconfig Normal file

@@ -0,0 +1,18 @@
# Editor configuration, see https://editorconfig.org
root = true
[*]
charset = utf-8
indent_style = space
indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true
[*.ts]
quote_type = single
indent_size = 2
[*.md]
max_line_length = off
trim_trailing_whitespace = false

.github/FUNDING.yml vendored

@@ -9,4 +9,4 @@ community_bridge: # Replace with a single Community Bridge project-name e.g., cl
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: ["https://paypal.me/majora2007"]
custom: []


@@ -1,17 +0,0 @@
name: Release messages to discord announcement channel
on:
release:
types:
- created
jobs:
run_main:
runs-on: ubuntu-18.04
name: Sends custom message
steps:
- name: Sending message
uses: nhevia/discord-styled-releases@main
with:
webhook_id: ${{ secrets.DISCORD_WEBHOOK_ID }}
webhook_token: ${{ secrets.DISCORD_WEBHOOK_TOKEN }}


@@ -7,26 +7,19 @@ on:
jobs:
docker:
name: Building Nightly Docker
runs-on: ubuntu-latest
steps:
- name: Check Out Repo
uses: actions/checkout@v2
- name: Check Out WebUI
uses: actions/checkout@v2
with:
repository: Kareadita/Kavita-webui
ref: develop
path: Kavita-webui/
- name: NodeJS to Compile WebUI
uses: actions/setup-node@v2.1.5
with:
node-version: '14'
- run: |
cd Kavita-webui/ || exit
cd UI/Web || exit
echo 'Installing web dependencies'
npm install
@@ -34,15 +27,31 @@ jobs:
npm run prod
echo 'Copying back to Kavita wwwroot'
rsync -a dist/ ../API/wwwroot/
rsync -a dist/ ../../API/wwwroot/
cd ../ || exit
- name: Get csproj Version
uses: naminodarie/get-net-sdk-project-versions-action@v1
id: get-version
with:
proj-path: Kavita.Common/Kavita.Common.csproj
- name: Echo csproj version
run: echo "${{steps.get-version.outputs.assembly-version}}"
- name: Compile dotnet app
uses: actions/setup-dotnet@v1
with:
dotnet-version: '5.0.x'
- run: ./action-build.sh
- run: ./monorepo-build.sh
- name: Trigger Sentry workflow
uses: benc-uk/workflow-dispatch@v1
with:
workflow: Sentry Map Release
token: ${{ secrets.REPO_GHA_PAT }}
inputs: '{ "version": "${{steps.get-version.outputs.assembly-version}}" }'
- name: Login to Docker Hub
uses: docker/login-action@v1

.github/workflows/sentry-map.yml vendored Normal file

@@ -0,0 +1,63 @@
name: Sentry Map Release
on:
workflow_dispatch:
inputs:
version:
description: "version to update package.json"
required: true
# No default
jobs:
build:
name: Setup Sentry CLI
runs-on: ubuntu-latest
steps:
- uses: mathieu-bour/setup-sentry-cli@1.2.0
with:
version: latest
token: ${{ secrets.SENTRY_TOKEN }}
organization: kavita-7n
project: angular
- name: Check out repository
uses: actions/checkout@v2
- name: Parse Version
run: |
version='${{ github.event.inputs.version }}'
newVersion=${version%.*}
echo $newVersion
echo "::set-output name=VERSION::$newVersion"
id: parse-version
- name: NodeJS to Compile WebUI
uses: actions/setup-node@v2.1.5
with:
node-version: '14'
- run: |
cd UI/Web || exit
echo 'Installing web dependencies'
npm install
npm version --allow-same-version "${{ steps.parse-version.outputs.VERSION }}"
echo 'Building UI'
npm run prod
- name: Cache dependencies
uses: actions/cache@v2
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
- name: Create Release
run: sentry-cli releases new ${{ steps.parse-version.outputs.VERSION }}
- name: Upload Source Maps
run: sentry-cli releases files ${{ steps.parse-version.outputs.VERSION }} upload-sourcemaps UI/Web/dist
- name: Finalize Release
run: sentry-cli releases finalize ${{ steps.parse-version.outputs.VERSION }}


@@ -1,4 +1,4 @@
name: .NET Core Build
name: .NET Build Test and Sonar Scan
on:
push:
@@ -9,31 +9,34 @@ on:
jobs:
build:
name: Build
name: Build and Scan
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: Checkout Repo
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Setup .NET Core
uses: actions/setup-dotnet@v1
with:
dotnet-version: 5.0.100
- name: Install dependencies
run: dotnet restore
- name: Set up JDK 11
uses: actions/setup-java@v1
with:
java-version: 1.11
- uses: actions/checkout@v2
with:
fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
- name: Cache SonarCloud packages
uses: actions/cache@v1
with:
path: ~\sonar\cache
key: ${{ runner.os }}-sonar
restore-keys: ${{ runner.os }}-sonar
- name: Cache SonarCloud scanner
id: cache-sonar-scanner
uses: actions/cache@v1
@@ -41,12 +44,14 @@ jobs:
path: .\.sonar\scanner
key: ${{ runner.os }}-sonar-scanner
restore-keys: ${{ runner.os }}-sonar-scanner
- name: Install SonarCloud scanner
if: steps.cache-sonar-scanner.outputs.cache-hit != 'true'
shell: powershell
run: |
New-Item -Path .\.sonar\scanner -ItemType Directory
dotnet tool update dotnet-sonarscanner --tool-path .\.sonar\scanner
- name: Build and analyze
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
@@ -56,5 +61,6 @@ jobs:
.\.sonar\scanner\dotnet-sonarscanner begin /k:"Kareadita_Kavita" /o:"kareadita" /d:sonar.login="${{ secrets.SONAR_TOKEN }}" /d:sonar.host.url="https://sonarcloud.io"
dotnet build --configuration Release
.\.sonar\scanner\dotnet-sonarscanner end /d:sonar.login="${{ secrets.SONAR_TOKEN }}"
- name: Test
run: dotnet test --no-restore --verbosity normal


@@ -13,20 +13,13 @@ jobs:
- name: Check Out Repo
uses: actions/checkout@v2
- name: Check Out WebUI
uses: actions/checkout@v2
with:
repository: Kareadita/Kavita-webui
ref: main
path: Kavita-webui/
- name: NodeJS to Compile WebUI
uses: actions/setup-node@v2.1.5
with:
node-version: '14'
- run: |
cd Kavita-webui/ || exit
cd UI/Web || exit
echo 'Installing web dependencies'
npm install
@@ -34,15 +27,31 @@ jobs:
npm run prod
echo 'Copying back to Kavita wwwroot'
rsync -a dist/ ../API/wwwroot/
rsync -a dist/ ../../API/wwwroot/
cd ../ || exit
- name: Get csproj Version
uses: naminodarie/get-net-sdk-project-versions-action@v1
id: get-version
with:
proj-path: Kavita.Common/Kavita.Common.csproj
- name: Echo csproj version
run: echo "${{steps.get-version.outputs.assembly-version}}"
- name: Compile dotnet app
uses: actions/setup-dotnet@v1
with:
dotnet-version: '5.0.x'
- run: ./action-build.sh
- run: ./monorepo-build.sh
- name: Trigger Sentry workflow
uses: benc-uk/workflow-dispatch@v1
with:
workflow: Sentry Map Release
token: ${{ secrets.REPO_GHA_PAT }}
inputs: '{ "version": "${{steps.get-version.outputs.assembly-version}}" }'
- name: Login to Docker Hub
uses: docker/login-action@v1

.gitignore vendored

@@ -435,11 +435,54 @@ $RECYCLE.BIN/
##
## Visual Studio Code
##
# See http://help.github.com/ignore-files/ for more about ignoring files.
# compiled output
/UI/Web/dist
/tmp
/out-tsc
# Only exists if Bazel was run
/bazel-out
# dependencies
/node_modules
# profiling files
chrome-profiler-events*.json
speed-measure-plugin*.json
# IDEs and editors
/.idea
.project
.classpath
.c9/
*.launch
.settings/
*.sublime-workspace
# IDE - VSCode
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
.history/*
# misc
/.sass-cache
/connect.lock
/coverage
/libpeerconnection.log
npm-debug.log
yarn-error.log
testem.log
/typings
# System Files
.DS_Store
Thumbs.db
ssl/
# App specific
appsettings.json
@@ -448,10 +491,11 @@ appsettings.json
/API/kavita.db-wal
/API/Hangfire.db
/API/Hangfire-log.db
cache/
API/cache/
/API/wwwroot/
/API/cache/
/API/temp/
_temp/
_output/
stats/
API/stats/
UI/Web/dist/

.vscode/settings.json vendored Normal file

@@ -0,0 +1,50 @@
{
"better-comments.tags": [
{
"tag": "note",
"color": "#FF2D00",
"strikethrough": false,
"underline": false,
"backgroundColor": "transparent",
"bold": true,
"italic": false
},
{
"tag": "?",
"color": "#3498DB",
"strikethrough": false,
"underline": false,
"backgroundColor": "transparent",
"bold": false,
"italic": false
},
{
"tag": "//",
"color": "#474747",
"strikethrough": true,
"underline": false,
"backgroundColor": "transparent",
"bold": false,
"italic": false
},
{
"tag": "todo",
"color": "#FF8C00",
"strikethrough": false,
"underline": false,
"backgroundColor": "transparent",
"bold": true,
"italic": false
},
{
"tag": "*",
"color": "#98C379",
"strikethrough": false,
"underline": false,
"backgroundColor": "transparent",
"bold": false,
"italic": false
}
]
}


@@ -1,5 +1,6 @@
using API.Entities;
using API.Extensions;
using API.Parser;
using Xunit;
namespace API.Tests.Extensions
@@ -15,6 +16,7 @@ namespace API.Tests.Extensions
[InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot"}, new [] {"salem's lot"}, true)]
// Different normalizations pass because we check against an on-the-fly normalization calculation, so we don't delete series just because the normalization algorithm changes
[InlineData(new [] {"Salem's Lot", "Salem's Lot", "Salem's Lot", "salems lot"}, new [] {"salem's lot"}, true)]
[InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, new [] {"Kanojo, Okarishimasu"}, true)]
public void NameInListTest(string[] seriesInput, string[] list, bool expected)
{
var series = new Series()
@@ -28,5 +30,27 @@ namespace API.Tests.Extensions
Assert.Equal(expected, series.NameInList(list));
}
[Theory]
[InlineData(new [] {"Darker than Black", "Darker Than Black", "Darker than Black"}, "Darker than Black", true)]
[InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, "Kanojo, Okarishimasu", true)]
[InlineData(new [] {"Rent-a-Girlfriend", "Rent-a-Girlfriend", "Kanojo, Okarishimasu", "rentagirlfriend"}, "Rent", false)]
public void NameInParserInfoTest(string[] seriesInput, string parserSeries, bool expected)
{
var series = new Series()
{
Name = seriesInput[0],
LocalizedName = seriesInput[1],
OriginalName = seriesInput[2],
NormalizedName = seriesInput.Length == 4 ? seriesInput[3] : API.Parser.Parser.Normalize(seriesInput[0]),
Metadata = new SeriesMetadata()
};
var info = new ParserInfo();
info.Series = parserSeries;
Assert.Equal(expected, series.NameInParserInfo(info));
}
}
}


@@ -146,6 +146,14 @@ namespace API.Tests.Parser
[InlineData("Kodoja #001 (March 2016)", "Kodoja")]
[InlineData("Boku No Kokoro No Yabai Yatsu - Chapter 054 I Prayed At The Shrine (V0).cbz", "Boku No Kokoro No Yabai Yatsu")]
[InlineData("Kiss x Sis - Ch.36 - A Cold Home Visit.cbz", "Kiss x Sis")]
[InlineData("Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ)", "Seraph of the End - Vampire Reign")]
[InlineData("Grand Blue Dreaming - SP02 Extra (2019) (Digital) (danke-Empire).cbz", "Grand Blue Dreaming")]
[InlineData("Yuusha Ga Shinda! - Vol.tbd Chapter 27.001 V2 Infection ①.cbz", "Yuusha Ga Shinda!")]
[InlineData("Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", "Seraph of the End - Vampire Reign")]
[InlineData("Getsuyoubi no Tawawa - Ch. 001 - Ai-chan, Part 1", "Getsuyoubi no Tawawa")]
[InlineData("Please Go Home, Akutsu-San! - Chapter 038.5 - Volume Announcement.cbz", "Please Go Home, Akutsu-San!")]
[InlineData("Killing Bites - Vol 11 Chapter 050 Save Me, Nunupi!.cbz", "Killing Bites")]
[InlineData("Mad Chimera World - Volume 005 - Chapter 026.cbz", "Mad Chimera World")]
public void ParseSeriesTest(string filename, string expected)
{
Assert.Equal(expected, API.Parser.Parser.ParseSeries(filename));
@@ -206,13 +214,14 @@ namespace API.Tests.Parser
[InlineData("Kiss x Sis - Ch.00 - Let's Start from 0.cbz", "0")]
[InlineData("[Hidoi]_Amaenaideyo_MS_vol01_chp02.rar", "2")]
[InlineData("Okusama wa Shougakusei c003 (v01) [bokuwaNEET]", "3")]
[InlineData("Kiss x Sis - Ch.15 - The Angst of a 15 Year Old Boy.cbz", "15")]
[InlineData("Tomogui Kyoushitsu - Chapter 006 Game 005 - Fingernails On Right Hand (Part 002).cbz", "6")]
[InlineData("Noblesse - Episode 406 (52 Pages).7z", "406")]
[InlineData("X-Men v1 #201 (September 2007).cbz", "201")]
[InlineData("Kodoja #001 (March 2016)", "1")]
[InlineData("Noblesse - Episode 429 (74 Pages).7z", "429")]
[InlineData("Boku No Kokoro No Yabai Yatsu - Chapter 054 I Prayed At The Shrine (V0).cbz", "54")]
[InlineData("Ijousha No Ai - Vol.01 Chapter 029 8 Years Ago", "29")]
[InlineData("Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz", "9")]
public void ParseChaptersTest(string filename, string expected)
{
Assert.Equal(expected, API.Parser.Parser.ParseChapter(filename));
@@ -266,6 +275,32 @@ namespace API.Tests.Parser
Assert.Equal(expected, API.Parser.Parser.ParseMangaSpecial(inputFile));
}
private static ParserInfo CreateParserInfo(string series, string chapter, string volume, bool isSpecial = false)
{
return new ParserInfo()
{
Chapters = chapter,
Volumes = volume,
IsSpecial = isSpecial,
Series = series,
};
}
[Theory]
[InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", "Btooom!~1~1")]
[InlineData("/manga/Btooom!/Vol.1 Chapter 2/1.cbz", "Btooom!~1~2")]
public void ParseFromFallbackFoldersTest(string inputFile, string expectedParseInfo)
{
const string rootDirectory = "/manga/";
var tokens = expectedParseInfo.Split("~");
var actual = new ParserInfo {Chapters = "0", Volumes = "0"};
API.Parser.Parser.ParseFromFallbackFolders(inputFile, rootDirectory, LibraryType.Manga, ref actual);
Assert.Equal(tokens[0], actual.Series);
Assert.Equal(tokens[1], actual.Volumes);
Assert.Equal(tokens[2], actual.Chapters);
}
[Fact]
public void ParseInfoTest()
{
@@ -351,6 +386,22 @@ namespace API.Tests.Parser
FullFilePath = filepath
});
filepath = @"E:\Manga\Summer Time Rendering\Specials\Record 014 (between chapter 083 and ch084) SP11.cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Summer Time Rendering", Volumes = "0", Edition = "",
Chapters = "0", Filename = "Record 014 (between chapter 083 and ch084) SP11.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = true
});
filepath = @"E:\Manga\Seraph of the End\Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
expected.Add(filepath, new ParserInfo
{
Series = "Seraph of the End - Vampire Reign", Volumes = "0", Edition = "",
Chapters = "93", Filename = "Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
foreach (var file in expected.Keys)
{


@@ -75,7 +75,7 @@ namespace API.Tests.Parser
[Theory]
[InlineData("test.epub", true)]
[InlineData("test.pdf", false)]
[InlineData("test.pdf", true)]
[InlineData("test.mobi", false)]
[InlineData("test.djvu", false)]
[InlineData("test.zip", false)]


@@ -1,5 +1,5 @@
using System.IO;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;


@@ -1,4 +1,5 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using API.Services;
@@ -23,6 +24,7 @@ namespace API.Tests.Services
public void GetFilesTest_Should_Be28()
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ScannerService/Manga");
// ReSharper disable once CollectionNeverQueried.Local
var files = new List<string>();
var fileCount = DirectoryService.TraverseTreeParallelForEach(testDirectory, s => files.Add(s),
API.Parser.Parser.ArchiveFileExtensions, _logger);
@@ -93,9 +95,20 @@ namespace API.Tests.Services
[InlineData("C:/Manga", "C:/Manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")]
[InlineData("C:/Manga", @"C:\Manga\Love Hina\Specials\Omake\", "Omake,Specials,Love Hina")]
[InlineData(@"/manga/", @"/manga/Love Hina/Specials/Omake/", "Omake,Specials,Love Hina")]
[InlineData(@"/manga/", @"/manga/", "")]
[InlineData(@"E:\test", @"E:\test\Sweet X Trouble\Sweet X Trouble - Chapter 001.cbz", "Sweet X Trouble")]
[InlineData(@"C:\/mount/gdrive/Library/Test Library/Comics/", @"C:\/mount/gdrive/Library/Test Library/Comics\godzilla rivals vs hedorah\vol 1\", "vol 1,godzilla rivals vs hedorah")]
[InlineData(@"/manga/", @"/manga/Btooom!/Vol.1 Chapter 2/1.cbz", "Vol.1 Chapter 2,Btooom!")]
[InlineData(@"C:/", @"C://Btooom!/Vol.1 Chapter 2/1.cbz", "Vol.1 Chapter 2,Btooom!")]
[InlineData(@"C:\\", @"C://Btooom!/Vol.1 Chapter 2/1.cbz", "Vol.1 Chapter 2,Btooom!")]
[InlineData(@"C://mount/gdrive/Library/Test Library/Comics", @"C://mount/gdrive/Library/Test Library/Comics/Dragon Age/Test", "Test,Dragon Age")]
public void GetFoldersTillRoot_Test(string rootPath, string fullpath, string expectedArray)
{
var expected = expectedArray.Split(",");
if (expectedArray.Equals(string.Empty))
{
expected = Array.Empty<string>();
}
Assert.Equal(expected, DirectoryService.GetFoldersTillRoot(rootPath, fullpath));
}
}


@@ -7,11 +7,13 @@ using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Entities.Enums;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
using API.Services;
using API.Services.Tasks;
using API.Services.Tasks.Scanner;
using API.Tests.Helpers;
using AutoMapper;
using Microsoft.Data.Sqlite;
@@ -20,24 +22,25 @@ using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Services
{
public class ScannerServiceTests : IDisposable
{
private readonly ITestOutputHelper _testOutputHelper;
private readonly ScannerService _scannerService;
private readonly ILogger<ScannerService> _logger = Substitute.For<ILogger<ScannerService>>();
private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
private readonly IBookService _bookService = Substitute.For<IBookService>();
private readonly IImageService _imageService = Substitute.For<IImageService>();
private readonly ILogger<MetadataService> _metadataLogger = Substitute.For<ILogger<MetadataService>>();
private readonly IDirectoryService _directoryService = Substitute.For<IDirectoryService>();
private readonly ICacheService _cacheService = Substitute.For<ICacheService>();
private readonly DbConnection _connection;
private readonly DataContext _context;
public ScannerServiceTests(ITestOutputHelper testOutputHelper)
public ScannerServiceTests()
{
var contextOptions = new DbContextOptionsBuilder()
.UseSqlite(CreateInMemoryDatabase())
@@ -47,21 +50,11 @@ namespace API.Tests.Services
_context = new DataContext(contextOptions);
Task.Run(SeedDb).GetAwaiter().GetResult();
//BackgroundJob.Enqueue is what I need to mock or something (it's static...)
// ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService,
// IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService,
// IBackgroundJobClient jobClient
//var taskScheduler = new TaskScheduler(Substitute.For<ICacheService>(), Substitute.For<ILogger<TaskScheduler>>(), Substitute.For<)
// Substitute.For<UserManager<AppUser>>() - Not needed because only for UserService
IUnitOfWork unitOfWork = new UnitOfWork(_context, Substitute.For<IMapper>(), null);
_testOutputHelper = testOutputHelper;
IMetadataService metadataService = Substitute.For<MetadataService>(unitOfWork, _metadataLogger, _archiveService, _bookService);
_scannerService = new ScannerService(unitOfWork, _logger, _archiveService, metadataService, _bookService);
IMetadataService metadataService = Substitute.For<MetadataService>(unitOfWork, _metadataLogger, _archiveService, _bookService, _imageService);
_scannerService = new ScannerService(unitOfWork, _logger, _archiveService, metadataService, _bookService, _cacheService);
}
private async Task<bool> SeedDb()
@@ -83,66 +76,64 @@ namespace API.Tests.Services
return await _context.SaveChangesAsync() > 0;
}
// [Fact]
// public void Test()
// {
// _scannerService.ScanLibrary(1, false);
//
// var series = _unitOfWork.LibraryRepository.GetLibraryForIdAsync(1).Result.Series;
// }
[Fact]
public void FindSeriesNotOnDisk_Should_RemoveNothing_Test()
{
var infos = new Dictionary<string, List<ParserInfo>>();
var infos = new Dictionary<ParsedSeries, List<ParserInfo>>();
AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black"});
AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1"});
AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10"});
AddToParsedInfo(infos, new ParserInfo() {Series = "Darker than Black", Format = MangaFormat.Archive});
AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "1", Format = MangaFormat.Archive});
AddToParsedInfo(infos, new ParserInfo() {Series = "Cage of Eden", Volumes = "10", Format = MangaFormat.Archive});
var existingSeries = new List<Series>();
existingSeries.Add(new Series()
var existingSeries = new List<Series>
{
Name = "Cage of Eden",
LocalizedName = "Cage of Eden",
OriginalName = "Cage of Eden",
NormalizedName = API.Parser.Parser.Normalize("Cage of Eden"),
Metadata = new SeriesMetadata()
});
existingSeries.Add(new Series()
{
Name = "Darker Than Black",
LocalizedName = "Darker Than Black",
OriginalName = "Darker Than Black",
NormalizedName = API.Parser.Parser.Normalize("Darker Than Black"),
Metadata = new SeriesMetadata()
});
new Series()
{
Name = "Cage of Eden",
LocalizedName = "Cage of Eden",
OriginalName = "Cage of Eden",
NormalizedName = API.Parser.Parser.Normalize("Cage of Eden"),
Metadata = new SeriesMetadata(),
Format = MangaFormat.Archive
},
new Series()
{
Name = "Darker Than Black",
LocalizedName = "Darker Than Black",
OriginalName = "Darker Than Black",
NormalizedName = API.Parser.Parser.Normalize("Darker Than Black"),
Metadata = new SeriesMetadata(),
Format = MangaFormat.Archive
}
};
Assert.Empty(_scannerService.FindSeriesNotOnDisk(existingSeries, infos));
}
[Theory]
[InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")]
[InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")]
[InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker than Black")]
[InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")]
public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected)
{
var collectedSeries = new ConcurrentDictionary<string, List<ParserInfo>>();
foreach (var seriesName in existingSeriesNames)
{
AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName});
}
var actualName = _scannerService.MergeName(collectedSeries, new ParserInfo()
{
Series = parsedInfoName
});
Assert.Equal(expected, actualName);
}
// TODO: Figure out how to do this with ParseScannedFiles
// [Theory]
// [InlineData(new [] {"Darker than Black"}, "Darker than Black", "Darker than Black")]
// [InlineData(new [] {"Darker than Black"}, "Darker Than Black", "Darker than Black")]
// [InlineData(new [] {"Darker than Black"}, "Darker Than Black!", "Darker than Black")]
// [InlineData(new [] {""}, "Runaway Jack", "Runaway Jack")]
// public void MergeNameTest(string[] existingSeriesNames, string parsedInfoName, string expected)
// {
// var collectedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
// foreach (var seriesName in existingSeriesNames)
// {
// AddToParsedInfo(collectedSeries, new ParserInfo() {Series = seriesName, Format = MangaFormat.Archive});
// }
//
// var actualName = new ParseScannedFiles(_bookService, _logger).MergeName(collectedSeries, new ParserInfo()
// {
// Series = parsedInfoName,
// Format = MangaFormat.Archive
// });
//
// Assert.Equal(expected, actualName);
// }
[Fact]
public void RemoveMissingSeries_Should_RemoveSeries()
@@ -163,11 +154,19 @@ namespace API.Tests.Services
Assert.Equal(missingSeries.Count, removeCount);
}
private void AddToParsedInfo(IDictionary<string, List<ParserInfo>> collectedSeries, ParserInfo info)
private void AddToParsedInfo(IDictionary<ParsedSeries, List<ParserInfo>> collectedSeries, ParserInfo info)
{
var existingKey = collectedSeries.Keys.FirstOrDefault(ps =>
ps.Format == info.Format && ps.NormalizedName == API.Parser.Parser.Normalize(info.Series));
existingKey ??= new ParsedSeries()
{
Format = info.Format,
Name = info.Series,
NormalizedName = API.Parser.Parser.Normalize(info.Series)
};
if (collectedSeries.GetType() == typeof(ConcurrentDictionary<,>))
{
((ConcurrentDictionary<string, List<ParserInfo>>) collectedSeries).AddOrUpdate(info.Series, new List<ParserInfo>() {info}, (_, oldValue) =>
((ConcurrentDictionary<ParsedSeries, List<ParserInfo>>) collectedSeries).AddOrUpdate(existingKey, new List<ParserInfo>() {info}, (_, oldValue) =>
{
oldValue ??= new List<ParserInfo>();
if (!oldValue.Contains(info))
@@ -180,84 +179,25 @@ namespace API.Tests.Services
}
else
{
if (!collectedSeries.ContainsKey(info.Series))
if (!collectedSeries.ContainsKey(existingKey))
{
collectedSeries.Add(info.Series, new List<ParserInfo>() {info});
collectedSeries.Add(existingKey, new List<ParserInfo>() {info});
}
else
{
var list = collectedSeries[info.Series];
var list = collectedSeries[existingKey];
if (!list.Contains(info))
{
list.Add(info);
}
collectedSeries[info.Series] = list;
collectedSeries[existingKey] = list;
}
}
}
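
The grouping change above is the heart of this test file's diff: parsed infos are now keyed by a ParsedSeries (name, normalized name, format) rather than the raw series string, so casing and punctuation variants of one series collapse into a single entry when the format matches. A minimal, self-contained C# sketch of that lookup, with Normalize standing in for API.Parser.Parser.Normalize (an assumption, not the committed code):

// Sketch only: the ParsedSeries shape and Normalize are assumed from the usage above.
using System;
using System.Collections.Generic;
using System.Linq;

public enum MangaFormat { Archive, Epub }

public record ParsedSeries(string Name, string NormalizedName, MangaFormat Format);

public static class GroupingDemo
{
    // Stand-in for API.Parser.Parser.Normalize: lowercase, keep letters/digits only.
    private static string Normalize(string name) =>
        new string(name.ToLowerInvariant().Where(char.IsLetterOrDigit).ToArray());

    public static void Main()
    {
        var collected = new Dictionary<ParsedSeries, List<string>>();
        foreach (var series in new[] { "Darker than Black", "Darker Than Black", "Cage of Eden" })
        {
            var normalized = Normalize(series);
            // Reuse an existing key that matches on Format + NormalizedName,
            // mirroring the FirstOrDefault lookup in AddToParsedInfo above.
            var key = collected.Keys.FirstOrDefault(ps =>
                    ps.Format == MangaFormat.Archive && ps.NormalizedName == normalized)
                ?? new ParsedSeries(series, normalized, MangaFormat.Archive);
            if (!collected.TryGetValue(key, out var list)) collected[key] = list = new List<string>();
            list.Add(series);
        }
        Console.WriteLine(collected.Count); // 2: both "Darker ..." variants share one key
    }
}
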
// [Fact]
// public void ExistingOrDefault_Should_BeFromLibrary()
// {
// var allSeries = new List<Series>()
// {
// new Series() {Id = 2, Name = "Darker Than Black"},
// new Series() {Id = 3, Name = "Darker Than Black - Some Extension"},
// new Series() {Id = 4, Name = "Akame Ga Kill"},
// };
// Assert.Equal(_libraryMock.Series.ElementAt(0).Id, ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Darker Than Black").Id);
// Assert.Equal(_libraryMock.Series.ElementAt(0).Id, ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Darker than Black").Id);
// }
//
// [Fact]
// public void ExistingOrDefault_Should_BeFromAllSeries()
// {
// var allSeries = new List<Series>()
// {
// new Series() {Id = 2, Name = "Darker Than Black"},
// new Series() {Id = 3, Name = "Darker Than Black - Some Extension"},
// new Series() {Id = 4, Name = "Akame Ga Kill"},
// };
// Assert.Equal(3, ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Darker Than Black - Some Extension").Id);
// }
//
// [Fact]
// public void ExistingOrDefault_Should_BeNull()
// {
// var allSeries = new List<Series>()
// {
// new Series() {Id = 2, Name = "Darker Than Black"},
// new Series() {Id = 3, Name = "Darker Than Black - Some Extension"},
// new Series() {Id = 4, Name = "Akame Ga Kill"},
// };
// Assert.Null(ScannerService.ExistingOrDefault(_libraryMock, allSeries, "Non existing series"));
// }
[Fact]
public void Should_CreateSeries_Test()
{
// var allSeries = new List<Series>();
// var parsedSeries = new Dictionary<string, List<ParserInfo>>();
//
// parsedSeries.Add("Darker Than Black", new List<ParserInfo>()
// {
// new ParserInfo() {Chapters = "0", Filename = "Something.cbz", Format = MangaFormat.Archive, FullFilePath = "E:/Manga/Something.cbz", Series = "Darker Than Black", Volumes = "1"},
// new ParserInfo() {Chapters = "0", Filename = "Something.cbz", Format = MangaFormat.Archive, FullFilePath = "E:/Manga/Something.cbz", Series = "Darker than Black", Volumes = "2"}
// });
//
// _scannerService.UpsertSeries(_libraryMock, parsedSeries, allSeries);
//
// Assert.Equal(1, _libraryMock.Series.Count);
// Assert.Equal(2, _libraryMock.Series.ElementAt(0).Volumes.Count);
// _testOutputHelper.WriteLine(_libraryMock.ToString());
Assert.True(true);
}
private static DbConnection CreateInMemoryDatabase()
{
var connection = new SqliteConnection("Filename=:memory:");


@@ -16,7 +16,7 @@
<PropertyGroup>
<Product>Kavita</Product>
<Company>kareadita.github.io</Company>
<Copyright>Copyright 2020-$([System.DateTime]::Now.ToString('yyyy')) kareadita.github.io (GNU General Public v3)</Copyright>
<Copyright>Copyright 2020-$([System.DateTime]::Now.ToString('yyyy')) kavitareader.com (GNU General Public v3)</Copyright>
<!-- Should be replaced by CI -->
<AssemblyVersion>0.4.1</AssemblyVersion>
@@ -31,6 +31,7 @@
<ItemGroup>
<PackageReference Include="AutoMapper.Extensions.Microsoft.DependencyInjection" Version="8.1.1" />
<PackageReference Include="Docnet.Core" Version="2.3.1" />
<PackageReference Include="ExCSS" Version="4.1.0" />
<PackageReference Include="Hangfire" Version="1.7.20" />
<PackageReference Include="Hangfire.AspNetCore" Version="1.7.20" />
@@ -47,20 +48,27 @@
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="5.0.4" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="5.0.1" />
<PackageReference Include="Microsoft.IO.RecyclableMemoryStream" Version="2.0.0" />
<PackageReference Include="NetVips" Version="2.0.0" />
<PackageReference Include="NetVips.Native" Version="8.10.6" />
<PackageReference Include="NetVips" Version="2.0.1" />
<PackageReference Include="NetVips.Native" Version="8.11.0" />
<PackageReference Include="NReco.Logging.File" Version="1.1.1" />
<PackageReference Include="Sentry.AspNetCore" Version="3.3.4" />
<PackageReference Include="SharpCompress" Version="0.28.1" />
<PackageReference Include="SonarAnalyzer.CSharp" Version="8.20.0.28934">
<PackageReference Include="Sentry.AspNetCore" Version="3.8.2" />
<PackageReference Include="SharpCompress" Version="0.28.3" />
<PackageReference Include="SonarAnalyzer.CSharp" Version="8.26.0.34506">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.1.1" />
<PackageReference Include="System.Drawing.Common" Version="5.0.2" />
<PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="6.10.0" />
<PackageReference Include="VersOne.Epub" Version="3.0.3.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Kavita.Common\Kavita.Common.csproj" />
</ItemGroup>
<ItemGroup>
<None Remove="Hangfire-log.db" />
<None Remove="obj\**" />
@@ -207,12 +215,4 @@
<_ContentIncludedByDefault Remove="wwwroot\vendor.6b2a0912ae80e6fd297f.js.map" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Kavita.Common\Kavita.Common.csproj" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Kavita.Common\Kavita.Common.csproj" />
</ItemGroup>
</Project>


@@ -15,4 +15,25 @@ namespace API.Comparators
return x.CompareTo(y);
}
}
/// <summary>
/// This is a special case comparer used exclusively for sorting chapters within a single Volume for reading order.
/// <example>
/// Volume 10 has "Series - Vol 10" and "Series - Vol 10 Chapter 81". In this case, the reading order is Vol 10, then Vol 10 Chapter 81.
/// This is represented by Chapter 0, Chapter 81.
/// </example>
/// </summary>
public class ChapterSortComparerZeroFirst : IComparer<double>
{
public int Compare(double x, double y)
{
if (x == 0.0 && y == 0.0) return 0;
// if x is 0, it comes first
if (x == 0.0) return -1;
// if y is 0, it comes first
if (y == 0.0) return 1;
return x.CompareTo(y);
}
}
}
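
A quick illustration of the new comparer (a sketch, not part of the commit): sorting the Chapter.Number values from the Volume 10 example above puts the full-volume file (0) ahead of the loose chapters.

using System;
using System.Linq;
using API.Comparators;

public static class ComparerDemo
{
    public static void Main()
    {
        // 0 = the bare "Vol 10" file; 81 and 82.5 = "Vol 10 Chapter 81" etc.
        var numbers = new[] { 81.0, 0.0, 82.5 };
        var readingOrder = numbers.OrderBy(n => n, new ChapterSortComparerZeroFirst());
        Console.WriteLine(string.Join(", ", readingOrder)); // 0, 81, 82.5
    }
}
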


@@ -1,30 +0,0 @@
using System;
namespace API.Configurations.CustomOptions
{
public class StatsOptions
{
public string ServerUrl { get; set; }
public string ServerSecret { get; set; }
public string SendDataAt { get; set; }
private const char Separator = ':';
public short SendDataHour => GetValueFromSendAt(0);
public short SendDataMinute => GetValueFromSendAt(1);
// The expected SendDataAt format is: Hour:Minute. Ex: 19:45
private short GetValueFromSendAt(int index)
{
var key = $"{nameof(StatsOptions)}:{nameof(SendDataAt)}";
if (string.IsNullOrEmpty(SendDataAt))
throw new InvalidOperationException($"{key} is invalid. Check the app settings file");
if (short.TryParse(SendDataAt.Split(Separator)[index], out var parsedValue))
return parsedValue;
throw new InvalidOperationException($"Could not parse {key}. Check the app settings file");
}
}
}


@@ -4,6 +4,7 @@ using System.Threading.Tasks;
using API.DTOs;
using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using HtmlAgilityPack;
using Microsoft.AspNetCore.Mvc;
@@ -69,8 +70,7 @@ namespace API.Controllers
{
if (navigationItem.NestedItems.Count > 0)
{
_logger.LogDebug("Header: {Header}", navigationItem.Title);
var nestedChapters = new List<BookChapterItem>();
var nestedChapters = new List<BookChapterItem>();
foreach (var nestedChapter in navigationItem.NestedItems)
{


@@ -85,11 +85,12 @@ namespace API.Controllers
".epub" => "application/epub+zip",
".7z" => "application/x-7z-compressed",
".7zip" => "application/x-7z-compressed",
".pdf" => "application/pdf",
_ => contentType
};
}
return File(await _directoryService.ReadFileAsync(firstFile), contentType, Path.GetFileNameWithoutExtension(firstFile));
return File(await _directoryService.ReadFileAsync(firstFile), contentType, Path.GetFileName(firstFile));
}
[HttpGet("chapter")]


@@ -176,13 +176,25 @@ namespace API.Controllers
var chapterIds =
await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(seriesIds);
var result = await _unitOfWork.LibraryRepository.DeleteLibrary(libraryId);
if (result && chapterIds.Any())
{
_taskScheduler.CleanupChapters(chapterIds);
}
return Ok(result);
try
{
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId);
_unitOfWork.LibraryRepository.Delete(library);
await _unitOfWork.CommitAsync();
if (chapterIds.Any())
{
_taskScheduler.CleanupChapters(chapterIds);
}
return Ok(true);
}
catch (Exception ex)
{
_logger.LogError(ex, "There was a critical error trying to delete the library");
await _unitOfWork.RollbackAsync();
return Ok(false);
}
}
[Authorize(Policy = "RequireAdminRole")]


@@ -11,7 +11,6 @@ using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
namespace API.Controllers
{
@@ -19,16 +18,15 @@
{
private readonly IDirectoryService _directoryService;
private readonly ICacheService _cacheService;
private readonly ILogger<ReaderController> _logger;
private readonly IUnitOfWork _unitOfWork;
private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer();
private readonly ChapterSortComparerZeroFirst _chapterSortComparerForInChapterSorting = new ChapterSortComparerZeroFirst();
private readonly NaturalSortComparer _naturalSortComparer = new NaturalSortComparer();
public ReaderController(IDirectoryService directoryService, ICacheService cacheService,
ILogger<ReaderController> logger, IUnitOfWork unitOfWork)
public ReaderController(IDirectoryService directoryService, ICacheService cacheService, IUnitOfWork unitOfWork)
{
_directoryService = directoryService;
_cacheService = cacheService;
_logger = logger;
_unitOfWork = unitOfWork;
}
@@ -54,6 +52,7 @@
[HttpGet("chapter-info")]
public async Task<ActionResult<ChapterInfoDto>> GetChapterInfo(int chapterId)
{
// PERF: Write this in one DB call
var chapter = await _cacheService.Ensure(chapterId);
if (chapter == null) return BadRequest("Could not find Chapter");
var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(chapter.VolumeId);
@@ -183,7 +182,6 @@
public async Task<ActionResult> MarkVolumeAsRead(MarkVolumeReadDto markVolumeReadDto)
{
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
_logger.LogDebug("Saving {UserName} progress for Volume {VolumeID} to read", user.UserName, markVolumeReadDto.VolumeId);
var chapters = await _unitOfWork.VolumeRepository.GetChaptersAsync(markVolumeReadDto.VolumeId);
foreach (var chapter in chapters)
@@ -223,7 +221,6 @@
public async Task<ActionResult> Bookmark(BookmarkDto bookmarkDto)
{
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
_logger.LogDebug("Saving {UserName} progress for Chapter {ChapterId} to page {PageNum}", user.UserName, bookmarkDto.ChapterId, bookmarkDto.PageNum);
// Don't let user bookmark past total pages.
var chapter = await _unitOfWork.VolumeRepository.GetChapterAsync(bookmarkDto.ChapterId);
@@ -238,35 +235,43 @@
}
user.Progresses ??= new List<AppUserProgress>();
var userProgress = user.Progresses.SingleOrDefault(x => x.ChapterId == bookmarkDto.ChapterId && x.AppUserId == user.Id);
if (userProgress == null)
try
{
user.Progresses.Add(new AppUserProgress
{
PagesRead = bookmarkDto.PageNum,
VolumeId = bookmarkDto.VolumeId,
SeriesId = bookmarkDto.SeriesId,
ChapterId = bookmarkDto.ChapterId,
BookScrollId = bookmarkDto.BookScrollId,
LastModified = DateTime.Now
});
user.Progresses ??= new List<AppUserProgress>();
var userProgress =
user.Progresses.SingleOrDefault(x => x.ChapterId == bookmarkDto.ChapterId && x.AppUserId == user.Id);
if (userProgress == null)
{
user.Progresses.Add(new AppUserProgress
{
PagesRead = bookmarkDto.PageNum,
VolumeId = bookmarkDto.VolumeId,
SeriesId = bookmarkDto.SeriesId,
ChapterId = bookmarkDto.ChapterId,
BookScrollId = bookmarkDto.BookScrollId,
LastModified = DateTime.Now
});
}
else
{
userProgress.PagesRead = bookmarkDto.PageNum;
userProgress.SeriesId = bookmarkDto.SeriesId;
userProgress.VolumeId = bookmarkDto.VolumeId;
userProgress.BookScrollId = bookmarkDto.BookScrollId;
userProgress.LastModified = DateTime.Now;
}
_unitOfWork.UserRepository.Update(user);
if (await _unitOfWork.CommitAsync())
{
return Ok();
}
}
else
catch (Exception)
{
userProgress.PagesRead = bookmarkDto.PageNum;
userProgress.SeriesId = bookmarkDto.SeriesId;
userProgress.VolumeId = bookmarkDto.VolumeId;
userProgress.BookScrollId = bookmarkDto.BookScrollId;
userProgress.LastModified = DateTime.Now;
}
_unitOfWork.UserRepository.Update(user);
if (await _unitOfWork.CommitAsync())
{
return Ok();
await _unitOfWork.RollbackAsync();
}
return BadRequest("Could not save progress");
@@ -275,6 +280,9 @@
/// <summary>
/// Returns the next logical chapter from the series.
/// </summary>
/// <example>
/// V1 → V2 → V3 chapter 0 → V3 chapter 10 → SP 01 → SP 02
/// </example>
/// <param name="seriesId"></param>
/// <param name="volumeId"></param>
/// <param name="currentChapterId"></param>
@@ -288,7 +296,8 @@
var currentChapter = await _unitOfWork.VolumeRepository.GetChapterAsync(currentChapterId);
if (currentVolume.Number == 0)
{
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapter.Number);
// Handle specials by sorting on their Filename aka Range
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, _naturalSortComparer), currentChapter.Number);
if (chapterId > 0) return Ok(chapterId);
}
@@ -296,13 +305,23 @@
{
if (volume.Number == currentVolume.Number && volume.Chapters.Count > 1)
{
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapter.Number);
// Handle Chapters within current Volume
// In this case, 0 needs to come first because 0 represents a full volume file.
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting), currentChapter.Number);
if (chapterId > 0) return Ok(chapterId);
}
if (volume.Number == currentVolume.Number + 1)
{
return Ok(volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).FirstOrDefault()?.Id);
// Handle Chapters within next Volume
// ! When selecting the chapter for the next volume, we need to make sure a c0 comes before a c1+
var chapters = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).ToList();
if (currentChapter.Number.Equals("0") && chapters.Last().Number.Equals("0"))
{
return chapters.Last().Id;
}
return Ok(chapters.FirstOrDefault()?.Id);
}
}
return Ok(-1);
@@ -311,7 +330,8 @@
private static int GetNextChapterId(IEnumerable<Chapter> chapters, string currentChapterNumber)
{
var next = false;
foreach (var chapter in chapters)
var chaptersList = chapters.ToList();
foreach (var chapter in chaptersList)
{
if (next)
{
@@ -326,6 +346,9 @@
/// <summary>
/// Returns the previous logical chapter from the series.
/// </summary>
/// <example>
/// V1 ← V2 ← V3 chapter 0 ← V3 chapter 10 ← SP 01 ← SP 02
/// </example>
/// <param name="seriesId"></param>
/// <param name="volumeId"></param>
/// <param name="currentChapterId"></param>
@@ -340,7 +363,7 @@
if (currentVolume.Number == 0)
{
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapter.Number);
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => x.Range, _naturalSortComparer).Reverse(), currentChapter.Number);
if (chapterId > 0) return Ok(chapterId);
}
@@ -348,12 +371,12 @@
{
if (volume.Number == currentVolume.Number)
{
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapter.Number);
var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).Reverse(), currentChapter.Number);
if (chapterId > 0) return Ok(chapterId);
}
if (volume.Number == currentVolume.Number - 1)
{
return Ok(volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).LastOrDefault()?.Id);
return Ok(volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).LastOrDefault()?.Id);
}
}
return Ok(-1);
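
The GetNextChapterId helper above is cut off at the hunk boundary. A simplified, self-contained sketch of the walk it performs, reconstructed from the visible lines (an assumption, with ChapterStub standing in for the Chapter entity): given chapters already sorted into reading order, return the id that follows the current chapter number, or -1 when there is none.

using System;
using System.Collections.Generic;

public record ChapterStub(int Id, string Number);

public static class NextChapterDemo
{
    private static int GetNextChapterId(IEnumerable<ChapterStub> chapters, string currentChapterNumber)
    {
        var next = false;
        foreach (var chapter in chapters)
        {
            if (next) return chapter.Id;                        // first chapter after the match
            if (chapter.Number == currentChapterNumber) next = true;
        }
        return -1;                                              // no next chapter
    }

    public static void Main()
    {
        // Reading order from the XML example: V1 -> V2 -> V3 c0 -> V3 c10 -> SP 01
        var ordered = new List<ChapterStub>
        {
            new(1, "0"), new(2, "0"), new(3, "0"), new(4, "10"), new(5, "SP 01")
        };
        Console.WriteLine(GetNextChapterId(ordered, "10")); // 5
    }
}
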


@@ -180,6 +180,14 @@ namespace API.Controllers
return Ok();
}
[Authorize(Policy = "RequireAdminRole")]
[HttpPost("scan")]
public ActionResult ScanSeries(RefreshSeriesDto refreshSeriesDto)
{
_taskScheduler.ScanSeries(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId);
return Ok();
}
[HttpGet("metadata")]
public async Task<ActionResult<SeriesMetadataDto>> GetSeriesMetadata(int seriesId)
{


@@ -1,8 +1,10 @@
using System;
using System.IO;
using System.Threading.Tasks;
using API.DTOs.Stats;
using API.Extensions;
using API.Interfaces.Services;
using API.Services.Tasks;
using Kavita.Common;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
@@ -20,15 +22,17 @@ namespace API.Controllers
private readonly IConfiguration _config;
private readonly IBackupService _backupService;
private readonly IArchiveService _archiveService;
private readonly ICacheService _cacheService;
public ServerController(IHostApplicationLifetime applicationLifetime, ILogger<ServerController> logger, IConfiguration config,
IBackupService backupService, IArchiveService archiveService)
IBackupService backupService, IArchiveService archiveService, ICacheService cacheService)
{
_applicationLifetime = applicationLifetime;
_logger = logger;
_config = config;
_backupService = backupService;
_archiveService = archiveService;
_cacheService = cacheService;
}
[HttpPost("restart")]
@@ -40,6 +44,25 @@
return Ok();
}
[HttpPost("clear-cache")]
public ActionResult ClearCache()
{
_logger.LogInformation("{UserName} is clearing cache of server from admin dashboard", User.GetUsername());
_cacheService.Cleanup();
return Ok();
}
/// <summary>
/// Returns non-sensitive information about the current system
/// </summary>
/// <returns></returns>
[HttpGet("server-info")]
public ActionResult<ServerInfoDto> GetVersion()
{
return Ok(StatsService.GetServerInfo());
}
[HttpGet("logs")]
public async Task<ActionResult> GetLogs()
{


@@ -1,17 +1,17 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Data;
using API.DTOs;
using API.Entities.Enums;
using API.Extensions;
using API.Helpers.Converters;
using API.Interfaces;
using Kavita.Common;
using Kavita.Common.Extensions;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
namespace API.Controllers
@@ -22,26 +22,24 @@
private readonly ILogger<SettingsController> _logger;
private readonly IUnitOfWork _unitOfWork;
private readonly ITaskScheduler _taskScheduler;
private readonly IConfiguration _configuration;
public SettingsController(ILogger<SettingsController> logger, IUnitOfWork unitOfWork, ITaskScheduler taskScheduler, IConfiguration configuration)
public SettingsController(ILogger<SettingsController> logger, IUnitOfWork unitOfWork, ITaskScheduler taskScheduler)
{
_logger = logger;
_unitOfWork = unitOfWork;
_taskScheduler = taskScheduler;
_configuration = configuration;
}
[HttpGet("")]
[HttpGet]
public async Task<ActionResult<ServerSettingDto>> GetSettings()
{
var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
settingsDto.Port = Configuration.GetPort(Program.GetAppSettingFilename());
settingsDto.LoggingLevel = Configuration.GetLogLevel(Program.GetAppSettingFilename());
settingsDto.Port = Configuration.Port;
settingsDto.LoggingLevel = Configuration.LogLevel;
return Ok(settingsDto);
}
[HttpPost("")]
[HttpPost]
public async Task<ActionResult<ServerSettingDto>> UpdateSettings(ServerSettingDto updateSettingsDto)
{
_logger.LogInformation("{UserName} is updating Server Settings", User.GetUsername());
@@ -59,9 +57,6 @@
// We do not allow CacheDirectory changes, so we will ignore.
var currentSettings = await _unitOfWork.SettingsRepository.GetSettingsAsync();
var logLevelOptions = new LogLevelOptions();
_configuration.GetSection("Logging:LogLevel").Bind(logLevelOptions);
foreach (var setting in currentSettings)
{
if (setting.Key == ServerSettingKey.TaskBackup && updateSettingsDto.TaskBackup != setting.Value)
@@ -76,24 +71,24 @@
_unitOfWork.SettingsRepository.Update(setting);
}
if (setting.Key == ServerSettingKey.Port && updateSettingsDto.Port + "" != setting.Value)
if (setting.Key == ServerSettingKey.Port && updateSettingsDto.Port + string.Empty != setting.Value)
{
setting.Value = updateSettingsDto.Port + "";
setting.Value = updateSettingsDto.Port + string.Empty;
// Port is managed in appSetting.json
Configuration.UpdatePort(Program.GetAppSettingFilename(), updateSettingsDto.Port);
Configuration.Port = updateSettingsDto.Port;
_unitOfWork.SettingsRepository.Update(setting);
}
if (setting.Key == ServerSettingKey.LoggingLevel && updateSettingsDto.LoggingLevel + "" != setting.Value)
if (setting.Key == ServerSettingKey.LoggingLevel && updateSettingsDto.LoggingLevel + string.Empty != setting.Value)
{
setting.Value = updateSettingsDto.LoggingLevel + "";
Configuration.UpdateLogLevel(Program.GetAppSettingFilename(), updateSettingsDto.LoggingLevel);
setting.Value = updateSettingsDto.LoggingLevel + string.Empty;
Configuration.LogLevel = updateSettingsDto.LoggingLevel;
_unitOfWork.SettingsRepository.Update(setting);
}
if (setting.Key == ServerSettingKey.AllowStatCollection && updateSettingsDto.AllowStatCollection + "" != setting.Value)
if (setting.Key == ServerSettingKey.AllowStatCollection && updateSettingsDto.AllowStatCollection + string.Empty != setting.Value)
{
setting.Value = updateSettingsDto.AllowStatCollection + "";
setting.Value = updateSettingsDto.AllowStatCollection + string.Empty;
_unitOfWork.SettingsRepository.Update(setting);
if (!updateSettingsDto.AllowStatCollection)
{
@@ -106,7 +101,6 @@
}
}
_configuration.GetSection("Logging:LogLevel:Default").Value = updateSettingsDto.LoggingLevel + "";
if (!_unitOfWork.HasChanges()) return Ok("Nothing was updated");
if (!_unitOfWork.HasChanges() || !await _unitOfWork.CommitAsync())
@@ -129,7 +123,7 @@
[HttpGet("library-types")]
public ActionResult<IEnumerable<string>> GetLibraryTypes()
{
return Ok(Enum.GetNames(typeof(LibraryType)));
return Ok(Enum.GetValues<LibraryType>().Select(t => t.ToDescription()));
}
[HttpGet("log-levels")]
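
GetLibraryTypes now returns each LibraryType's description instead of its raw enum name. ToDescription comes from Kavita.Common.Extensions; presumably it reads the member's [Description] attribute and falls back to the name. A minimal sketch of such an extension (an assumption, not the committed implementation):

using System;
using System.ComponentModel;
using System.Reflection;

public static class EnumExtensionsSketch
{
    // Hypothetical shape: prefer [Description("...")] on the enum member,
    // fall back to the member name itself.
    public static string ToDescription<TEnum>(this TEnum value) where TEnum : struct, Enum
    {
        var member = typeof(TEnum).GetMember(value.ToString());
        var attr = member.Length > 0
            ? member[0].GetCustomAttribute<DescriptionAttribute>()
            : null;
        return attr?.Description ?? value.ToString();
    }
}
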


@@ -1,6 +1,6 @@
using System;
using System.Threading.Tasks;
using API.DTOs;
using API.DTOs.Stats;
using API.Interfaces.Services;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;


@@ -1,4 +1,6 @@
namespace API.DTOs
using API.Entities.Enums;
namespace API.DTOs
{
public class SearchResultDto
{
@@ -7,6 +9,7 @@
public string OriginalName { get; init; }
public string SortName { get; init; }
public string LocalizedName { get; init; }
public MangaFormat Format { get; init; }
// Grouping information
public string LibraryName { get; set; }


@@ -1,4 +1,5 @@
using System;
using API.Entities.Enums;
namespace API.DTOs
{
@@ -23,6 +24,7 @@ namespace API.DTOs
/// Review from logged in user. Calculated at API-time.
/// </summary>
public string UserReview { get; set; }
public MangaFormat Format { get; set; }
public DateTime Created { get; set; }


@@ -1,6 +1,6 @@
using System;
namespace API.DTOs
namespace API.DTOs.Stats
{
public class ClientInfoDto
{
@@ -16,13 +16,14 @@ namespace API.DTOs
public DetailsVersion Os { get; set; }
public DateTime? CollectedAt { get; set; }
public bool UsingDarkTheme { get; set; }
public bool IsTheSameDevice(ClientInfoDto clientInfoDto)
{
return (clientInfoDto.ScreenResolution ?? "").Equals(ScreenResolution) &&
(clientInfoDto.PlatformType ?? "").Equals(PlatformType) &&
(clientInfoDto.Browser?.Name ?? "").Equals(Browser?.Name) &&
(clientInfoDto.Os?.Name ?? "").Equals(Os?.Name) &&
return (clientInfoDto.ScreenResolution ?? string.Empty).Equals(ScreenResolution) &&
(clientInfoDto.PlatformType ?? string.Empty).Equals(PlatformType) &&
(clientInfoDto.Browser?.Name ?? string.Empty).Equals(Browser?.Name) &&
(clientInfoDto.Os?.Name ?? string.Empty).Equals(Os?.Name) &&
clientInfoDto.CollectedAt.GetValueOrDefault().ToString("yyyy-MM-dd")
.Equals(CollectedAt.GetValueOrDefault().ToString("yyyy-MM-dd"));
}


@@ -1,4 +1,4 @@
namespace API.DTOs
namespace API.DTOs.Stats
{
public class ServerInfoDto
{
@@ -8,5 +8,7 @@
public string KavitaVersion { get; set; }
public string BuildBranch { get; set; }
public string Culture { get; set; }
public bool IsDocker { get; set; }
public int NumOfCores { get; set; }
}
}


@@ -1,7 +1,7 @@
using System.Collections.Generic;
using API.Entities.Enums;
namespace API.DTOs
namespace API.DTOs.Stats
{
public class UsageInfoDto
{


@@ -2,7 +2,7 @@
using System.Collections.Generic;
using System.Linq;
namespace API.DTOs
namespace API.DTOs.Stats
{
public class UsageStatisticsDto
{


@@ -45,7 +45,7 @@ namespace API.Data
{
Number = specialTreatment ? "0" : Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty,
Range = specialTreatment ? info.Filename : info.Chapters,
Title = (specialTreatment && info.Format == MangaFormat.Book)
Title = (specialTreatment && info.Format == MangaFormat.Epub)
? info.Title
: specialTitle,
Files = new List<MangaFile>(),


@@ -32,6 +32,11 @@ namespace API.Data
_context.Entry(library).State = EntityState.Modified;
}
public void Delete(Library library)
{
_context.Library.Remove(library);
}
public async Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName)
{
return await _context.Library
@@ -115,6 +120,28 @@
.SingleAsync();
}
/// <summary>
/// This is a heavy call that pulls all entities for a Library, but this version only grabs them for a single series id
/// </summary>
/// <param name="libraryId"></param>
/// <param name="seriesId"></param>
/// <returns></returns>
public async Task<Library> GetFullLibraryForIdAsync(int libraryId, int seriesId)
{
return await _context.Library
.Where(x => x.Id == libraryId)
.Include(f => f.Folders)
.Include(l => l.Series.Where(s => s.Id == seriesId))
.ThenInclude(s => s.Metadata)
.Include(l => l.Series.Where(s => s.Id == seriesId))
.ThenInclude(s => s.Volumes)
.ThenInclude(v => v.Chapters)
.ThenInclude(c => c.Files)
.AsSplitQuery()
.SingleAsync();
}
public async Task<bool> LibraryExists(string libraryName)
{
return await _context.Library


@@ -0,0 +1,872 @@
// <auto-generated />
using System;
using API.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace API.Data.Migrations
{
[DbContext(typeof(DataContext))]
[Migration("20210722223304_AddedSeriesFormat")]
partial class AddedSeriesFormat
{
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "5.0.4");
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedName")
.IsUnique()
.HasDatabaseName("RoleNameIndex");
b.ToTable("AspNetRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AccessFailedCount")
.HasColumnType("INTEGER");
b.Property<string>("ConcurrencyStamp")
.IsConcurrencyToken()
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<string>("Email")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<bool>("EmailConfirmed")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastActive")
.HasColumnType("TEXT");
b.Property<bool>("LockoutEnabled")
.HasColumnType("INTEGER");
b.Property<DateTimeOffset?>("LockoutEnd")
.HasColumnType("TEXT");
b.Property<string>("NormalizedEmail")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("NormalizedUserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.Property<string>("PasswordHash")
.HasColumnType("TEXT");
b.Property<string>("PhoneNumber")
.HasColumnType("TEXT");
b.Property<bool>("PhoneNumberConfirmed")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("SecurityStamp")
.HasColumnType("TEXT");
b.Property<bool>("TwoFactorEnabled")
.HasColumnType("INTEGER");
b.Property<string>("UserName")
.HasMaxLength(256)
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("NormalizedEmail")
.HasDatabaseName("EmailIndex");
b.HasIndex("NormalizedUserName")
.IsUnique()
.HasDatabaseName("UserNameIndex");
b.ToTable("AspNetUsers");
});
modelBuilder.Entity("API.Entities.AppUserPreferences", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<bool>("AutoCloseMenu")
.HasColumnType("INTEGER");
b.Property<bool>("BookReaderDarkMode")
.HasColumnType("INTEGER");
b.Property<string>("BookReaderFontFamily")
.HasColumnType("TEXT");
b.Property<int>("BookReaderFontSize")
.HasColumnType("INTEGER");
b.Property<int>("BookReaderLineSpacing")
.HasColumnType("INTEGER");
b.Property<int>("BookReaderMargin")
.HasColumnType("INTEGER");
b.Property<int>("BookReaderReadingDirection")
.HasColumnType("INTEGER");
b.Property<bool>("BookReaderTapToPaginate")
.HasColumnType("INTEGER");
b.Property<int>("PageSplitOption")
.HasColumnType("INTEGER");
b.Property<int>("ReaderMode")
.HasColumnType("INTEGER");
b.Property<int>("ReadingDirection")
.HasColumnType("INTEGER");
b.Property<int>("ScalingOption")
.HasColumnType("INTEGER");
b.Property<bool>("SiteDarkMode")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId")
.IsUnique();
b.ToTable("AppUserPreferences");
});
modelBuilder.Entity("API.Entities.AppUserProgress", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<string>("BookScrollId")
.HasColumnType("TEXT");
b.Property<int>("ChapterId")
.HasColumnType("INTEGER");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<int>("PagesRead")
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.Property<int>("VolumeId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId");
b.ToTable("AppUserProgresses");
});
modelBuilder.Entity("API.Entities.AppUserRating", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AppUserId")
.HasColumnType("INTEGER");
b.Property<int>("Rating")
.HasColumnType("INTEGER");
b.Property<string>("Review")
.HasColumnType("TEXT");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("AppUserId");
b.ToTable("AppUserRating");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("UserId", "RoleId");
b.HasIndex("RoleId");
b.ToTable("AspNetUserRoles");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<bool>("IsSpecial")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Number")
.HasColumnType("TEXT");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<string>("Range")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.Property<int>("VolumeId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("VolumeId");
b.ToTable("Chapter");
});
modelBuilder.Entity("API.Entities.CollectionTag", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<string>("NormalizedTitle")
.HasColumnType("TEXT");
b.Property<bool>("Promoted")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("Summary")
.HasColumnType("TEXT");
b.Property<string>("Title")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("Id", "Promoted")
.IsUnique();
b.ToTable("CollectionTag");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<DateTime>("LastScanned")
.HasColumnType("TEXT");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("Path")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.ToTable("FolderPath");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("CoverImage")
.HasColumnType("TEXT");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Type")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.ToTable("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ChapterId")
.HasColumnType("INTEGER");
b.Property<string>("FilePath")
.HasColumnType("TEXT");
b.Property<int>("Format")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("ChapterId");
b.ToTable("MangaFile");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<int>("Format")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<int>("LibraryId")
.HasColumnType("INTEGER");
b.Property<string>("LocalizedName")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("NormalizedName")
.HasColumnType("TEXT");
b.Property<string>("OriginalName")
.HasColumnType("TEXT");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<string>("SortName")
.HasColumnType("TEXT");
b.Property<string>("Summary")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("LibraryId");
b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId", "Format")
.IsUnique();
b.ToTable("Series");
});
modelBuilder.Entity("API.Entities.SeriesMetadata", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("SeriesId")
.IsUnique();
b.HasIndex("Id", "SeriesId")
.IsUnique();
b.ToTable("SeriesMetadata");
});
modelBuilder.Entity("API.Entities.ServerSetting", b =>
{
b.Property<int>("Key")
.HasColumnType("INTEGER");
b.Property<uint>("RowVersion")
.IsConcurrencyToken()
.HasColumnType("INTEGER");
b.Property<string>("Value")
.HasColumnType("TEXT");
b.HasKey("Key");
b.ToTable("ServerSetting");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<byte[]>("CoverImage")
.HasColumnType("BLOB");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<int>("Number")
.HasColumnType("INTEGER");
b.Property<int>("Pages")
.HasColumnType("INTEGER");
b.Property<int>("SeriesId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("SeriesId");
b.ToTable("Volume");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.Property<int>("AppUsersId")
.HasColumnType("INTEGER");
b.Property<int>("LibrariesId")
.HasColumnType("INTEGER");
b.HasKey("AppUsersId", "LibrariesId");
b.HasIndex("LibrariesId");
b.ToTable("AppUserLibrary");
});
modelBuilder.Entity("CollectionTagSeriesMetadata", b =>
{
b.Property<int>("CollectionTagsId")
.HasColumnType("INTEGER");
b.Property<int>("SeriesMetadatasId")
.HasColumnType("INTEGER");
b.HasKey("CollectionTagsId", "SeriesMetadatasId");
b.HasIndex("SeriesMetadatasId");
b.ToTable("CollectionTagSeriesMetadata");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("RoleId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("RoleId");
b.ToTable("AspNetRoleClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("ClaimType")
.HasColumnType("TEXT");
b.Property<string>("ClaimValue")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("UserId");
b.ToTable("AspNetUserClaims");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("ProviderKey")
.HasColumnType("TEXT");
b.Property<string>("ProviderDisplayName")
.HasColumnType("TEXT");
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.HasKey("LoginProvider", "ProviderKey");
b.HasIndex("UserId");
b.ToTable("AspNetUserLogins");
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.Property<int>("UserId")
.HasColumnType("INTEGER");
b.Property<string>("LoginProvider")
.HasColumnType("TEXT");
b.Property<string>("Name")
.HasColumnType("TEXT");
b.Property<string>("Value")
.HasColumnType("TEXT");
b.HasKey("UserId", "LoginProvider", "Name");
b.ToTable("AspNetUserTokens");
});
modelBuilder.Entity("API.Entities.AppUserPreferences", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithOne("UserPreferences")
.HasForeignKey("API.Entities.AppUserPreferences", "AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserProgress", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithMany("Progresses")
.HasForeignKey("AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserRating", b =>
{
b.HasOne("API.Entities.AppUser", "AppUser")
.WithMany("Ratings")
.HasForeignKey("AppUserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("AppUser");
});
modelBuilder.Entity("API.Entities.AppUserRole", b =>
{
b.HasOne("API.Entities.AppRole", "Role")
.WithMany("UserRoles")
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.AppUser", "User")
.WithMany("UserRoles")
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Role");
b.Navigation("User");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.HasOne("API.Entities.Volume", "Volume")
.WithMany("Chapters")
.HasForeignKey("VolumeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Volume");
});
modelBuilder.Entity("API.Entities.FolderPath", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Folders")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.MangaFile", b =>
{
b.HasOne("API.Entities.Chapter", "Chapter")
.WithMany("Files")
.HasForeignKey("ChapterId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Chapter");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.HasOne("API.Entities.Library", "Library")
.WithMany("Series")
.HasForeignKey("LibraryId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Library");
});
modelBuilder.Entity("API.Entities.SeriesMetadata", b =>
{
b.HasOne("API.Entities.Series", "Series")
.WithOne("Metadata")
.HasForeignKey("API.Entities.SeriesMetadata", "SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Series");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.HasOne("API.Entities.Series", "Series")
.WithMany("Volumes")
.HasForeignKey("SeriesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.Navigation("Series");
});
modelBuilder.Entity("AppUserLibrary", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("AppUsersId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.Library", null)
.WithMany()
.HasForeignKey("LibrariesId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("CollectionTagSeriesMetadata", b =>
{
b.HasOne("API.Entities.CollectionTag", null)
.WithMany()
.HasForeignKey("CollectionTagsId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
b.HasOne("API.Entities.SeriesMetadata", null)
.WithMany()
.HasForeignKey("SeriesMetadatasId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim<int>", b =>
{
b.HasOne("API.Entities.AppRole", null)
.WithMany()
.HasForeignKey("RoleId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken<int>", b =>
{
b.HasOne("API.Entities.AppUser", null)
.WithMany()
.HasForeignKey("UserId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("API.Entities.AppRole", b =>
{
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.AppUser", b =>
{
b.Navigation("Progresses");
b.Navigation("Ratings");
b.Navigation("UserPreferences");
b.Navigation("UserRoles");
});
modelBuilder.Entity("API.Entities.Chapter", b =>
{
b.Navigation("Files");
});
modelBuilder.Entity("API.Entities.Library", b =>
{
b.Navigation("Folders");
b.Navigation("Series");
});
modelBuilder.Entity("API.Entities.Series", b =>
{
b.Navigation("Metadata");
b.Navigation("Volumes");
});
modelBuilder.Entity("API.Entities.Volume", b =>
{
b.Navigation("Chapters");
});
#pragma warning restore 612, 618
}
}
}

View File

@ -0,0 +1,44 @@
using Microsoft.EntityFrameworkCore.Migrations;
namespace API.Data.Migrations
{
public partial class AddedSeriesFormat : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropIndex(
name: "IX_Series_Name_NormalizedName_LocalizedName_LibraryId",
table: "Series");
migrationBuilder.AddColumn<int>(
name: "Format",
table: "Series",
type: "INTEGER",
nullable: false,
defaultValue: 2);
migrationBuilder.CreateIndex(
name: "IX_Series_Name_NormalizedName_LocalizedName_LibraryId_Format",
table: "Series",
columns: new[] { "Name", "NormalizedName", "LocalizedName", "LibraryId", "Format" },
unique: true);
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropIndex(
name: "IX_Series_Name_NormalizedName_LocalizedName_LibraryId_Format",
table: "Series");
migrationBuilder.DropColumn(
name: "Format",
table: "Series");
migrationBuilder.CreateIndex(
name: "IX_Series_Name_NormalizedName_LocalizedName_LibraryId",
table: "Series",
columns: new[] { "Name", "NormalizedName", "LocalizedName", "LibraryId" },
unique: true);
}
}
}
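For context on the backfill: the defaultValue of 2 written into existing Series rows corresponds to MangaFormat.Unknown in the enum as updated by this merge, which matches the Series entity's own default. A quick illustration:

using API.Entities.Enums;

// Existing rows are backfilled with Format = 2, i.e.:
var backfilled = (MangaFormat)2; // MangaFormat.Unknown, same as the Series entity default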

View File

@ -412,6 +412,9 @@ namespace API.Data.Migrations
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<int>("Format")
.HasColumnType("INTEGER");
b.Property<DateTime>("LastModified")
.HasColumnType("TEXT");
@ -443,7 +446,7 @@ namespace API.Data.Migrations
b.HasIndex("LibraryId");
b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId")
b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId", "Format")
.IsUnique();
b.ToTable("Series");

View File

@ -62,11 +62,10 @@ namespace API.Data
await context.SaveChangesAsync();
// Port and LoggingLevel are managed in appSettings.json. Update the DB values to match
var configFile = Program.GetAppSettingFilename();
context.ServerSetting.FirstOrDefault(s => s.Key == ServerSettingKey.Port).Value =
Configuration.GetPort(configFile) + "";
Configuration.Port + string.Empty;
context.ServerSetting.FirstOrDefault(s => s.Key == ServerSettingKey.LoggingLevel).Value =
Configuration.GetLogLevel(configFile);
Configuration.LogLevel + string.Empty;
await context.SaveChangesAsync();
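The seeding above now reads the port and log level from static properties on Kavita.Common.Configuration instead of re-parsing the settings file on every call. A minimal sketch of that shape, assuming an appsettings.json layout; the real Kavita.Common implementation differs:

using System.IO;
using System.Text.Json;

// Sketch only; the property names match the calls above, the JSON plumbing is assumed.
public static class Configuration
{
    private const string AppSettingsFilename = "appsettings.json";

    public static int Port => Root().GetProperty("Port").GetInt32();

    public static string LogLevel => Root()
        .GetProperty("Logging").GetProperty("LogLevel").GetProperty("Default").GetString();

    private static JsonElement Root()
    {
        using var doc = JsonDocument.Parse(File.ReadAllText(AppSettingsFilename));
        return doc.RootElement.Clone(); // Clone so the element outlives the document
    }
}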

View File

@ -1,6 +1,7 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
using API.DTOs;
using API.Entities;
using API.Extensions;
@ -16,7 +17,7 @@ namespace API.Data
{
private readonly DataContext _context;
private readonly IMapper _mapper;
private readonly NaturalSortComparer _naturalSortComparer = new ();
public SeriesRepository(DataContext context, IMapper mapper)
{
_context = context;
@ -110,10 +111,21 @@ namespace API.Data
.ToListAsync();
await AddVolumeModifiers(userId, volumes);
SortSpecialChapters(volumes);
return volumes;
}
private void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
{
foreach (var v in volumes.Where(vdto => vdto.Number == 0))
{
v.Chapters = v.Chapters.OrderBy(x => x.Range, _naturalSortComparer).ToList();
}
}
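SortSpecialChapters depends on NaturalSortComparer so that chapter ranges such as "2" and "10" order numerically rather than lexicographically. A minimal sketch of a natural-sort comparer (the real API.Comparators implementation differs):

using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;

// Sketch: splits strings into digit and non-digit chunks and compares chunk-wise,
// so "Ch. 2" sorts before "Ch. 10" where a plain string comparison would not.
public class NaturalComparer : IComparer<string>
{
    private static readonly Regex Chunks = new Regex(@"\d+|\D+", RegexOptions.Compiled);

    public int Compare(string x, string y)
    {
        var xs = Chunks.Matches(x ?? string.Empty);
        var ys = Chunks.Matches(y ?? string.Empty);
        for (var i = 0; i < Math.Min(xs.Count, ys.Count); i++)
        {
            var a = xs[i].Value;
            var b = ys[i].Value;
            var cmp = long.TryParse(a, out var an) && long.TryParse(b, out var bn)
                ? an.CompareTo(bn)
                : string.Compare(a, b, StringComparison.OrdinalIgnoreCase);
            if (cmp != 0) return cmp;
        }
        return xs.Count.CompareTo(ys.Count);
    }
}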
public async Task<IEnumerable<Volume>> GetVolumes(int seriesId)
{
@ -266,7 +278,7 @@ namespace API.Data
.SingleOrDefaultAsync();
}
private async Task AddVolumeModifiers(int userId, List<VolumeDto> volumes)
private async Task AddVolumeModifiers(int userId, IReadOnlyCollection<VolumeDto> volumes)
{
var userProgress = await _context.AppUserProgresses
.Where(p => p.AppUserId == userId && volumes.Select(s => s.Id).Contains(p.VolumeId))

View File

@ -82,7 +82,7 @@ namespace API.Data
public async Task<IList<MangaFile>> GetFilesForChapter(int chapterId)
{
return await _context.MangaFile
.Where(c => chapterId == c.Id)
.Where(c => chapterId == c.ChapterId)
.AsNoTracking()
.ToListAsync();
}

View File

@ -49,7 +49,7 @@ namespace API.Entities
{
Number = "0";
}
Title = (IsSpecial && info.Format == MangaFormat.Book)
Title = (IsSpecial && info.Format == MangaFormat.Epub)
? info.Title
: Range;

View File

@ -9,6 +9,6 @@ namespace API.Entities.Enums
[Description("Comic")]
Comic = 1,
[Description("Book")]
Book = 2
Book = 2,
}
}

View File

@ -10,7 +10,9 @@ namespace API.Entities.Enums
Archive = 1,
[Description("Unknown")]
Unknown = 2,
[Description("Book")]
Book = 3
[Description("EPUB")]
Epub = 3,
[Description("PDF")]
Pdf = 4
}
}

View File

@ -5,10 +5,13 @@ namespace API.Entities.Enums
public enum ReaderMode
{
[Description("Left and Right")]
// ReSharper disable once InconsistentNaming
MANGA_LR = 0,
[Description("Up and Down")]
// ReSharper disable once InconsistentNaming
MANGA_UP = 1,
[Description("Webtoon")]
// ReSharper disable once InconsistentNaming
WEBTOON = 2
}
}

View File

@ -7,7 +7,7 @@ namespace API.Entities
{
public int Id { get; set; }
public string Name { get; set; }
// TODO: MetadataUpdate add ProviderId
// MetadataUpdate add ProviderId
[ConcurrencyCheck]
public uint RowVersion { get; set; }

View File

@ -2,7 +2,6 @@
using System;
using System.IO;
using API.Entities.Enums;
using API.Extensions;
namespace API.Entities
{
@ -30,7 +29,7 @@ namespace API.Entities
// Methods
public bool HasFileBeenModified()
{
return new FileInfo(FilePath).DoesLastWriteMatch(LastModified);
return !File.GetLastWriteTime(FilePath).Equals(LastModified);
}
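HasFileBeenModified now compares the filesystem timestamp directly against the stored LastModified. A hypothetical helper illustrating the intent:

// Hypothetical: a file is reprocessed only when forced, or when its last-write
// time no longer matches what the database recorded at the previous scan.
public static bool NeedsRescan(MangaFile file, bool forceUpdate)
{
    return forceUpdate || file.HasFileBeenModified();
}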
}
}

View File

@ -1,11 +1,12 @@
using System;
using System.Collections.Generic;
using API.Entities.Enums;
using API.Entities.Interfaces;
using Microsoft.EntityFrameworkCore;
namespace API.Entities
{
[Index(nameof(Name), nameof(NormalizedName), nameof(LocalizedName), nameof(LibraryId), IsUnique = true)]
[Index(nameof(Name), nameof(NormalizedName), nameof(LocalizedName), nameof(LibraryId), nameof(Format), IsUnique = true)]
public class Series : IEntityDate
{
public int Id { get; set; }
@ -41,6 +42,11 @@ namespace API.Entities
/// </summary>
public int Pages { get; set; }
/// <summary>
/// The type of all the files attached to this series
/// </summary>
public MangaFormat Format { get; set; } = MangaFormat.Unknown;
public SeriesMetadata Metadata { get; set; }
// Relationships
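Because Format is now part of the unique index, the same name can exist twice in a library as long as the formats differ; only a true duplicate (same name variants, library, and format) violates the constraint. Illustrative, hypothetical data:

// Both rows can coexist after this change; under the old index they would collide.
var epub = new Series { Name = "Berserk", NormalizedName = "berserk", LocalizedName = "Berserk", LibraryId = 1, Format = MangaFormat.Epub };
var archive = new Series { Name = "Berserk", NormalizedName = "berserk", LocalizedName = "Berserk", LibraryId = 1, Format = MangaFormat.Archive };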

View File

@ -9,10 +9,6 @@ namespace API.Entities
public class SeriesMetadata : IHasConcurrencyToken
{
public int Id { get; set; }
/// <summary>
/// Publisher of book or manga/comic
/// </summary>
//public string Publisher { get; set; }
public ICollection<CollectionTag> CollectionTags { get; set; }

View File

@ -16,7 +16,7 @@ namespace API.Extensions
{
public static class ApplicationServiceExtensions
{
public static IServiceCollection AddApplicationServices(this IServiceCollection services, IConfiguration config, IWebHostEnvironment env)
public static void AddApplicationServices(this IServiceCollection services, IConfiguration config, IWebHostEnvironment env)
{
services.AddAutoMapper(typeof(AutoMapperProfiles).Assembly);
services.AddScoped<IStatsService, StatsService>();
@ -31,28 +31,29 @@ namespace API.Extensions
services.AddScoped<IBackupService, BackupService>();
services.AddScoped<ICleanupService, CleanupService>();
services.AddScoped<IBookService, BookService>();
services.AddScoped<IImageService, ImageService>();
services.AddSqLite(config, env);
services.AddLogging(loggingBuilder =>
{
var loggingSection = config.GetSection("Logging");
loggingBuilder.AddFile(loggingSection);
});
return services;
services.AddLogging(config);
}
private static IServiceCollection AddSqLite(this IServiceCollection services, IConfiguration config,
private static void AddSqLite(this IServiceCollection services, IConfiguration config,
IWebHostEnvironment env)
{
services.AddDbContext<DataContext>(options =>
{
options.UseSqlite(config.GetConnectionString("DefaultConnection"));
options.EnableSensitiveDataLogging(env.IsDevelopment() || Configuration.GetLogLevel(Program.GetAppSettingFilename()).Equals("Debug"));
options.EnableSensitiveDataLogging(env.IsDevelopment() || Configuration.LogLevel.Equals("Debug"));
});
}
return services;
private static void AddLogging(this IServiceCollection services, IConfiguration config)
{
services.AddLogging(loggingBuilder =>
{
var loggingSection = config.GetSection("Logging");
loggingBuilder.AddFile(loggingSection);
});
}
}
}
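With the return value dropped, AddApplicationServices can no longer be chained fluently; an assumed call site in Startup.ConfigureServices (Kavita's actual Startup may differ):

// Assumed call site; fields and surrounding registrations are illustrative.
public class Startup
{
    private readonly IConfiguration _config;
    private readonly IWebHostEnvironment _env;

    public Startup(IConfiguration config, IWebHostEnvironment env)
    {
        _config = config;
        _env = env;
    }

    public void ConfigureServices(IServiceCollection services)
    {
        services.AddApplicationServices(_config, _env); // returns void, not chainable
        services.AddControllers();
    }
}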

View File

@ -1,4 +1,5 @@
using System.IO;
using System;
using System.IO;
using System.Linq;
using API.Comparators;
@ -9,8 +10,20 @@ namespace API.Extensions
private static readonly NaturalSortComparer Comparer = new NaturalSortComparer();
public static void Empty(this DirectoryInfo directory)
{
foreach(FileInfo file in directory.EnumerateFiles()) file.Delete();
foreach(DirectoryInfo subDirectory in directory.EnumerateDirectories()) subDirectory.Delete(true);
// NOTE: We have this in DirectoryService.Empty(), do we need this here?
foreach(FileInfo file in directory.EnumerateFiles()) file.Delete();
foreach(DirectoryInfo subDirectory in directory.EnumerateDirectories()) subDirectory.Delete(true);
}
public static void RemoveNonImages(this DirectoryInfo directory)
{
foreach (var file in directory.EnumerateFiles())
{
if (!Parser.Parser.IsImage(file.FullName))
{
file.Delete();
}
}
}
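RemoveNonImages pairs naturally with extraction into the cache: after an archive or PDF is unpacked, anything that is not an image is deleted before paging. Hypothetical usage:

// Hypothetical path; drops e.g. ComicInfo.xml, leaving only files Parser.Parser.IsImage() accepts
var chapterCache = new DirectoryInfo("/kavita/cache/chapter-42");
chapterCache.RemoveNonImages();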
/// <summary>

View File

@ -1,6 +1,7 @@
using System.Collections.Generic;
using System.Linq;
using API.Entities;
using API.Entities.Enums;
using API.Parser;
namespace API.Extensions
@ -29,5 +30,16 @@ namespace API.Extensions
return chapter.IsSpecial ? infos.Any(v => v.Filename == chapter.Range)
: infos.Any(v => v.Chapters == chapter.Range);
}
/// <summary>
/// Returns the MangaFormat that is common to all the files. Returns Unknown when there are no infos; mixed formats should never happen (if they do, the first file's format wins).
/// </summary>
/// <param name="infos"></param>
/// <returns></returns>
public static MangaFormat GetFormat(this IList<ParserInfo> infos)
{
if (infos.Count == 0) return MangaFormat.Unknown;
return infos.DistinctBy(x => x.Format).First().Format;
}
}
}

View File

@ -1,6 +1,8 @@
using System.Collections.Generic;
using System.Linq;
using API.Entities;
using API.Parser;
using API.Services.Tasks.Scanner;
namespace API.Extensions
{
@ -14,7 +16,34 @@ namespace API.Extensions
/// <returns></returns>
public static bool NameInList(this Series series, IEnumerable<string> list)
{
return list.Any(name => Parser.Parser.Normalize(name) == series.NormalizedName || Parser.Parser.Normalize(name) == Parser.Parser.Normalize(series.Name) || name == series.Name || name == series.LocalizedName || name == series.OriginalName);
return list.Any(name => Parser.Parser.Normalize(name) == series.NormalizedName || Parser.Parser.Normalize(name) == Parser.Parser.Normalize(series.Name)
|| name == series.Name || name == series.LocalizedName || name == series.OriginalName || Parser.Parser.Normalize(name) == Parser.Parser.Normalize(series.OriginalName));
}
/// <summary>
/// Checks all of the Series' name variants for a match against anything in the list, including a check against the Format of the Series.
/// </summary>
/// <param name="series"></param>
/// <param name="list"></param>
/// <returns></returns>
public static bool NameInList(this Series series, IEnumerable<ParsedSeries> list)
{
return list.Any(name => Parser.Parser.Normalize(name.Name) == series.NormalizedName || Parser.Parser.Normalize(name.Name) == Parser.Parser.Normalize(series.Name)
|| name.Name == series.Name || name.Name == series.LocalizedName || name.Name == series.OriginalName || Parser.Parser.Normalize(name.Name) == Parser.Parser.Normalize(series.OriginalName) && series.Format == name.Format);
}
/// <summary>
/// Checks all of the Series' name variants for a match against the <see cref="ParserInfo"/>'s series name.
/// </summary>
/// <param name="series"></param>
/// <param name="info"></param>
/// <returns></returns>
public static bool NameInParserInfo(this Series series, ParserInfo info)
{
if (info == null) return false;
return Parser.Parser.Normalize(info.Series) == series.NormalizedName || Parser.Parser.Normalize(info.Series) == Parser.Parser.Normalize(series.Name)
|| info.Series == series.Name || info.Series == series.LocalizedName || info.Series == series.OriginalName
|| Parser.Parser.Normalize(info.Series) == Parser.Parser.Normalize(series.OriginalName);
}
}
}

View File

@ -1,5 +1,4 @@
using System;
using API.Interfaces.Services;
using API.Interfaces.Services;
using API.Services.Clients;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

View File

@ -19,11 +19,11 @@ namespace API.Extensions
/// If there are both specials and non-specials, then the first non-special will be returned.
/// </summary>
/// <param name="volumes"></param>
/// <param name="libraryType"></param>
/// <param name="seriesFormat"></param>
/// <returns></returns>
public static Volume GetCoverImage(this IList<Volume> volumes, LibraryType libraryType)
public static Volume GetCoverImage(this IList<Volume> volumes, MangaFormat seriesFormat)
{
if (libraryType == LibraryType.Book)
if (seriesFormat is MangaFormat.Epub or MangaFormat.Pdf)
{
return volumes.OrderBy(x => x.Number).FirstOrDefault();
}

View File

@ -10,10 +10,12 @@ namespace API.Interfaces
{
void Add(Library library);
void Update(Library library);
void Delete(Library library);
Task<IEnumerable<LibraryDto>> GetLibraryDtosAsync();
Task<bool> LibraryExists(string libraryName);
Task<Library> GetLibraryForIdAsync(int libraryId);
Task<Library> GetFullLibraryForIdAsync(int libraryId);
Task<Library> GetFullLibraryForIdAsync(int libraryId, int seriesId);
Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName);
Task<IEnumerable<Library>> GetLibrariesAsync();
Task<bool> DeleteLibrary(int libraryId);

View File

@ -11,6 +11,7 @@
void RefreshMetadata(int libraryId, bool forceUpdate = true);
void CleanupTemp();
void RefreshSeriesMetadata(int libraryId, int seriesId);
void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false);
void ScheduleStatsTasks();
void CancelStatsTasks();
}

View File

@ -3,7 +3,7 @@ using System.Threading.Tasks;
using API.Parser;
using VersOne.Epub;
namespace API.Interfaces
namespace API.Interfaces.Services
{
public interface IBookService
{
@ -22,5 +22,11 @@ namespace API.Interfaces
Task<string> ScopeStyles(string stylesheetHtml, string apiBase, string filename, EpubBookRef book);
string GetSummaryInfo(string filePath);
ParserInfo ParseInfo(string filePath);
/// <summary>
/// Extracts a PDF file's pages as images into a target directory
/// </summary>
/// <param name="fileFilePath"></param>
/// <param name="targetDirectory">Where the files will be extracted to. If doesn't exist, will be created.</param>
void ExtractPdfImages(string fileFilePath, string targetDirectory);
}
}

View File

@ -1,4 +1,5 @@
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Entities;
namespace API.Interfaces.Services
@ -22,7 +23,7 @@ namespace API.Interfaces.Services
/// Clears cache directory of all volumes. This can be invoked from deleting a library or a series.
/// </summary>
/// <param name="chapterIds">Volumes that belong to that library. Assume the library might have been deleted before this invocation.</param>
void CleanupChapters(int[] chapterIds);
void CleanupChapters(IEnumerable<int> chapterIds);
/// <summary>

View File

@ -25,5 +25,8 @@ namespace API.Interfaces.Services
IEnumerable<string> GetFiles(string path, string searchPatternExpression = "",
SearchOption searchOption = SearchOption.TopDirectoryOnly);
void CopyFileToDirectory(string fullFilePath, string targetDirectory);
public bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = "*");
}
}

View File

@ -0,0 +1,10 @@
using API.Entities;
namespace API.Interfaces.Services
{
public interface IImageService
{
byte[] GetCoverImage(string path, bool createThumbnail = false);
string GetCoverFile(MangaFile file);
}
}

View File

@ -1,4 +1,7 @@

using System.Threading;
using System.Threading.Tasks;
namespace API.Interfaces.Services
{
public interface IScannerService
@ -11,5 +14,6 @@ namespace API.Interfaces.Services
/// <param name="forceUpdate">Force overwriting for cover images</param>
void ScanLibrary(int libraryId, bool forceUpdate);
void ScanLibraries();
Task ScanSeries(int libraryId, int seriesId, bool forceUpdate, CancellationToken token);
}
}

View File

@ -1,5 +1,5 @@
using System.Threading.Tasks;
using API.DTOs;
using API.DTOs.Stats;
namespace API.Interfaces.Services
{

View File

@ -11,19 +11,38 @@ namespace API.Parser
{
public const string DefaultChapter = "0";
public const string DefaultVolume = "0";
private static readonly TimeSpan RegexTimeout = TimeSpan.FromMilliseconds(500);
public const string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|.cb7";
public const string BookFileExtensions = @"\.epub";
public const string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg)";
public static readonly Regex FontSrcUrlRegex = new Regex(@"(src:url\(.{1})" + "([^\"']*)" + @"(.{1}\))", RegexOptions.IgnoreCase | RegexOptions.Compiled);
public static readonly Regex CssImportUrlRegex = new Regex("(@import\\s[\"|'])(?<Filename>[\\w\\d/\\._-]+)([\"|'];?)", RegexOptions.IgnoreCase | RegexOptions.Compiled);
public const string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|\.cb7|\.cbt";
public const string BookFileExtensions = @"\.epub|\.pdf";
public const string SupportedExtensions =
ArchiveFileExtensions + "|" + ImageFileExtensions + "|" + BookFileExtensions;
public static readonly Regex FontSrcUrlRegex = new Regex(@"(src:url\(.{1})" + "([^\"']*)" + @"(.{1}\))",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
public static readonly Regex CssImportUrlRegex = new Regex("(@import\\s[\"|'])(?<Filename>[\\w\\d/\\._-]+)([\"|'];?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
private static readonly string XmlRegexExtensions = @"\.xml";
private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex ArchiveFileRegex = new Regex(ArchiveFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex XmlRegex = new Regex(XmlRegexExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex BookFileRegex = new Regex(BookFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex CoverImageRegex = new Regex(@"(?<![[a-z]\d])(?:!?)(cover|folder)(?![\w\d])", RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex ImageRegex = new Regex(ImageFileExtensions,
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
private static readonly Regex ArchiveFileRegex = new Regex(ArchiveFileExtensions,
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
private static readonly Regex XmlRegex = new Regex(XmlRegexExtensions,
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
private static readonly Regex BookFileRegex = new Regex(BookFileExtensions,
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
private static readonly Regex CoverImageRegex = new Regex(@"(?<![[a-z]\d])(?:!?)(cover|folder)(?![\w\d])",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
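Every pattern in this file now passes the 500 ms RegexTimeout, so a pathological filename aborts matching with RegexMatchTimeoutException instead of pinning a scanner thread. A self-contained illustration of the mechanism (not Kavita code):

using System;
using System.Text.RegularExpressions;

var timeout = TimeSpan.FromMilliseconds(500);
// (a+)+$ backtracks exponentially on a long run of 'a' that ultimately fails to match
var pathological = new Regex(@"(a+)+$", RegexOptions.Compiled, timeout);
try
{
    pathological.IsMatch(new string('a', 40) + "!");
}
catch (RegexMatchTimeoutException)
{
    Console.WriteLine("match aborted after 500 ms instead of hanging");
}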
private static readonly Regex[] MangaVolumeRegex = new[]
@ -31,151 +50,200 @@ namespace API.Parser
// Dance in the Vampire Bund v16-17
new Regex(
@"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d+)( |_)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar
new Regex(
@"(?<Series>.*)(\b|_)(?!\[)(vol\.?)(?<Volume>\d+(-\d+)?)(?!\])",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Historys Strongest Disciple Kenichi_v11_c90-98.zip or Dance in the Vampire Bund v16-17
new Regex(
@"(?<Series>.*)(\b|_)(?!\[)v(?<Volume>\d+(-\d+)?)(?!\])",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Kodomo no Jikan vol. 10
new Regex(
@"(?<Series>.*)(\b|_)(vol\.? ?)(?<Volume>\d+(-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
new Regex(
@"(vol\.? ?)(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Tonikaku Cawaii [Volume 11].cbz
new Regex(
@"(volume )(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Tower Of God S01 014 (CBT) (digital).cbz
new Regex(
@"(?<Series>.*)(\b|_|)(S(?<Volume>\d+))",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
};
private static readonly Regex[] MangaSeriesRegex = new[]
{
// [SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar
// Grand Blue Dreaming - SP02
new Regex(
@"^(?<Series>.*)( |_)Vol\.?\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
@"(?<Series>.*)(\b|_|-|\s)(?:sp)\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// [SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar, Yuusha Ga Shinda! - Vol.tbd Chapter 27.001 V2 Infection ①.cbz
new Regex(
@"^(?<Series>.*)( |_)Vol\.?(\d+|tbd)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip, VanDread-v01-c01.zip
new Regex(
@"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d*)( |_|-)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
@"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d*)(\s|_|-)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA], Black Bullet - v4 c17 [batoto]
new Regex(
@"(?<Series>.*)( - )(?:v|vo|c)\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// [dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz
new Regex(
@"(?<Series>.*) (\b|_|-)(vol)\.?",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Kedouin Makoto - Corpse Party Musume, Chapter 19 [Dametrans].zip
new Regex(
@"(?<Series>.*)(?:, Chapter )(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Mad Chimera World - Volume 005 - Chapter 026.cbz (couldn't figure out how to get the Volume negative lookaround working in the regex below)
new Regex(
@"(?<Series>.*)(\s|_|-)(?:Volume(\s|_|-)+\d+)(\s|_|-)+(?:Chapter)(\s|_|-)(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Please Go Home, Akutsu-San! - Chapter 038.5 - Volume Announcement.cbz
new Regex(
@"(?<Series>.*)(\s|_|-)(?!Vol)(\s|_|-)(?:Chapter)(\s|_|-)(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// [dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz
new Regex(
@"(?<Series>.*) (\b|_|-)(vol)\.?",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
//Knights of Sidonia c000 (S2 LE BD Omake - BLAME!) [Habanero Scans]
new Regex(
@"(?<Series>.*)(\bc\d+\b)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
//Tonikaku Cawaii [Volume 11], Darling in the FranXX - Volume 01.cbz
new Regex(
@"(?<Series>.*)(?: _|-|\[|\()\s?vol(ume)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Momo The Blood Taker - Chapter 027 Violent Emotion.cbz
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Momo The Blood Taker - Chapter 027 Violent Emotion.cbz, Grand Blue Dreaming - SP02 Extra (2019) (Digital) (danke-Empire).cbz
new Regex(
@"(?<Series>.*)(\b|_|-|\s)(?:chapter)(\b|_|-|\s)\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
@"(?<Series>.*)(\b|_|-|\s)(?:(chapter(\b|_|-|\s))|sp)\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
new Regex(
@"(?<Series>.*) (\b|_|-)(v|ch\.?|c)\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
//Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip must be before [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
// due to duplicate version identifiers in file.
new Regex(
@"(?<Series>.*)(v|s)\d+(-\d+)?(_|\s)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
//[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
new Regex(
@"(?<Series>.*)(v|s)\d+(-\d+)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz
new Regex(
@"(?<Series>.*) (?<Chapter>\d+) (?:\(\d{4}\)) ",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire)
new Regex(
@"(?<Series>.*) (?<Chapter>\d+(?:.\d+|-\d+)?) \(\d{4}\)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Noblesse - Episode 429 (74 Pages).7z
new Regex(
@"(?<Series>.*)(\s|_)(?:Episode|Ep\.?)(\s|_)(?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)
new Regex(
@"(?<Series>.*)\(\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Tonikaku Kawaii (Ch 59-67) (Ongoing)
new Regex(
@"(?<Series>.*)(\s|_)\((c\s|ch\s|chapter\s)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Black Bullet (This is very loose, keep towards bottom)
new Regex(
@"(?<Series>.*)(_)(v|vo|c|volume)( |_)\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar
new Regex(
@"(?<Series>.*)( |_)(vol\d+)?( |_)(?:Chp\.? ?\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1
new Regex(
@"(?<Series>.*)( |_)(?:Chp.? ?\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Chapter 01
new Regex(
@"^(?!Vol)(?<Series>.*)( |_)Chapter( |_)(\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Fullmetal Alchemist chapters 101-108.cbz
new Regex(
@"^(?!vol)(?<Series>.*)( |_)(chapters( |_)?)\d+-?\d*",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1
new Regex(
@"^(?!Vol\.?)(?<Series>.*)( |_|-)(?<!-)(episode ?)\d+-?\d*",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
@"^(?!Vol\.?)(?<Series>.*)( |_|-)(?<!-)(episode|chapter|(ch\.?) ?)\d+-?\d*",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Baketeriya ch01-05.zip
new Regex(
@"^(?!Vol)(?<Series>.*)ch\d+-?\d?",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Magi - Ch.252-005.cbz
new Regex(
@"(?<Series>.*)( ?- ?)Ch\.\d+-?\d*",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// [BAA]_Darker_than_Black_Omake-1.zip
new Regex(
@"^(?!Vol)(?<Series>.*)(-)\d+-?\d*", // This catches a lot of stuff ^(?!Vol)(?<Series>.*)( |_)(\d+)
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Kodoja #001 (March 2016)
new Regex(
@"(?<Series>.*)(\s|_|-)#",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Baketeriya ch01-05.zip, Akiiro Bousou Biyori - 01.jpg, Beelzebub_172_RHS.zip, Cynthia the Mission 29.rar
new Regex(
@"^(?!Vol\.?)(?<Series>.*)( |_|-)(?<!-)(ch)?\d+-?\d*",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// [BAA]_Darker_than_Black_c1 (This is very greedy, make sure it's close to last)
new Regex(
@"^(?!Vol)(?<Series>.*)( |_|-)(ch?)\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
};
private static readonly Regex[] ComicSeriesRegex = new[]
@ -183,51 +251,63 @@ namespace API.Parser
// Invincible Vol 01 Family matters (2005) (Digital)
new Regex(
@"(?<Series>.*)(\b|_)(vol\.?)( |_)(?<Volume>\d+(-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)
new Regex(
@"^(?<Volume>\d+) (- |_)?(?<Series>.*(\d{4})?)( |_)(\(|\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// 01 Spider-Man & Wolverine 01.cbr
new Regex(
@"^(?<Volume>\d+) (?:- )?(?<Series>.*) (\d+)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Batman & Wildcat (1 of 3)
new Regex(
@"(?<Series>.*(\d{4})?)( |_)(?:\((?<Volume>\d+) of \d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.*)(?: |_)v\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Amazing Man Comics chapter 25
new Regex(
@"^(?<Series>.*)(?: |_)c(hapter) \d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Amazing Man Comics issue #25
new Regex(
@"^(?<Series>.*)(?: |_)i(ssue) #\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.*)(?: \d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Batman & Robin the Teen Wonder #0
new Regex(
@"^(?<Series>.*)(?: |_)#\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)
new Regex(
@"^(?<Series>.*)(?: |_)(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// The First Asterix Frieze (WebP by Doc MaKS)
new Regex(
@"^(?<Series>.*)(?: |_)(?!\(\d{4}|\d{4}-\d{2}\))\(",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// MUST BE LAST: Batman & Daredevil - King of New York
new Regex(
@"^(?<Series>.*)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
};
private static readonly Regex[] ComicVolumeRegex = new[]
@ -235,78 +315,85 @@ namespace API.Parser
// 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)
new Regex(
@"^(?<Volume>\d+) (- |_)?(?<Series>.*(\d{4})?)( |_)(\(|\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// 01 Spider-Man & Wolverine 01.cbr
new Regex(
@"^(?<Volume>\d+) (?:- )?(?<Series>.*) (\d+)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Batman & Wildcat (1 of 3)
new Regex(
@"(?<Series>.*(\d{4})?)( |_)(?:\((?<Chapter>\d+) of \d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.*)(?: |_)v(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)
new Regex(
@"^(?<Series>.*)(?<!c(hapter)|i(ssue))(?<!of)(?: |_)(?<!of )(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.*)(?<!c(hapter)|i(ssue))(?<!of)(?: (?<Volume>\d+))",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Batman & Robin the Teen Wonder #0
new Regex(
@"^(?<Series>.*)(?: |_)#(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
};
private static readonly Regex[] ComicChapterRegex = new[]
{
// // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)
// new Regex(
// @"^(?<Volume>\d+) (- |_)?(?<Series>.*(\d{4})?)( |_)(\(|\d+)",
// RegexOptions.IgnoreCase | RegexOptions.Compiled),
// // 01 Spider-Man & Wolverine 01.cbr
// new Regex(
// @"^(?<Volume>\d+) (?:- )?(?<Series>.*) (\d+)?", // NOTE: WHy is this here without a capture group
// RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Batman & Wildcat (1 of 3)
// Batman & Wildcat (1 of 3)
new Regex(
@"(?<Series>.*(\d{4})?)( |_)(?:\((?<Chapter>\d+) of \d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.*)(?: |_)v(?<Volume>\d+)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.*)(?: (?<Volume>\d+))",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Batman & Robin the Teen Wonder #0
new Regex(
@"^(?<Series>.*)(?: |_)#(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr
new Regex(
@"^(?<Series>.*)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)-",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Amazing Man Comics chapter 25
new Regex(
@"^(?!Vol)(?<Series>.*)( |_)c(hapter)( |_)(?<Chapter>\d*)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Amazing Man Comics issue #25
new Regex(
@"^(?!Vol)(?<Series>.*)( |_)i(ssue)( |_) #(?<Chapter>\d*)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
};
private static readonly Regex[] ReleaseGroupRegex = new[]
{
// [TrinityBAKumA Finella&anon], [BAA]_, [SlowManga&OverloadScans], [batoto]
new Regex(@"(?:\[(?<subgroup>(?!\s).+?(?<!\s))\](?:_|-|\s|\.)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// (Shadowcat-Empire),
// new Regex(@"(?:\[(?<subgroup>(?!\s).+?(?<!\s))\](?:_|-|\s|\.)?)",
// RegexOptions.IgnoreCase | RegexOptions.Compiled),
@ -317,58 +404,76 @@ namespace API.Parser
// Historys Strongest Disciple Kenichi_v11_c90-98.zip, ...c90.5-100.5
new Regex(
@"(\b|_)(c|ch)(\.?\s?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
new Regex(
@"v\d+\.(?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz (Rare case, if causes issue remove)
new Regex(
@"^(?<Series>.*)(?: |_)#(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Green Worldz - Chapter 027
new Regex(
@"^(?!Vol)(?<Series>.*)\s?(?<!vol\. )\sChapter\s(?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
@"^(?!Vol)(?<Series>.*)\s?(?<!vol\. )\sChapter\s(?<Chapter>\d+(?:\.?[\d-])?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz, Hinowa ga CRUSH! 018.5 (2019) (Digital) (LuCaZ).cbz
new Regex(
@"^(?!Vol)(?<Series>.*) (?<!vol\. )(?<Chapter>\d+(?:.\d+|-\d+)?)(?: \(\d{4}\))?(\b|_|-)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
@"^(?!Vol)(?<Series>.*)\s(?<!vol\. )(?<Chapter>\d+(?:.\d+|-\d+)?)(?:\s\(\d{4}\))?(\b|_|-)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Tower Of God S01 014 (CBT) (digital).cbz
new Regex(
@"(?<Series>.*) S(?<Volume>\d+) (?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
@"(?<Series>.*)\sS(?<Volume>\d+)\s(?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Beelzebub_01_[Noodles].zip, Beelzebub_153b_RHS.zip
new Regex(
@"^((?!v|vo|vol|Volume).)*( |_)(?<Chapter>\.?\d+(?:.\d+|-\d+)?)(?<ChapterPart>b)?( |_|\[|\()",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
@"^((?!v|vo|vol|Volume).)*(\s|_)(?<Chapter>\.?\d+(?:.\d+|-\d+)?)(?<ChapterPart>b)?(\s|_|\[|\()",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Yumekui-Merry_DKThias_Chapter21.zip
new Regex(
@"Chapter(?<Chapter>\d+(-\d+)?)", //(?:.\d+|-\d+)?
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar
new Regex(
@"(?<Series>.*)( |_)(vol\d+)?( |_)Chp\.? ?(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
@"(?<Series>.*)(\s|_)(vol\d+)?(\s|_)Chp\.? ?(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Vol 1 Chapter 2
new Regex(
@"(?<Volume>((vol|volume|v))?(\s|_)?\.?\d+)(\s|_)(Chp|Chapter)\.?(\s|_)?(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
};
private static readonly Regex[] MangaEditionRegex = {
// Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz
new Regex(
@"(?<Edition>({|\(|\[).* Edition(}|\)|\]))",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz
new Regex(
@"(\b|_)(?<Edition>Omnibus(( |_)?Edition)?)(\b|_)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// To Love Ru v01 Uncensored (Ch.001-007)
new Regex(
@"(\b|_)(?<Edition>Uncensored)(\b|_)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// AKIRA - c003 (v01) [Full Color] [Darkhorse].cbz
new Regex(
@"(\b|_)(?<Edition>Full(?: |_)Color)(\b|_)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
};
private static readonly Regex[] CleanupRegex =
@ -376,15 +481,18 @@ namespace API.Parser
// (), {}, []
new Regex(
@"(?<Cleanup>(\{\}|\[\]|\(\)))",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// (Complete)
new Regex(
@"(?<Cleanup>(\{Complete\}|\[Complete\]|\(Complete\)))",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Anything in parenthesis
new Regex(
@"\(.*\)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
};
private static readonly Regex[] MangaSpecialRegex =
@ -392,13 +500,15 @@ namespace API.Parser
// All keywords; this does not check whether the name contains volume/chapter identification. Parser.Parse() will handle that.
new Regex(
@"(?<Special>Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories|Bonus)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
};
// If SP\d+ is in the filename, we forcibly treat it as a special, regardless of whether a volume or chapter might have been found.
private static readonly Regex SpecialMarkerRegex = new Regex(
@"(?<Special>SP\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout
);
@ -415,7 +525,7 @@ namespace API.Parser
var fileName = Path.GetFileName(filePath);
ParserInfo ret;
if (type == LibraryType.Book)
if (IsEpub(filePath))
{
ret = new ParserInfo()
{
@ -441,31 +551,18 @@ namespace API.Parser
};
}
if (ret.Series == string.Empty)
if (IsImage(filePath))
{
// Reset Chapters, Volumes, and Series as images are not good to parse information out of. Better to use folders.
ret.Volumes = DefaultVolume;
ret.Chapters = DefaultChapter;
ret.Series = string.Empty;
}
if (ret.Series == string.Empty || IsImage(filePath))
{
// Try to parse information out of each folder all the way to rootPath
var fallbackFolders = DirectoryService.GetFoldersTillRoot(rootPath, Path.GetDirectoryName(filePath)).ToList();
for (var i = 0; i < fallbackFolders.Count; i++)
{
var folder = fallbackFolders[i];
if (!string.IsNullOrEmpty(ParseMangaSpecial(folder))) continue;
if (ParseVolume(folder) != DefaultVolume || ParseChapter(folder) != DefaultChapter) continue;
var series = ParseSeries(folder);
if ((string.IsNullOrEmpty(series) && i == fallbackFolders.Count - 1))
{
ret.Series = CleanTitle(folder);
break;
}
if (!string.IsNullOrEmpty(series))
{
ret.Series = series;
break;
}
}
ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
}
var edition = ParseEdition(fileName);
@ -488,21 +585,80 @@ namespace API.Parser
ret.IsSpecial = true;
ret.Chapters = DefaultChapter;
ret.Volumes = DefaultVolume;
ParseFromFallbackFolders(filePath, rootPath, type, ref ret);
}
// Here is the issue: if this is a special with a marker, we need to ensure we use the correct series name.
// We can do this by falling back to the folder parse.
if (string.IsNullOrEmpty(ret.Series))
{
ret.Series = CleanTitle(fileName);
}
// PDFs may have .pdf in the series name; remove it
if (IsPdf(fileName) && ret.Series.ToLower().EndsWith(".pdf"))
{
ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length);
}
return ret.Series == string.Empty ? null : ret;
}
/// <summary>
/// Populates the given ParserInfo's Series (and Volumes/Chapters when still at their defaults) by parsing each folder between the file and the library root.
/// </summary>
/// <param name="filePath">Full path of the file being parsed</param>
/// <param name="rootPath">Root folder of the library the file belongs to</param>
/// <param name="type">Library type; selects manga vs comic parsing rules</param>
/// <param name="ret">Expects a non-null ParserInfo which this method will populate</param>
public static void ParseFromFallbackFolders(string filePath, string rootPath, LibraryType type, ref ParserInfo ret)
{
var fallbackFolders = DirectoryService.GetFoldersTillRoot(rootPath, filePath).ToList();
for (var i = 0; i < fallbackFolders.Count; i++)
{
var folder = fallbackFolders[i];
if (!string.IsNullOrEmpty(ParseMangaSpecial(folder))) continue;
var parsedVolume = type is LibraryType.Manga ? ParseVolume(folder) : ParseComicVolume(folder);
var parsedChapter = type is LibraryType.Manga ? ParseChapter(folder) : ParseComicChapter(folder);
if (!parsedVolume.Equals(DefaultVolume) || !parsedChapter.Equals(DefaultChapter))
{
if ((ret.Volumes.Equals(DefaultVolume) || string.IsNullOrEmpty(ret.Volumes)) && !parsedVolume.Equals(DefaultVolume))
{
ret.Volumes = parsedVolume;
}
if ((ret.Chapters.Equals(DefaultChapter) || string.IsNullOrEmpty(ret.Chapters)) && !parsedChapter.Equals(DefaultChapter))
{
ret.Chapters = parsedChapter;
}
continue;
}
var series = ParseSeries(folder);
if ((string.IsNullOrEmpty(series) && i == fallbackFolders.Count - 1))
{
ret.Series = CleanTitle(folder);
break;
}
if (!string.IsNullOrEmpty(series))
{
ret.Series = series;
break;
}
}
}
public static MangaFormat ParseFormat(string filePath)
{
if (IsArchive(filePath)) return MangaFormat.Archive;
if (IsImage(filePath)) return MangaFormat.Image;
if (IsEpub(filePath)) return MangaFormat.Epub;
if (IsPdf(filePath)) return MangaFormat.Pdf;
return MangaFormat.Unknown;
}
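A quick usage sketch of the format resolution above (paths invented; each extension family maps to one MangaFormat):

var archive = Parser.Parser.ParseFormat("/manga/Vol 01.cbz");   // MangaFormat.Archive
var image   = Parser.Parser.ParseFormat("/manga/page 001.png"); // MangaFormat.Image
var epub    = Parser.Parser.ParseFormat("/books/title.epub");   // MangaFormat.Epub
var pdf     = Parser.Parser.ParseFormat("/books/title.pdf");    // MangaFormat.Pdf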
@ -890,5 +1046,10 @@ namespace API.Parser
{
return Path.GetExtension(filePath).ToLower() == ".epub";
}
public static bool IsPdf(string filePath)
{
return Path.GetExtension(filePath).ToLower() == ".pdf";
}
}
}

View File

@ -5,7 +5,6 @@ using System.Threading;
using System.Threading.Tasks;
using API.Data;
using API.Entities;
using API.Services.HostedServices;
using Kavita.Common;
using Kavita.Common.EnvironmentInfo;
using Microsoft.AspNetCore.Hosting;
@ -19,132 +18,117 @@ using Sentry;
namespace API
{
public class Program
{
    private static readonly int HttpPort = Configuration.Port;

    protected Program()
    {
    }

    public static async Task Main(string[] args)
    {
        Console.OutputEncoding = System.Text.Encoding.UTF8;

        // Before anything, check if JWT has been generated properly or if user still has default
        if (!Configuration.CheckIfJwtTokenSet() &&
            Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") != Environments.Development)
        {
            Console.WriteLine("Generating JWT TokenKey for encrypting user sessions...");
            var rBytes = new byte[128];
            using (var crypto = new RNGCryptoServiceProvider()) crypto.GetBytes(rBytes);
            Configuration.JwtToken = Convert.ToBase64String(rBytes).Replace("/", string.Empty);
        }

        var host = CreateHostBuilder(args).Build();

        using var scope = host.Services.CreateScope();
        var services = scope.ServiceProvider;
        try
        {
            var context = services.GetRequiredService<DataContext>();
            var roleManager = services.GetRequiredService<RoleManager<AppRole>>();
            // Apply all migrations on startup
            await context.Database.MigrateAsync();
            await Seed.SeedRoles(roleManager);
            await Seed.SeedSettings(context);
        }
        catch (Exception ex)
        {
            var logger = services.GetRequiredService<ILogger<Program>>();
            logger.LogError(ex, "An error occurred during migration");
        }

        await host.RunAsync();
    }

    private static IHostBuilder CreateHostBuilder(string[] args) =>
        Host.CreateDefaultBuilder(args)
            .ConfigureWebHostDefaults(webBuilder =>
            {
                webBuilder.UseKestrel((opts) =>
                {
                    opts.ListenAnyIP(HttpPort, options => { options.Protocols = HttpProtocols.Http1AndHttp2; });
                });

                var environment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
                if (environment != Environments.Development)
                {
                    webBuilder.UseSentry(options =>
                    {
                        options.Dsn = "https://40f4e7b49c094172a6f99d61efb2740f@o641015.ingest.sentry.io/5757423";
                        options.MaxBreadcrumbs = 200;
                        options.AttachStacktrace = true;
                        options.Debug = false;
                        options.SendDefaultPii = false;
                        options.DiagnosticLevel = SentryLevel.Debug;
                        options.ShutdownTimeout = TimeSpan.FromSeconds(5);
                        options.Release = BuildInfo.Version.ToString();
                        options.AddExceptionFilterForType<OutOfMemoryException>();
                        options.AddExceptionFilterForType<NetVips.VipsException>();
                        options.AddExceptionFilterForType<InvalidDataException>();
                        options.AddExceptionFilterForType<KavitaException>();

                        options.BeforeSend = sentryEvent =>
                        {
                            // Ignore known, expected noise (cover generation, EPUB parsing, permission errors)
                            if (sentryEvent.Exception != null
                                && (sentryEvent.Exception.Message.StartsWith("[GetCoverImage]")
                                    || sentryEvent.Exception.Message.StartsWith("[BookService]")
                                    || sentryEvent.Exception.Message.StartsWith("[ExtractArchive]")
                                    || sentryEvent.Exception.Message.StartsWith("[GetSummaryInfo]")
                                    || sentryEvent.Exception.Message.StartsWith("[GetNumberOfPagesFromArchive]")
                                    || sentryEvent.Exception.Message.Contains("EPUB parsing error")
                                    || sentryEvent.Exception.Message.Contains("Unsupported EPUB version")
                                    || sentryEvent.Exception.Message.Contains("Incorrect EPUB")
                                    || sentryEvent.Exception.Message.Contains("Access is Denied")))
                            {
                                return null; // Don't send this event to Sentry
                            }

                            sentryEvent.ServerName = null; // Never send Server Name to Sentry
                            return sentryEvent;
                        };

                        options.ConfigureScope(scope =>
                        {
                            scope.User = new User()
                            {
                                Id = HashUtil.AnonymousToken()
                            };
                            scope.Contexts.App.Name = BuildInfo.AppName;
                            scope.Contexts.App.Version = BuildInfo.Version.ToString();
                            scope.Contexts.App.StartTime = DateTime.UtcNow;
                            scope.Contexts.App.Hash = HashUtil.AnonymousToken();
                            scope.Contexts.App.Build = BuildInfo.Release;
                            scope.SetTag("culture", Thread.CurrentThread.CurrentCulture.Name);
                            scope.SetTag("branch", BuildInfo.Branch);
                        });
                    });
                }

                webBuilder.UseStartup<Startup>();
            });
}
}

View File

@ -28,7 +28,6 @@ namespace API.Services
{
private readonly ILogger<ArchiveService> _logger;
private readonly IDirectoryService _directoryService;
private const int ThumbnailWidth = 320; // 153w x 230h
private static readonly RecyclableMemoryStreamManager StreamManager = new();
private readonly NaturalSortComparer _comparer;
@ -261,7 +260,7 @@ namespace API.Services
}
try
{
using var thumbnail = Image.ThumbnailStream(stream, MetadataService.ThumbnailWidth);
return thumbnail.WriteToBuffer(formatExtension);
}
catch (Exception ex)
@ -302,7 +301,6 @@ namespace API.Services
entry.WriteTo(ms);
ms.Position = 0;
var serializer = new XmlSerializer(typeof(ComicInfo));
var info = (ComicInfo) serializer.Deserialize(ms);
return info;
@ -384,15 +382,15 @@ namespace API.Services
private void ExtractArchiveEntries(ZipArchive archive, string extractPath)
{
// TODO: In cases where we try to extract, but there are InvalidPathChars, we need to inform the user
var needsFlattening = ArchiveNeedsFlattening(archive);
if (!archive.HasFiles() && !needsFlattening) return;
archive.ExtractToDirectory(extractPath, true);
if (!needsFlattening) return;

_logger.LogDebug("Extracted archive is nested in root folder, flattening...");
new DirectoryInfo(extractPath).Flatten();
}
/// <summary>

View File

@ -1,32 +1,41 @@
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using System.Web;
using API.Entities.Enums;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
using Docnet.Core;
using Docnet.Core.Converters;
using Docnet.Core.Models;
using Docnet.Core.Readers;
using ExCSS;
using HtmlAgilityPack;
using Microsoft.Extensions.Logging;
using NetVips;
using Microsoft.IO;
using VersOne.Epub;
using Image = NetVips.Image;
using Point = System.Drawing.Point;
namespace API.Services
{
public class BookService : IBookService
{
private readonly ILogger<BookService> _logger;
private const int ThumbnailWidth = 320; // 153w x 230h
private readonly StylesheetParser _cssParser = new ();
private static readonly RecyclableMemoryStreamManager StreamManager = new ();
public BookService(ILogger<BookService> logger)
{
_logger = logger;
}
private static bool HasClickableHrefPart(HtmlNode anchor)
@ -159,7 +168,8 @@ namespace API.Services
public string GetSummaryInfo(string filePath)
{
if (!IsValidFile(filePath) || Parser.Parser.IsPdf(filePath)) return string.Empty;
try
{
@ -184,18 +194,24 @@ namespace API.Services
if (Parser.Parser.IsBook(filePath)) return true;
_logger.LogWarning("[BookService] Book {EpubFile} is not a valid EPUB", filePath);
_logger.LogWarning("[BookService] Book {EpubFile} is not a valid EPUB/PDF", filePath);
return false;
}
public int GetNumberOfPages(string filePath)
{
if (!IsValidFile(filePath)) return 0;
try
{
if (Parser.Parser.IsPdf(filePath))
{
using var docReader = DocLib.Instance.GetDocReader(filePath, new PageDimensions(1080, 1920));
return docReader.GetPageCount();
}
using var epubBook = EpubReader.OpenBook(filePath);
return epubBook.Content.Html.Count;
}
catch (Exception ex)
{
@ -233,14 +249,16 @@ namespace API.Services
/// <summary>
/// Parses out Title from book. Chapters and Volumes will always be "0". If there is any exception reading book (malformed books)
/// then null is returned. This expects only an epub file.
/// </summary>
/// <param name="filePath"></param>
/// <returns></returns>
public ParserInfo ParseInfo(string filePath)
{
if (!Parser.Parser.IsEpub(filePath)) return null;

try
{
using var epubBook = EpubReader.OpenBook(filePath);
// If the epub has the following tags, we can group the books as Volumes
@ -303,9 +321,9 @@ namespace API.Services
}
return new ParserInfo()
{
Chapters = "0",
Edition = "",
Format = MangaFormat.Book,
Chapters = Parser.Parser.DefaultChapter,
Edition = string.Empty,
Format = MangaFormat.Epub,
Filename = Path.GetFileName(filePath),
Title = specialName,
FullFilePath = filePath,
@ -322,23 +340,48 @@ namespace API.Services
return new ParserInfo()
{
Chapters = "0",
Edition = "",
Format = MangaFormat.Book,
Chapters = Parser.Parser.DefaultChapter,
Edition = string.Empty,
Format = MangaFormat.Epub,
Filename = Path.GetFileName(filePath),
Title = epubBook.Title,
FullFilePath = filePath,
IsSpecial = false,
Series = epubBook.Title,
Volumes = "0"
Volumes = Parser.Parser.DefaultVolume
};
}
catch (Exception ex)
{
    _logger.LogWarning(ex, "[BookService] There was an exception when opening epub book: {FileName}", filePath);
}

return null;
}
private static void AddBytesToBitmap(Bitmap bmp, byte[] rawBytes)
{
var rect = new Rectangle(0, 0, bmp.Width, bmp.Height);
var bmpData = bmp.LockBits(rect, ImageLockMode.WriteOnly, bmp.PixelFormat);
var pNative = bmpData.Scan0;
Marshal.Copy(rawBytes, 0, pNative, rawBytes.Length);
bmp.UnlockBits(bmpData);
}
public void ExtractPdfImages(string fileFilePath, string targetDirectory)
{
DirectoryService.ExistOrCreate(targetDirectory);
using var docReader = DocLib.Instance.GetDocReader(fileFilePath, new PageDimensions(1080, 1920));
var pages = docReader.GetPageCount();
using var stream = StreamManager.GetStream("BookService.GetPdfPage");
for (var pageNumber = 0; pageNumber < pages; pageNumber++)
{
GetPdfPage(docReader, pageNumber, stream);
File.WriteAllBytes(Path.Combine(targetDirectory, "Page-" + pageNumber + ".png"), stream.ToArray());
}
}
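A hypothetical usage of the new PDF extraction path (paths invented; the render size comes from the PageDimensions above):

// Renders every page of the PDF into the cache folder as Page-0.png, Page-1.png, ...
IBookService bookService = services.GetRequiredService<IBookService>(); // assuming DI, as elsewhere in Kavita
bookService.ExtractPdfImages("/manga/My Series/My Series v01.pdf", "/kavita/cache/42/1");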
@ -346,6 +389,11 @@ namespace API.Services
{
if (!IsValidFile(fileFilePath)) return Array.Empty<byte>();
if (Parser.Parser.IsPdf(fileFilePath))
{
return GetPdfCoverImage(fileFilePath, createThumbnail);
}
using var epubBook = EpubReader.OpenBook(fileFilePath);
@ -358,15 +406,12 @@ namespace API.Services
if (coverImageContent == null) return Array.Empty<byte>();
if (!createThumbnail) return coverImageContent.ReadContent();

using var stream = StreamManager.GetStream("BookService.GetCoverImage", coverImageContent.ReadContent());
using var thumbnail = Image.ThumbnailStream(stream, MetadataService.ThumbnailWidth);
return thumbnail.WriteToBuffer(".jpg");
}
catch (Exception ex)
{
@ -376,6 +421,50 @@ namespace API.Services
return Array.Empty<byte>();
}
private byte[] GetPdfCoverImage(string fileFilePath, bool createThumbnail)
{
try
{
using var docReader = DocLib.Instance.GetDocReader(fileFilePath, new PageDimensions(1080, 1920));
if (docReader.GetPageCount() == 0) return Array.Empty<byte>();
using var stream = StreamManager.GetStream("BookService.GetPdfPage");
GetPdfPage(docReader, 0, stream);
if (!createThumbnail) return stream.ToArray();
using var thumbnail = Image.ThumbnailStream(stream, MetadataService.ThumbnailWidth);
return thumbnail.WriteToBuffer(".png");
}
catch (Exception ex)
{
_logger.LogWarning(ex,
"[BookService] There was a critical error and prevented thumbnail generation on {BookFile}. Defaulting to no cover image",
fileFilePath);
}
return Array.Empty<byte>();
}
private static void GetPdfPage(IDocReader docReader, int pageNumber, Stream stream)
{
using var pageReader = docReader.GetPageReader(pageNumber);
var rawBytes = pageReader.GetImage(new NaiveTransparencyRemover());
var width = pageReader.GetPageWidth();
var height = pageReader.GetPageHeight();
using var bmp = new Bitmap(width, height, PixelFormat.Format32bppArgb);
AddBytesToBitmap(bmp, rawBytes);
// Removes 1px margin on left/right side after bitmap is copied out
for (var y = 0; y < bmp.Height; y++)
{
bmp.SetPixel(bmp.Width - 1, y, bmp.GetPixel(bmp.Width - 2, y));
}
stream.Seek(0, SeekOrigin.Begin);
bmp.Save(stream, ImageFormat.Jpeg);
stream.Seek(0, SeekOrigin.Begin);
}
private static string RemoveWhiteSpaceFromStylesheets(string body)
{
body = Regex.Replace(body, @"[a-zA-Z]+#", "#");

View File

@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
@ -18,16 +19,18 @@ namespace API.Services
private readonly IUnitOfWork _unitOfWork;
private readonly IArchiveService _archiveService;
private readonly IDirectoryService _directoryService;
private readonly IBookService _bookService;
private readonly NumericComparer _numericComparer;
public static readonly string CacheDirectory = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "cache/"));
public CacheService(ILogger<CacheService> logger, IUnitOfWork unitOfWork, IArchiveService archiveService,
IDirectoryService directoryService, IBookService bookService)
{
_logger = logger;
_unitOfWork = unitOfWork;
_archiveService = archiveService;
_directoryService = directoryService;
_bookService = bookService;
_numericComparer = new NumericComparer();
}
@ -48,21 +51,47 @@ namespace API.Services
var extractPath = GetCachePath(chapterId);
var extraPath = "";
if (Directory.Exists(extractPath))
{
    return chapter;
}
var extractDi = new DirectoryInfo(extractPath);
if (files.Count > 0 && files[0].Format == MangaFormat.Image)
{
DirectoryService.ExistOrCreate(extractPath);
if (files.Count == 1)
{
_directoryService.CopyFileToDirectory(files[0].FilePath, extractPath);
}
else
{
_directoryService.CopyDirectoryToDirectory(Path.GetDirectoryName(files[0].FilePath), extractPath, Parser.Parser.ImageFileExtensions);
}
extractDi.Flatten();
return chapter;
}
foreach (var file in files)
{
if (fileCount > 1)
{
extraPath = file.Id + string.Empty;
}
if (file.Format == MangaFormat.Archive)
{
_archiveService.ExtractArchive(file.FilePath, Path.Join(extractPath, extraPath));
} else if (file.Format == MangaFormat.Pdf)
{
_bookService.ExtractPdfImages(file.FilePath, Path.Join(extractPath, extraPath));
}
}
extractDi.Flatten();
extractDi.RemoveNonImages();
return chapter;
}
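Condensed, the cache-fill decision above works as follows (a sketch of the flow, not new code):

// 1. Cache already populated        -> return chapter untouched
// 2. Loose image files              -> copy the single file, or the folder's images, into the cache
// 3. Archives (.cbz/.zip/...)       -> _archiveService.ExtractArchive(...) into the cache
// 4. PDFs                           -> _bookService.ExtractPdfImages(...) into the cache
// finally: Flatten() the result and RemoveNonImages() before pages are served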
@ -73,11 +102,9 @@ namespace API.Services
_logger.LogInformation("Performing cleanup of Cache directory");
EnsureCacheDirectory();
try
{
    DirectoryService.ClearDirectory(CacheDirectory);
}
catch (Exception ex)
{
@ -87,7 +114,7 @@ namespace API.Services
_logger.LogInformation("Cache directory purged");
}
public void CleanupChapters(IEnumerable<int> chapterIds)
{
_logger.LogInformation("Running Cache cleanup on Volumes");
@ -138,13 +165,19 @@ namespace API.Services
return (files.ElementAt(page - 1 - pagesSoFar), mangaFile);
}
if (mangaFile.Format == MangaFormat.Image && mangaFile.Pages == 1)
{
// Each file is one page, meaning we should just get element at page
return (files.ElementAt(page), mangaFile);
}
return (files.ElementAt(page - pagesSoFar), mangaFile);
}
pagesSoFar += mangaFile.Pages;
}
return ("", null);
return (string.Empty, null);
}
}
}

View File

@ -1,28 +1,22 @@
using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;
using API.Configurations.CustomOptions;
using API.DTOs;
using API.DTOs.Stats;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace API.Services.Clients
{
public class StatsApiClient
{
private readonly HttpClient _client;
private readonly ILogger<StatsApiClient> _logger;
private const string ApiUrl = "http://stats.kavitareader.com";

public StatsApiClient(HttpClient client, ILogger<StatsApiClient> logger)
{
    _client = client;
    _logger = logger;
}
public async Task SendDataToStatsServer(UsageStatisticsDto data)

View File

@ -40,8 +40,9 @@ namespace API.Services
reSearchPattern.IsMatch(Path.GetExtension(file)));
}
/// <summary>
/// Returns a list of folders from end of fullPath to rootPath. If a file is passed at the end of the fullPath, it will be ignored.
///
/// Example) (C:/Manga/, C:/Manga/Love Hina/Specials/Omake/) returns [Omake, Specials, Love Hina]
/// </summary>
@ -50,7 +51,7 @@ namespace API.Services
/// <returns></returns>
public static IEnumerable<string> GetFoldersTillRoot(string rootPath, string fullPath)
{
var separator = Path.AltDirectorySeparatorChar;
if (fullPath.Contains(Path.DirectorySeparatorChar))
{
fullPath = fullPath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
@ -61,14 +62,22 @@ namespace API.Services
rootPath = rootPath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
}
var path = fullPath.EndsWith(separator) ? fullPath.Substring(0, fullPath.Length - 1) : fullPath;
var root = rootPath.EndsWith(separator) ? rootPath.Substring(0, rootPath.Length - 1) : rootPath;
var paths = new List<string>();
// If a file is at the end of the path, remove it before we start processing folders
if (Path.GetExtension(path) != string.Empty)
{
path = path.Substring(0, path.LastIndexOf(separator));
}
while (Path.GetDirectoryName(path) != Path.GetDirectoryName(root))
{
var folder = new DirectoryInfo(path).Name;
paths.Add(folder);
path = path.Replace(separator + folder, string.Empty);
path = path.Substring(0, path.LastIndexOf(separator));
}
return paths;
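A sketch of the documented behaviour (illustrative paths):

var folders = DirectoryService.GetFoldersTillRoot("C:/Manga/", "C:/Manga/Love Hina/Specials/Omake/Omake 01.cbz");
// folders => ["Omake", "Specials", "Love Hina"]; the trailing file name is stripped before the walk begins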
@ -95,6 +104,65 @@ namespace API.Services
return !Directory.Exists(path) ? Array.Empty<string>() : Directory.GetFiles(path);
}
public void CopyFileToDirectory(string fullFilePath, string targetDirectory)
{
var fileInfo = new FileInfo(fullFilePath);
if (fileInfo.Exists)
{
fileInfo.CopyTo(Path.Join(targetDirectory, fileInfo.Name));
}
}
/// <summary>
/// Copies a Directory with all files and subdirectories to a target location
/// </summary>
/// <param name="sourceDirName"></param>
/// <param name="destDirName"></param>
/// <param name="searchPattern">Defaults to *, meaning all files</param>
/// <returns></returns>
/// <exception cref="DirectoryNotFoundException"></exception>
public bool CopyDirectoryToDirectory(string sourceDirName, string destDirName, string searchPattern = "*")
{
if (string.IsNullOrEmpty(sourceDirName)) return false;
var di = new DirectoryInfo(sourceDirName);
if (!di.Exists) return false;
// Get the subdirectories for the specified directory.
var dir = new DirectoryInfo(sourceDirName);
if (!dir.Exists)
{
throw new DirectoryNotFoundException(
"Source directory does not exist or could not be found: "
+ sourceDirName);
}
var dirs = dir.GetDirectories();
// If the destination directory doesn't exist, create it.
Directory.CreateDirectory(destDirName);
// Get the files in the directory and copy them to the new location.
var files = GetFilesWithExtension(dir.FullName, searchPattern).Select(n => new FileInfo(n));
foreach (var file in files)
{
var tempPath = Path.Combine(destDirName, file.Name);
file.CopyTo(tempPath, false);
}
// If copying subdirectories, copy them and their contents to new location.
foreach (var subDir in dirs)
{
var tempPath = Path.Combine(destDirName, subDir.Name);
CopyDirectoryToDirectory(subDir.FullName, tempPath);
}
return true;
}
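A hypothetical call (invented paths), mirroring how the cache code above uses it to copy only images:

directoryService.CopyDirectoryToDirectory("/manga/My Series", "/kavita/cache/42", Parser.Parser.ImageFileExtensions);
// top-level files are filtered by the pattern; the recursive call for subdirectories falls back to the default "*"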
public string[] GetFilesWithExtension(string path, string searchPatternExpression = "")
{
if (searchPatternExpression != string.Empty)

View File

@ -0,0 +1,71 @@
using System;
using System.IO;
using System.Linq;
using API.Comparators;
using API.Entities;
using API.Interfaces.Services;
using Microsoft.Extensions.Logging;
using NetVips;
namespace API.Services
{
public class ImageService : IImageService
{
private readonly ILogger<ImageService> _logger;
private readonly IDirectoryService _directoryService;
private readonly NaturalSortComparer _naturalSortComparer;
public ImageService(ILogger<ImageService> logger, IDirectoryService directoryService)
{
_logger = logger;
_directoryService = directoryService;
_naturalSortComparer = new NaturalSortComparer();
}
/// <summary>
/// Finds the first image in the directory of the first file. Does not check for "cover/folder".ext files to override.
/// </summary>
/// <param name="file"></param>
/// <returns></returns>
public string GetCoverFile(MangaFile file)
{
var directory = Path.GetDirectoryName(file.FilePath);
if (string.IsNullOrEmpty(directory))
{
_logger.LogError("Could not find Directory for {File}", file.FilePath);
return null;
}
var firstImage = _directoryService.GetFilesWithExtension(directory, Parser.Parser.ImageFileExtensions)
.OrderBy(f => f, _naturalSortComparer).FirstOrDefault();
return firstImage;
}
public byte[] GetCoverImage(string path, bool createThumbnail = false)
{
if (string.IsNullOrEmpty(path)) return Array.Empty<byte>();
try
{
if (createThumbnail)
{
using var thumbnail = Image.Thumbnail(path, MetadataService.ThumbnailWidth);
return thumbnail.WriteToBuffer(".jpg");
}
using var img = Image.NewFromFile(path);
using var stream = new MemoryStream();
img.JpegsaveStream(stream);
return stream.ToArray();
}
catch (Exception ex)
{
_logger.LogWarning(ex, "[GetCoverImage] There was an error and prevented thumbnail generation on {ImageFile}. Defaulting to no cover image", path);
}
return Array.Empty<byte>();
}
}
}

View File

@ -16,185 +16,191 @@ namespace API.Services
{
public class MetadataService : IMetadataService
{
private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<MetadataService> _logger;
private readonly IArchiveService _archiveService;
private readonly IBookService _bookService;
private readonly IImageService _imageService;
private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer();
public static readonly int ThumbnailWidth = 320; // 153w x 230h

public MetadataService(IUnitOfWork unitOfWork, ILogger<MetadataService> logger,
    IArchiveService archiveService, IBookService bookService, IImageService imageService)
{
    _unitOfWork = unitOfWork;
    _logger = logger;
    _archiveService = archiveService;
    _bookService = bookService;
    _imageService = imageService;
}

private static bool ShouldFindCoverImage(byte[] coverImage, bool forceUpdate = false)
{
    return forceUpdate || coverImage == null || !coverImage.Any();
}

private byte[] GetCoverImage(MangaFile file, bool createThumbnail = true)
{
    switch (file.Format)
    {
        case MangaFormat.Pdf:
        case MangaFormat.Epub:
            return _bookService.GetCoverImage(file.FilePath, createThumbnail);
        case MangaFormat.Image:
            var coverImage = _imageService.GetCoverFile(file);
            return _imageService.GetCoverImage(coverImage, createThumbnail);
        case MangaFormat.Archive:
            return _archiveService.GetCoverImage(file.FilePath, createThumbnail);
        default:
            return Array.Empty<byte>();
    }
}

public void UpdateMetadata(Chapter chapter, bool forceUpdate)
{
    var firstFile = chapter.Files.OrderBy(x => x.Chapter).FirstOrDefault();
    if (ShouldFindCoverImage(chapter.CoverImage, forceUpdate) && firstFile != null && !new FileInfo(firstFile.FilePath).IsLastWriteLessThan(firstFile.LastModified))
    {
        chapter.Files ??= new List<MangaFile>();
        chapter.CoverImage = GetCoverImage(firstFile);
    }
}

public void UpdateMetadata(Volume volume, bool forceUpdate)
{
    if (volume == null || !ShouldFindCoverImage(volume.CoverImage, forceUpdate)) return;

    volume.Chapters ??= new List<Chapter>();
    var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).FirstOrDefault();

    // Skip calculating Cover Image (I/O) if the chapter already has it set
    if (firstChapter == null || ShouldFindCoverImage(firstChapter.CoverImage, forceUpdate))
    {
        var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault();
        if (firstFile != null && !new FileInfo(firstFile.FilePath).IsLastWriteLessThan(firstFile.LastModified))
        {
            volume.CoverImage = GetCoverImage(firstFile);
        }
    }
    else
    {
        volume.CoverImage = firstChapter.CoverImage;
    }
}

public void UpdateMetadata(Series series, bool forceUpdate)
{
    if (series == null) return;
    if (ShouldFindCoverImage(series.CoverImage, forceUpdate))
    {
        series.Volumes ??= new List<Volume>();
        var firstCover = series.Volumes.GetCoverImage(series.Format);
        byte[] coverImage = null;
        if (firstCover == null && series.Volumes.Any())
        {
            // If firstCover is null and one volume, the whole series is Chapters under Vol 0.
            if (series.Volumes.Count == 1)
            {
                coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparer)
                    .FirstOrDefault(c => !c.IsSpecial)?.CoverImage;
            }

            if (coverImage == null)
            {
                coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number), _chapterSortComparer)
                    .FirstOrDefault()?.CoverImage;
            }
        }
        series.CoverImage = firstCover?.CoverImage ?? coverImage;
    }

    UpdateSeriesSummary(series, forceUpdate);
}

private void UpdateSeriesSummary(Series series, bool forceUpdate)
{
    if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return;

    var isBook = series.Library.Type == LibraryType.Book;
    var firstVolume = series.Volumes.FirstWithChapters(isBook);
    var firstChapter = firstVolume?.Chapters.GetFirstChapterWithFiles();

    var firstFile = firstChapter?.Files.FirstOrDefault();
    if (firstFile == null || (!forceUpdate && !firstFile.HasFileBeenModified())) return;
    if (Parser.Parser.IsPdf(firstFile.FilePath)) return;

    // NOTE: This suffers from code changes not taking effect due to stale data
    var summary = Parser.Parser.IsEpub(firstFile.FilePath) ? _bookService.GetSummaryInfo(firstFile.FilePath) : _archiveService.GetSummaryInfo(firstFile.FilePath);
    if (string.IsNullOrEmpty(series.Summary))
    {
        series.Summary = summary;
    }

    firstFile.LastModified = DateTime.Now;
}

public void RefreshMetadata(int libraryId, bool forceUpdate = false)
{
    var sw = Stopwatch.StartNew();
    var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult();

    // TODO: See if we can break this up into multiple threads that process 20 series at a time then save so we can reduce amount of memory used
    _logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name);
    foreach (var series in library.Series)
    {
        foreach (var volume in series.Volumes)
        {
            foreach (var chapter in volume.Chapters)
            {
                UpdateMetadata(chapter, forceUpdate);
            }

            UpdateMetadata(volume, forceUpdate);
        }

        UpdateMetadata(series, forceUpdate);
        _unitOfWork.SeriesRepository.Update(series);
    }

    if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result)
    {
        _logger.LogInformation("Updated metadata for {LibraryName} in {ElapsedMilliseconds} milliseconds", library.Name, sw.ElapsedMilliseconds);
    }
}

public void RefreshMetadataForSeries(int libraryId, int seriesId)
{
    var sw = Stopwatch.StartNew();
    var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult();

    var series = library.Series.SingleOrDefault(s => s.Id == seriesId);
    if (series == null)
    {
        _logger.LogError("Series {SeriesId} was not found on Library {LibraryName}", seriesId, libraryId);
        return;
    }
    _logger.LogInformation("Beginning metadata refresh of {SeriesName}", series.Name);
    foreach (var volume in series.Volumes)
    {
        foreach (var chapter in volume.Chapters)
        {
            UpdateMetadata(chapter, true);
        }

        UpdateMetadata(volume, true);
    }

    UpdateMetadata(series, true);
    _unitOfWork.SeriesRepository.Update(series);
    if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result)
    {
        _logger.LogInformation("Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds);
    }
}
}
}

View File

@ -1,4 +1,5 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using API.Entities.Enums;
using API.Helpers.Converters;
@ -112,19 +113,25 @@ namespace API.Services
public void RefreshMetadata(int libraryId, bool forceUpdate = true)
{
_logger.LogInformation("Enqueuing library metadata refresh for: {LibraryId}", libraryId);
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate));
}
public void CleanupTemp()
{
var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp");
BackgroundJob.Enqueue(() => DirectoryService.ClearDirectory(tempDirectory));
}
public void RefreshSeriesMetadata(int libraryId, int seriesId)
{
_logger.LogInformation("Enqueuing series metadata refresh for: {SeriesId}", seriesId);
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId));
}
public void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false)
{
_logger.LogInformation("Enqueuing series scan for: {SeriesId}", seriesId);
BackgroundJob.Enqueue(() => _scannerService.ScanSeries(libraryId, seriesId, forceUpdate, CancellationToken.None));
}
public void BackupDatabase()

View File

@ -0,0 +1,208 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using API.Entities;
using API.Entities.Enums;
using API.Interfaces.Services;
using API.Parser;
using Microsoft.Extensions.Logging;
namespace API.Services.Tasks.Scanner
{
public class ParsedSeries
{
public string Name { get; init; }
public string NormalizedName { get; init; }
public MangaFormat Format { get; init; }
}
public class ParseScannedFiles
{
private readonly ConcurrentDictionary<ParsedSeries, List<ParserInfo>> _scannedSeries;
private readonly IBookService _bookService;
private readonly ILogger _logger;
/// <summary>
/// An instance of a pipeline for processing files and returning a map of Series -> ParserInfos.
/// Each instance keeps its own state, so concurrent scans do not interfere with one another.
/// </summary>
/// <param name="bookService"></param>
/// <param name="logger"></param>
public ParseScannedFiles(IBookService bookService, ILogger logger)
{
_bookService = bookService;
_logger = logger;
_scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
}
public static IList<ParserInfo> GetInfosByName(Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries, Series series)
{
var existingKey = parsedSeries.Keys.FirstOrDefault(ps =>
ps.Format == series.Format && ps.NormalizedName == Parser.Parser.Normalize(series.OriginalName));
existingKey ??= new ParsedSeries()
{
Format = series.Format,
Name = series.OriginalName,
NormalizedName = Parser.Parser.Normalize(series.OriginalName)
};
return parsedSeries[existingKey];
}
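A usage sketch (hypothetical Series instance): the lookup matches on format plus normalized name. One caveat worth noting: if no parsed key matches, the freshly built key is not present in the dictionary, so the indexer on the last line will throw.

var infos = ParseScannedFiles.GetInfosByName(parsedSeries, series); // all ParserInfos parsed for this series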
/// <summary>
/// Processes files found during a library scan.
/// Populates a collection of <see cref="ParserInfo"/> for DB updates later.
/// </summary>
/// <param name="path">Path of a file</param>
/// <param name="rootPath"></param>
/// <param name="type">Library type to determine parsing to perform</param>
private void ProcessFile(string path, string rootPath, LibraryType type)
{
ParserInfo info;
if (Parser.Parser.IsEpub(path))
{
info = _bookService.ParseInfo(path);
}
else
{
info = Parser.Parser.Parse(path, rootPath, type);
}
if (info == null)
{
_logger.LogWarning("[Scanner] Could not parse series from {Path}", path);
return;
}
if (Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != Parser.Parser.DefaultVolume)
{
info = Parser.Parser.Parse(path, rootPath, type);
var info2 = _bookService.ParseInfo(path);
info.Merge(info2);
}
TrackSeries(info);
}
/// <summary>
/// Attempts to add a new series mapping to the _scannedSeries bag, or adds to an existing one.
/// This checks whether the name matches an existing series name across multiple fields; see <see cref="MergeName"/>
/// </summary>
/// <param name="info"></param>
private void TrackSeries(ParserInfo info)
{
if (info.Series == string.Empty) return;
// Check if normalized info.Series already exists and if so, update info to use that name instead
info.Series = MergeName(info);
var existingKey = _scannedSeries.Keys.FirstOrDefault(ps =>
ps.Format == info.Format && ps.NormalizedName == Parser.Parser.Normalize(info.Series));
existingKey ??= new ParsedSeries()
{
Format = info.Format,
Name = info.Series,
NormalizedName = Parser.Parser.Normalize(info.Series)
};
_scannedSeries.AddOrUpdate(existingKey, new List<ParserInfo>() {info}, (_, oldValue) =>
{
oldValue ??= new List<ParserInfo>();
if (!oldValue.Contains(info))
{
oldValue.Add(info);
}
return oldValue;
});
}
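The thread-safe accumulation relies on ConcurrentDictionary.AddOrUpdate; a minimal standalone sketch of the same pattern (invented key and values):

using System.Collections.Concurrent;
using System.Collections.Generic;

var map = new ConcurrentDictionary<string, List<int>>();
// Adds a fresh list for an unseen key, or appends via the update delegate for an existing one.
map.AddOrUpdate("lovehina", new List<int> { 1 }, (_, old) => { old.Add(2); return old; });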
/// <summary>
/// Using a normalized name from the passed ParserInfo, this checks against all series found so far; if one already exists with the
/// same normalized name, it merges into the existing one. This is important as some manga may differ only in punctuation or capitalization.
/// </summary>
/// <param name="info"></param>
/// <returns></returns>
public string MergeName(ParserInfo info)
{
var normalizedSeries = Parser.Parser.Normalize(info.Series);
_logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries);
var existingName =
_scannedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key.NormalizedName) == normalizedSeries && p.Key.Format == info.Format)
.Key;
if (existingName != null && !string.IsNullOrEmpty(existingName.Name))
{
_logger.LogDebug("Found duplicate parsed infos, merged {Original} into {Merged}", info.Series, existingName.Name);
return existingName.Name;
}
return info.Series;
}
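For example (illustrative names, and assuming Normalize strips casing and punctuation, as the doc comment above implies): "Love-Hina!" and "love hina" normalize to the same key, so whichever spelling is parsed second is merged into the series created by the first.

// MergeName(infoFor("love hina")) returns "Love-Hina!" if that series was tracked first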
/// <summary>
/// Scans all given folders for supported files and parses them into ParserInfos grouped by series.
/// </summary>
/// <param name="libraryType">Type of library. Used for selecting the correct file extensions to search for and parsing files</param>
/// <param name="folders">The folders to scan. By default, this should be library.Folders, however it can be overwritten to restrict folders</param>
/// <param name="totalFiles">Total files scanned</param>
/// <param name="scanElapsedTime">Time it took to scan and parse files</param>
/// <returns></returns>
public Dictionary<ParsedSeries, List<ParserInfo>> ScanLibrariesForSeries(LibraryType libraryType, IEnumerable<string> folders, out int totalFiles,
out long scanElapsedTime)
{
var sw = Stopwatch.StartNew();
totalFiles = 0;
var searchPattern = GetLibrarySearchPattern();
foreach (var folderPath in folders)
{
try
{
totalFiles += DirectoryService.TraverseTreeParallelForEach(folderPath, (f) =>
{
try
{
ProcessFile(f, folderPath, libraryType);
}
catch (FileNotFoundException exception)
{
_logger.LogError(exception, "The file {Filename} could not be found", f);
}
}, searchPattern, _logger);
}
catch (ArgumentException ex)
{
_logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath);
}
}
scanElapsedTime = sw.ElapsedMilliseconds;
_logger.LogInformation("Scanned {TotalFiles} files in {ElapsedScanTime} milliseconds", totalFiles,
scanElapsedTime);
return SeriesWithInfos();
}
private static string GetLibrarySearchPattern()
{
return Parser.Parser.SupportedExtensions;
}
/// <summary>
/// Returns any series where there were parsed infos
/// </summary>
/// <returns></returns>
private Dictionary<ParsedSeries, List<ParserInfo>> SeriesWithInfos()
{
var filtered = _scannedSeries.Where(kvp => kvp.Value.Count > 0);
var series = filtered.ToDictionary(v => v.Key, v => v.Value);
return series;
}
}
}

View File

@ -1,9 +1,9 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using API.Comparators;
using API.Data;
@ -13,6 +13,7 @@ using API.Extensions;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
using API.Services.Tasks.Scanner;
using Hangfire;
using Microsoft.Extensions.Logging;
@ -25,18 +26,99 @@ namespace API.Services.Tasks
private readonly IArchiveService _archiveService;
private readonly IMetadataService _metadataService;
private readonly IBookService _bookService;
private readonly ICacheService _cacheService;
private readonly NaturalSortComparer _naturalSort = new ();

public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger, IArchiveService archiveService,
    IMetadataService metadataService, IBookService bookService, ICacheService cacheService)
{
    _unitOfWork = unitOfWork;
    _logger = logger;
    _archiveService = archiveService;
    _metadataService = metadataService;
    _bookService = bookService;
    _cacheService = cacheService;
}
[DisableConcurrentExecution(timeoutInSeconds: 360)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public async Task ScanSeries(int libraryId, int seriesId, bool forceUpdate, CancellationToken token)
{
var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId);
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId);
var library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId, seriesId);
var dirs = FindHighestDirectoriesFromFiles(library, files);
var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new []{ seriesId });
_logger.LogInformation("Beginning file scan on {SeriesName}", series.Name);
var scanner = new ParseScannedFiles(_bookService, _logger);
var parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles, out var scanElapsedTime);
// If a root-level folder scan occurs, multiple series get passed in, and we hit a unique constraint issue.
// Hence we clear out everything except the series we selected for.
var firstSeries = library.Series.FirstOrDefault();
var keys = parsedSeries.Keys.ToList(); // snapshot the keys so we can safely Remove while iterating
foreach (var key in keys.Where(key => !firstSeries.NameInParserInfo(parsedSeries[key].FirstOrDefault()) || firstSeries?.Format != key.Format))
{
parsedSeries.Remove(key);
}
var sw = new Stopwatch();
UpdateLibrary(library, parsedSeries);
_unitOfWork.LibraryRepository.Update(library);
if (await _unitOfWork.CommitAsync())
{
_logger.LogInformation(
"Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {SeriesName}",
totalFiles, parsedSeries.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, series.Name);
CleanupUserProgress();
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate));
BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds));
}
else
{
_logger.LogCritical(
"There was a critical error that resulted in a failed scan. Please check logs and rescan");
await _unitOfWork.RollbackAsync();
}
}
/// <summary>
/// Finds the highest directories from a set of MangaFiles
/// </summary>
/// <param name="library"></param>
/// <param name="files"></param>
/// <returns></returns>
private static Dictionary<string, string> FindHighestDirectoriesFromFiles(Library library, IList<MangaFile> files)
{
var stopLookingForDirectories = false;
var dirs = new Dictionary<string, string>();
foreach (var folder in library.Folders)
{
if (stopLookingForDirectories) break;
foreach (var file in files)
{
if (!file.FilePath.Contains(folder.Path)) continue;
var parts = DirectoryService.GetFoldersTillRoot(folder.Path, file.FilePath).ToList();
if (parts.Count == 0)
{
// Break from all loops; we're done, just scan folder.Path (the library root)
dirs.Add(folder.Path, string.Empty);
stopLookingForDirectories = true;
break;
}
var fullPath = Path.Join(folder.Path, parts.Last());
if (!dirs.ContainsKey(fullPath))
{
dirs.Add(fullPath, string.Empty);
}
}
}
return dirs;
}
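A sketch of the effect (illustrative paths):

// Library folder "/manga", series file "/manga/Love Hina/Love Hina v01.cbz":
// GetFoldersTillRoot yields ["Love Hina"], so the scan is restricted to "/manga/Love Hina".
// If a file sits directly in "/manga", the whole library root is scanned instead.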
@ -51,51 +133,47 @@ namespace API.Services.Tasks
}
}
private bool ShouldSkipFolderScan(FolderPath folder, ref int skippedFolders)
{
// NOTE: The only way to skip folders is if the directory hasn't been modified, we aren't doing a forced update, and the version hasn't changed between scans.
return false;
// if (!_forceUpdate && Directory.GetLastWriteTime(folder.Path) < folder.LastScanned)
// {
// _logger.LogDebug("{FolderPath} hasn't been modified since last scan. Skipping", folder.Path);
// skippedFolders += 1;
// return true;
// }
//return false;
}
[DisableConcurrentExecution(360)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public void ScanLibrary(int libraryId, bool forceUpdate)
{
Library library;
try
{
    library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter()
        .GetResult();
}
catch (Exception ex)
{
    // This usually only fails if user is not authenticated.
    _logger.LogError(ex, "There was an issue fetching Library {LibraryId}", libraryId);
    return;
}
_logger.LogInformation("Beginning file scan on {LibraryName}", library.Name);
var scanner = new ParseScannedFiles(_bookService, _logger);
var series = scanner.ScanLibrariesForSeries(library.Type, library.Folders.Select(fp => fp.Path), out var totalFiles, out var scanElapsedTime);
foreach (var folderPath in library.Folders)
{
folderPath.LastScanned = DateTime.Now;
}
var sw = Stopwatch.StartNew();
UpdateLibrary(library, series);
_unitOfWork.LibraryRepository.Update(library);
if (Task.Run(() => _unitOfWork.CommitAsync()).Result)
{
_logger.LogInformation("Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, series.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, library.Name);
_logger.LogInformation(
"Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}",
totalFiles, series.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, library.Name);
}
else
{
_logger.LogCritical("There was a critical error that resulted in a failed scan. Please check logs and rescan");
_logger.LogCritical(
"There was a critical error that resulted in a failed scan. Please check logs and rescan");
}
CleanupUserProgress();
@ -112,77 +190,7 @@ namespace API.Services.Tasks
_logger.LogInformation("Removed {Count} abandoned progress rows", cleanedUp);
}
private Dictionary<string, List<ParserInfo>> ScanLibrariesForSeries(bool forceUpdate, Library library, Stopwatch sw, out int totalFiles,
out long scanElapsedTime)
{
_logger.LogInformation("Beginning scan on {LibraryName}. Forcing metadata update: {ForceUpdate}", library.Name,
forceUpdate);
totalFiles = 0;
var skippedFolders = 0;
foreach (var folderPath in library.Folders)
{
if (ShouldSkipFolderScan(folderPath, ref skippedFolders)) continue;
// NOTE: we can refactor this to allow all filetypes and handle everything in the ProcessFile to allow mixed library types.
var searchPattern = Parser.Parser.ArchiveFileExtensions;
if (library.Type == LibraryType.Book)
{
searchPattern = Parser.Parser.BookFileExtensions;
}
try
{
totalFiles += DirectoryService.TraverseTreeParallelForEach(folderPath.Path, (f) =>
{
try
{
ProcessFile(f, folderPath.Path, library.Type);
}
catch (FileNotFoundException exception)
{
_logger.LogError(exception, "The file {Filename} could not be found", f);
}
}, searchPattern, _logger);
}
catch (ArgumentException ex)
{
_logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath.Path);
}
folderPath.LastScanned = DateTime.Now;
}
scanElapsedTime = sw.ElapsedMilliseconds;
_logger.LogInformation("Folders Scanned {TotalFiles} files in {ElapsedScanTime} milliseconds", totalFiles,
scanElapsedTime);
sw.Restart();
if (skippedFolders == library.Folders.Count)
{
_logger.LogInformation("All Folders were skipped due to no modifications to the directories");
_unitOfWork.LibraryRepository.Update(library);
_scannedSeries = null;
_logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}",
totalFiles, sw.ElapsedMilliseconds, library.Name);
return new Dictionary<string, List<ParserInfo>>();
}
return SeriesWithInfos(_scannedSeries);
}
/// <summary>
/// Returns any series where there were parsed infos
/// </summary>
/// <param name="scannedSeries"></param>
/// <returns></returns>
private static Dictionary<string, List<ParserInfo>> SeriesWithInfos(IDictionary<string, List<ParserInfo>> scannedSeries)
{
var filtered = scannedSeries.Where(kvp => kvp.Value.Count > 0);
var series = filtered.ToDictionary(v => v.Key, v => v.Value);
return series;
}
private void UpdateLibrary(Library library, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
{
if (parsedSeries == null) throw new ArgumentNullException(nameof(parsedSeries));
@ -194,28 +202,28 @@ namespace API.Services.Tasks
_logger.LogInformation("Removed {RemoveMissingSeries} series that are no longer on disk:", removeCount);
foreach (var s in missingSeries)
{
_logger.LogDebug("Removed {SeriesName}", s.Name);
_logger.LogDebug("Removed {SeriesName} ({Format})", s.Name, s.Format);
}
}
// Add new series that have parsedInfos
foreach (var (key, infos) in parsedSeries)
{
// Key is normalized already
Series existingSeries;
try
{
existingSeries = library.Series.SingleOrDefault(s =>
(s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName)
&& (s.Format == key.Format || s.Format == MangaFormat.Unknown));
}
catch (Exception e)
{
_logger.LogCritical(e, "There are multiple series that map to normalized key {Key}. You can manually delete the entity via UI and rescan to fix it", key.NormalizedName);
var duplicateSeries = library.Series.Where(s => s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName).ToList();
foreach (var series in duplicateSeries)
{
_logger.LogCritical("{Key} maps with {Series}", key, series.OriginalName);
_logger.LogCritical("{Key} maps with {Series}", key.Name, series.OriginalName);
}
continue;
@ -223,12 +231,14 @@ namespace API.Services.Tasks
if (existingSeries == null)
{
existingSeries = DbFactory.Series(infos[0].Series);
existingSeries.Format = key.Format;
library.Series.Add(existingSeries);
}
existingSeries.NormalizedName = Parser.Parser.Normalize(existingSeries.Name);
existingSeries.OriginalName ??= infos[0].Series;
existingSeries.Metadata ??= DbFactory.SeriesMetadata(new List<CollectionTag>());
existingSeries.Format = key.Format;
}
// Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series
@ -238,9 +248,8 @@ namespace API.Services.Tasks
try
{
_logger.LogInformation("Processing series {SeriesName}", series.OriginalName);
UpdateVolumes(series, parsedSeries[Parser.Parser.Normalize(series.OriginalName)].ToArray());
UpdateVolumes(series, ParseScannedFiles.GetInfosByName(parsedSeries, series).ToArray());
series.Pages = series.Volumes.Sum(v => v.Pages);
// Test
}
catch (Exception ex)
{
@ -249,10 +258,15 @@ namespace API.Services.Tasks
});
}
public IEnumerable<Series> FindSeriesNotOnDisk(ICollection<Series> existingSeries, Dictionary<string, List<ParserInfo>> parsedSeries)
public IEnumerable<Series> FindSeriesNotOnDisk(ICollection<Series> existingSeries, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
{
var foundSeries = parsedSeries.Select(s => s.Key).ToList();
return existingSeries.Where(es => !es.NameInList(foundSeries));
// It is safe to check only the first entry since the Parser ensures a Series only has one format
var format = MangaFormat.Unknown;
var firstPs = parsedSeries.Keys.DistinctBy(ps => ps.Format).FirstOrDefault();
if (firstPs != null) format = firstPs.Format;
var foundSeries = parsedSeries.Select(s => s.Key.Name).ToList();
return existingSeries.Where(es => !es.NameInList(foundSeries) || es.Format != format);
}
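// Illustrative sketch (hypothetical data): a series is reported as not on disk
// when its name is absent from the parsed keys OR its format no longer matches
// what the scan produced, e.g. after converting a series from archives to epubs:
//   existing: Series { Name = "Accel World", Format = MangaFormat.Archive }
//   parsed:   key ParsedSeries { Name = "Accel World", Format = MangaFormat.Epub }
//   => the Archive entry is yielded for removal.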
/// <summary>
@ -270,7 +284,7 @@ namespace API.Services.Tasks
existingSeries = existingSeries.Where(
s => !missingList.Exists(
m => m.NormalizedName.Equals(s.NormalizedName))).ToList();
m => m.NormalizedName.Equals(s.NormalizedName) && m.Format == s.Format)).ToList();
removeCount = existingCount - existingSeries.Count;
@ -292,8 +306,6 @@ namespace API.Services.Tasks
series.Volumes.Add(volume);
}
// NOTE: Instead of creating and adding, why not merge a new volume into an existing one so that new properties etc. always get propagated?
_logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray();
UpdateChapters(volume, infos);
@ -401,81 +413,6 @@ namespace API.Services.Tasks
}
}
/// <summary>
/// Attempts to add a new series mapping to the _scannedSeries bag, or adds to an existing entry.
/// </summary>
/// <param name="info"></param>
private void TrackSeries(ParserInfo info)
{
if (info.Series == string.Empty) return;
// Check if normalized info.Series already exists and if so, update info to use that name instead
info.Series = MergeName(_scannedSeries, info);
_scannedSeries.AddOrUpdate(Parser.Parser.Normalize(info.Series), new List<ParserInfo>() {info}, (_, oldValue) =>
{
oldValue ??= new List<ParserInfo>();
if (!oldValue.Contains(info))
{
oldValue.Add(info);
}
return oldValue;
});
}
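// Illustration (assumed filenames): AddOrUpdate seeds a new list for a previously
// unseen normalized key and appends otherwise, so parallel file processing can
// safely accumulate infos per series:
//   TrackSeries(info for "One Piece - Vol 1.cbz"); // creates the list under "onepiece"
//   TrackSeries(info for "One Piece - Vol 2.cbz"); // appends to the same list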
public string MergeName(ConcurrentDictionary<string,List<ParserInfo>> collectedSeries, ParserInfo info)
{
var normalizedSeries = Parser.Parser.Normalize(info.Series);
_logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries);
var existingName = collectedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key) == normalizedSeries)
.Key;
// BUG: We are comparing info.Series against a normalized string. They should never match. (This can cause series to not delete or parse correctly after a rename)
if (!string.IsNullOrEmpty(existingName)) // && info.Series != existingName
{
_logger.LogDebug("Found duplicate parsed infos, merged {Original} into {Merged}", info.Series, existingName);
return existingName;
}
return info.Series;
}
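// Sketch of the BUG noted above (hypothetical names): the dictionary keys are
// already normalized, so existingName is normalized while info.Series is raw,
// and the commented-out guard `info.Series != existingName` would nearly always
// be true:
//   Parser.Parser.Normalize("One Piece") == "onepiece"
//   "One Piece" != "onepiece"  // raw vs normalized rarely match after a rename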
/// <summary>
/// Processes files found during a library scan.
/// Populates a collection of <see cref="ParserInfo"/> for DB updates later.
/// </summary>
/// <param name="path">Path of a file</param>
/// <param name="rootPath"></param>
/// <param name="type">Library type to determine parsing to perform</param>
private void ProcessFile(string path, string rootPath, LibraryType type)
{
ParserInfo info;
if (type == LibraryType.Book && Parser.Parser.IsEpub(path))
{
info = _bookService.ParseInfo(path);
}
else
{
info = Parser.Parser.Parse(path, rootPath, type);
}
if (info == null)
{
_logger.LogWarning("[Scanner] Could not parse series from {Path}", path);
return;
}
if (type == LibraryType.Book && Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != Parser.Parser.DefaultVolume)
{
info = Parser.Parser.Parse(path, rootPath, type);
var info2 = _bookService.ParseInfo(path);
info.Merge(info2);
}
TrackSeries(info);
}
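// Sketch of the epub special case above (hypothetical filename): when a book's
// parsed series name still contains a concrete volume, the filename-based parse
// is preferred and the epub metadata is merged on top:
//   BookService says Series = "Foo Vol. 2" for "Foo Vol. 2.epub"
//   ParseVolume("Foo Vol. 2") != DefaultVolume, so re-parse by filename and Merge().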
private MangaFile CreateMangaFile(ParserInfo info)
{
switch (info.Format)
@ -489,7 +426,8 @@ namespace API.Services.Tasks
Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath)
};
}
case MangaFormat.Book:
case MangaFormat.Pdf:
case MangaFormat.Epub:
{
return new MangaFile()
{
@ -498,6 +436,15 @@ namespace API.Services.Tasks
Pages = _bookService.GetNumberOfPages(info.FullFilePath)
};
}
case MangaFormat.Image:
{
return new MangaFile()
{
FilePath = info.FullFilePath,
Format = info.Format,
Pages = 1
};
}
default:
_logger.LogWarning("[Scanner] Ignoring {Filename}. Non-archives are not supported", info.Filename);
break;
@ -513,9 +460,9 @@ namespace API.Services.Tasks
if (existingFile != null)
{
existingFile.Format = info.Format;
if (!existingFile.HasFileBeenModified() && existingFile.Pages > 0)
if (existingFile.HasFileBeenModified() || existingFile.Pages == 0)
{
existingFile.Pages = existingFile.Format == MangaFormat.Book
existingFile.Pages = (existingFile.Format == MangaFormat.Epub || existingFile.Format == MangaFormat.Pdf)
? _bookService.GetNumberOfPages(info.FullFilePath)
: _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath);
}


@ -6,7 +6,7 @@ using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using API.Data;
using API.DTOs;
using API.DTOs.Stats;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services.Clients;
@ -15,7 +15,7 @@ using Kavita.Common.EnvironmentInfo;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace API.Services
namespace API.Services.Tasks
{
public class StatsService : IStatsService
{
@ -133,7 +133,7 @@ namespace API.Services
return usageInfo;
}
private static ServerInfoDto GetServerInfo()
public static ServerInfoDto GetServerInfo()
{
var serverInfo = new ServerInfoDto
{
@ -142,7 +142,9 @@ namespace API.Services
RunTimeVersion = RuntimeInformation.FrameworkDescription,
KavitaVersion = BuildInfo.Version.ToString(),
Culture = Thread.CurrentThread.CurrentCulture.Name,
BuildBranch = BuildInfo.Branch
BuildBranch = BuildInfo.Branch,
IsDocker = new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker,
NumOfCores = Environment.ProcessorCount
};
return serverInfo;


@ -101,7 +101,11 @@ namespace API
// Ordering is important. Cors, authentication, authorization
if (env.IsDevelopment())
{
app.UseCors(policy => policy.AllowAnyHeader().AllowAnyMethod().WithOrigins("http://localhost:4200"));
app.UseCors(policy => policy
.AllowAnyHeader()
.AllowAnyMethod()
.WithOrigins("http://localhost:4200")
.WithExposedHeaders("Content-Disposition", "Pagination"));
}
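// Minimal sketch of the ordering the comment above prescribes (typical
// ASP.NET Core pipeline; names are assumptions, not the full Startup):
//   app.UseCors(...);         // first, so preflight requests succeed
//   app.UseAuthentication();  // then establish identity
//   app.UseAuthorization();   // then enforce policies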
app.UseResponseCaching();
@ -145,7 +149,7 @@ namespace API
});
}
private void OnShutdown()
private static void OnShutdown()
{
Console.WriteLine("Server is shutting down. Please allow a few seconds to stop any background jobs...");
TaskScheduler.Client.Dispose();


@ -19,12 +19,11 @@ Setup guides, FAQ, the more information we have on the [wiki](https://github.com
1. Fork Kavita
2. Clone the repository into your development machine. [*info*](https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/cloning-a-repository-from-github)
- Kavita as of v0.4.2 requires Kavita-webui to be cloned next to the Kavita. Fork and clone this as well.
3. Install the required Node Packages
- cd kavita-webui
- cd Kavita/UI/Web
- `npm install`
- `npm install -g @angular/cli`
4. Start webui server `ng serve`
4. Start the Angular server: `ng serve`
5. Build the project in Visual Studio/Rider, setting the startup project to `API`
6. Debug the project in Visual Studio/Rider
7. Open http://localhost:4200
@ -41,10 +40,10 @@ Setup guides, FAQ, the more information we have on the [wiki](https://github.com
- Commit with *nix line endings for consistency (We checkout Windows and commit *nix)
- One feature/bug fix per pull request to keep things clean and easy to understand
- Use 4 spaces instead of tabs; this is the default for VS 2019 and WebStorm (to my knowledge)
- Use 2 spaces for Kavita-webui files
- Use 2 spaces for UI files
### Pull Requesting ###
- Only make pull requests to develop, never master, if you make a PR to master we'll comment on it and close it
- Only make pull requests to develop, never main; if you make a PR to main, we'll comment on it and close it
- You're probably going to get some comments or questions from us, they will be to ensure consistency and maintainability
- We'll try to respond to pull requests as soon as possible; if it's been a day or two, please reach out to us, as we may have missed it
- Each PR should come from its own [feature branch](http://martinfowler.com/bliki/FeatureBranch.html) not develop in your fork, it should have a meaningful branch name (what is being added/fixed)
@ -52,5 +51,7 @@ Setup guides, FAQ, the more information we have on the [wiki](https://github.com
- fix-bug (Good)
- patch (Bad)
- develop (Bad)
- feature/parser-enhancements (Great)
- bugfix/book-issues (Great)
If you have any questions about any of this, please let us know.


@ -8,7 +8,7 @@ ARG TARGETPLATFORM
#Move the output files to where they need to be
RUN mkdir /files
COPY _output/*.tar.gz /files/
COPY Kavita-webui/dist /files/wwwroot
COPY UI/Web/dist /files/wwwroot
COPY copy_runtime.sh /copy_runtime.sh
RUN /copy_runtime.sh
@ -20,7 +20,7 @@ COPY --from=copytask /files/wwwroot /kavita/wwwroot
#Installs program dependencies
RUN apt-get update \
&& apt-get install -y libicu-dev libssl1.1 pwgen \
&& apt-get install -y libicu-dev libssl1.1 pwgen libgdiplus \
&& rm -rf /var/lib/apt/lists/*
#Creates the data directory


@ -2,133 +2,259 @@
using System.IO;
using System.Text.Json;
using Kavita.Common.EnvironmentInfo;
using Microsoft.Extensions.Hosting;
namespace Kavita.Common
{
public static class Configuration
{
#region JWT Token
public static bool CheckIfJwtTokenSet(string filePath)
{
try {
var json = File.ReadAllText(filePath);
var jsonObj = JsonSerializer.Deserialize<dynamic>(json);
const string key = "TokenKey";
public static class Configuration
{
private static string AppSettingsFilename = GetAppSettingFilename();
public static string Branch
{
get => GetBranch(GetAppSettingFilename());
set => SetBranch(GetAppSettingFilename(), value);
}
if (jsonObj.TryGetProperty(key, out JsonElement tokenElement))
{
return tokenElement.GetString() != "super secret unguessable key";
}
public static int Port
{
get => GetPort(GetAppSettingFilename());
set => SetPort(GetAppSettingFilename(), value);
}
return false;
public static string JwtToken
{
get => GetJwtToken(GetAppSettingFilename());
set => SetJwtToken(GetAppSettingFilename(), value);
}
}
catch (Exception ex) {
Console.WriteLine("Error writing app settings: " + ex.Message);
public static string LogLevel
{
get => GetLogLevel(GetAppSettingFilename());
set => SetLogLevel(GetAppSettingFilename(), value);
}
private static string GetAppSettingFilename()
{
if (!string.IsNullOrEmpty(AppSettingsFilename))
{
return AppSettingsFilename;
}
var environment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
var isDevelopment = environment == Environments.Development;
return "appsettings" + (isDevelopment ? ".Development" : "") + ".json";
}
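// Hypothetical usage of the static wrappers above: callers read and write
// settings without passing the appsettings path themselves.
//   Configuration.Port = 5001;          // rewrites "Port" in appsettings*.json
//   var branch = Configuration.Branch;  // returns "main" when the key is absent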
#region JWT Token
private static string GetJwtToken(string filePath)
{
try
{
var json = File.ReadAllText(filePath);
var jsonObj = JsonSerializer.Deserialize<dynamic>(json);
const string key = "TokenKey";
if (jsonObj.TryGetProperty(key, out JsonElement tokenElement))
{
return tokenElement.GetString();
}
return string.Empty;
}
catch (Exception ex)
{
Console.WriteLine("Error reading app settings: " + ex.Message);
}
return string.Empty;
}
private static bool SetJwtToken(string filePath, string token)
{
try
{
var currentToken = GetJwtToken(filePath);
var json = File.ReadAllText(filePath)
.Replace("\"TokenKey\": \"" + currentToken, "\"TokenKey\": \"" + token);
File.WriteAllText(filePath, json);
return true;
}
catch (Exception)
{
return false;
}
public static bool UpdateJwtToken(string filePath, string token)
{
try
{
var json = File.ReadAllText(filePath).Replace("super secret unguessable key", token);
File.WriteAllText(filePath, json);
return true;
}
catch (Exception)
{
return false;
}
}
#endregion
#region Port
public static bool UpdatePort(string filePath, int port)
{
if (new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker)
{
return true;
}
}
}
try
{
var currentPort = GetPort(filePath);
var json = File.ReadAllText(filePath).Replace("\"Port\": " + currentPort, "\"Port\": " + port);
File.WriteAllText(filePath, json);
return true;
}
catch (Exception)
{
return false;
}
}
public static int GetPort(string filePath)
{
const int defaultPort = 5000;
if (new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker)
{
return defaultPort;
}
public static bool CheckIfJwtTokenSet()
{
//string filePath
try
{
return GetJwtToken(GetAppSettingFilename()) != "super secret unguessable key";
}
catch (Exception ex)
{
Console.WriteLine("Error writing app settings: " + ex.Message);
}
try {
var json = File.ReadAllText(filePath);
var jsonObj = JsonSerializer.Deserialize<dynamic>(json);
const string key = "Port";
return false;
}
if (jsonObj.TryGetProperty(key, out JsonElement tokenElement))
{
return tokenElement.GetInt32();
}
}
catch (Exception ex) {
Console.WriteLine("Error writing app settings: " + ex.Message);
}
public static bool UpdateJwtToken(string token)
{
try
{
var filePath = GetAppSettingFilename();
var json = File.ReadAllText(filePath).Replace("super secret unguessable key", token);
File.WriteAllText(GetAppSettingFilename(), json);
return true;
}
catch (Exception)
{
return false;
}
}
#endregion
#region Port
public static bool SetPort(string filePath, int port)
{
if (new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker)
{
return true;
}
try
{
var currentPort = GetPort(filePath);
var json = File.ReadAllText(filePath).Replace("\"Port\": " + currentPort, "\"Port\": " + port);
File.WriteAllText(filePath, json);
return true;
}
catch (Exception)
{
return false;
}
}
public static int GetPort(string filePath)
{
Console.WriteLine(GetAppSettingFilename());
const int defaultPort = 5000;
if (new OsInfo(Array.Empty<IOsVersionAdapter>()).IsDocker)
{
return defaultPort;
}
#endregion
#region LogLevel
public static bool UpdateLogLevel(string filePath, string logLevel)
{
try
{
var currentLevel = GetLogLevel(filePath);
var json = File.ReadAllText(filePath).Replace($"\"Default\": \"{currentLevel}\"", $"\"Default\": \"{logLevel}\"");
File.WriteAllText(filePath, json);
return true;
}
catch (Exception)
{
return false;
}
}
public static string GetLogLevel(string filePath)
{
try {
var json = File.ReadAllText(filePath);
var jsonObj = JsonSerializer.Deserialize<dynamic>(json);
if (jsonObj.TryGetProperty("Logging", out JsonElement tokenElement))
{
foreach (var property in tokenElement.EnumerateObject())
{
if (!property.Name.Equals("LogLevel")) continue;
foreach (var logProperty in property.Value.EnumerateObject())
{
if (logProperty.Name.Equals("Default"))
{
return logProperty.Value.GetString();
}
}
}
}
}
catch (Exception ex) {
Console.WriteLine("Error writing app settings: " + ex.Message);
}
}
return "Information";
}
#endregion
}
try
{
var json = File.ReadAllText(filePath);
var jsonObj = JsonSerializer.Deserialize<dynamic>(json);
const string key = "Port";
if (jsonObj.TryGetProperty(key, out JsonElement tokenElement))
{
return tokenElement.GetInt32();
}
}
catch (Exception ex)
{
Console.WriteLine("Error writing app settings: " + ex.Message);
}
return defaultPort;
}
#endregion
#region LogLevel
public static bool SetLogLevel(string filePath, string logLevel)
{
try
{
var currentLevel = GetLogLevel(filePath);
var json = File.ReadAllText(filePath)
.Replace($"\"Default\": \"{currentLevel}\"", $"\"Default\": \"{logLevel}\"");
File.WriteAllText(filePath, json);
return true;
}
catch (Exception)
{
return false;
}
}
public static string GetLogLevel(string filePath)
{
try
{
var json = File.ReadAllText(filePath);
var jsonObj = JsonSerializer.Deserialize<dynamic>(json);
if (jsonObj.TryGetProperty("Logging", out JsonElement tokenElement))
{
foreach (var property in tokenElement.EnumerateObject())
{
if (!property.Name.Equals("LogLevel")) continue;
foreach (var logProperty in property.Value.EnumerateObject())
{
if (logProperty.Name.Equals("Default"))
{
return logProperty.Value.GetString();
}
}
}
}
}
catch (Exception ex)
{
Console.WriteLine("Error writing app settings: " + ex.Message);
}
return "Information";
}
#endregion
public static string GetBranch(string filePath)
{
const string defaultBranch = "main";
try
{
var json = File.ReadAllText(filePath);
var jsonObj = JsonSerializer.Deserialize<dynamic>(json);
const string key = "Branch";
if (jsonObj.TryGetProperty(key, out JsonElement tokenElement))
{
return tokenElement.GetString();
}
}
catch (Exception ex)
{
Console.WriteLine("Error reading app settings: " + ex.Message);
}
return defaultBranch;
}
public static bool SetBranch(string filePath, string updatedBranch)
{
try
{
var currentBranch = GetBranch(filePath);
var json = File.ReadAllText(filePath)
    .Replace($"\"Branch\": \"{currentBranch}\"", $"\"Branch\": \"{updatedBranch}\"");
File.WriteAllText(filePath, json);
return true;
}
catch (Exception)
{
return false;
}
}
}
}


@ -0,0 +1,21 @@
using System.ComponentModel;
namespace Kavita.Common.Extensions
{
public static class EnumExtensions
{
public static string ToDescription<TEnum>(this TEnum value) where TEnum : struct
{
var fi = value.GetType().GetField(value.ToString() ?? string.Empty);
if (fi == null)
{
return value.ToString();
}
var attributes = (DescriptionAttribute[])fi.GetCustomAttributes(typeof(DescriptionAttribute), false);
return attributes is {Length: > 0} ? attributes[0].Description : value.ToString();
}
}
}
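// Hypothetical usage of ToDescription, assuming an enum annotated with
// System.ComponentModel.DescriptionAttribute:
//   public enum Sample { [Description("Comic Archive")] Archive, Epub }
//   Sample.Archive.ToDescription() // "Comic Archive" (from the attribute)
//   Sample.Epub.ToDescription()    // "Epub" (falls back to ToString())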


@ -0,0 +1,12 @@
using System.IO;
namespace Kavita.Common.Extensions
{
public static class PathExtensions
{
public static string GetParentDirectory(string filePath)
{
return Path.GetDirectoryName(filePath);
}
}
}
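// Example (hypothetical path): GetParentDirectory(@"C:\books\manga\a.cbz")
// returns @"C:\books\manga"; like Path.GetDirectoryName, it can return null
// for root paths.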


@ -5,7 +5,7 @@ namespace Kavita.Common
{
public static class HashUtil
{
public static string CalculateCrc(string input)
private static string CalculateCrc(string input)
{
uint mCrc = 0xffffffff;
byte[] bytes = Encoding.UTF8.GetBytes(input);
@ -28,10 +28,14 @@ namespace Kavita.Common
return $"{mCrc:x8}";
}
/// <summary>
/// Calculates a unique, anonymous token that represents this Kavita installation.
/// </summary>
/// <returns></returns>
public static string AnonymousToken()
{
var seed = $"{Environment.ProcessorCount}_{Environment.OSVersion.Platform}_{Environment.MachineName}_{Environment.UserName}";
return HashUtil.CalculateCrc(seed);
var seed = $"{Environment.ProcessorCount}_{Environment.OSVersion.Platform}_{Configuration.JwtToken}_{Environment.UserName}";
return CalculateCrc(seed);
}
}
}
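// Usage sketch: AnonymousToken() hashes processor count, OS platform, the
// install's JwtToken and user name, so the 8-hex-character CRC (e.g.
// "9f3a1c2b", a hypothetical value) stays stable until the JWT token changes.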


@ -2,21 +2,21 @@
<PropertyGroup>
<TargetFramework>net5.0</TargetFramework>
<Company>kareadita.github.io</Company>
<Company>kavitareader.com</Company>
<Product>Kavita</Product>
<AssemblyVersion>0.4.2.0</AssemblyVersion>
<AssemblyVersion>0.4.3.0</AssemblyVersion>
<NeutralLanguage>en</NeutralLanguage>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="5.0.0" />
<PackageReference Include="Sentry" Version="3.3.4" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="5.0.0" />
<PackageReference Include="Sentry" Version="3.8.2" />
<PackageReference Include="SonarAnalyzer.CSharp" Version="8.26.0.34506">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<Reference Include="JetBrains.ReSharper.TestRunner.Merged, Version=1.3.1.55, Culture=neutral, PublicKeyToken=5c492ec4f3eccde3">
<HintPath>D:\Program Files\JetBrains\JetBrains Rider 2020.3.2\lib\ReSharperHost\TestRunner\netcoreapp2.0\JetBrains.ReSharper.TestRunner.Merged.dll</HintPath>
</Reference>
</ItemGroup>
</Project>


@ -18,7 +18,7 @@ your reading collection with your friends and family!
</div>
## Goals
- [x] Serve up Manga/Webtoons/Comics (cbr, cbz, zip/rar, 7zip, raw images) and Books (epub, mobi, azw, djvu, pdf)
- [x] Serve up Manga/Webtoons/Comics (cbr, cbz, zip/rar, 7zip, raw images) and Books (epub, pdf)
- [x] First class responsive readers that work great on any device (phone, tablet, desktop)
- [x] Dark and Light themes
- [ ] Provide hooks into metadata providers to fetch metadata for Comics, Manga, and Books
@ -26,7 +26,7 @@ your reading collection with your friends and family!
- [x] Ability to manage users, access, and ratings
- [ ] Ability to sync ratings and reviews to external services
- [x] Fully Accessible with active accessibility audits
- [x] Dedicated webtoon reader (in beta testing)
- [x] Dedicated webtoon reading mode
- [ ] And so much [more...](https://github.com/Kareadita/Kavita/projects)
## Support
@ -34,6 +34,14 @@ your reading collection with your friends and family!
[![Discord](https://img.shields.io/badge/discord-chat-7289DA.svg?maxAge=60)](https://discord.gg/eczRp9eeem)
[![GitHub - Bugs and Feature Requests Only](https://img.shields.io/badge/github-issues-red.svg?maxAge=60)](https://github.com/Kareadita/Kavita/issues)
## Demo
If you want to try out Kavita, we have a demo up:
[https://demo.kavitareader.com/](https://demo.kavitareader.com/)
```
Username: demouser
Password: Demouser64
```
## Setup
### Non-Docker
- Unzip the archive for your target OS
@ -84,7 +92,7 @@ This project exists thanks to all the people who contribute. [Contribute](CONTRI
## Donate
If you like Kavita, have gotten good use out of it or feel like you want to say thanks with a few bucks, feel free to donate. Money will go towards
expenses related to Kavita. Back us through [OpenCollective](https://opencollective.com/Kavita#backer).
expenses related to Kavita. Back us through [OpenCollective](https://opencollective.com/Kavita#backer). You can also use [Paypal](https://www.paypal.com/paypalme/majora2007?locale.x=en_US), however your name will not show below.
## Backers
@ -108,9 +116,9 @@ Thank you to [<img src="/Logo/jetbrains.svg" alt="" width="32"> JetBrains](http:
* [<img src="/Logo/dottrace.svg" alt="" width="32"> dotTrace](http://www.jetbrains.com/dottrace/)
## Sentry
Thank you to [<img src="/Logo/sentry.svg" alt="" width="32"> Sentry](https://sentry.io/welcome/) for providing us with free license to their software.
Thank you to [<img src="/Logo/sentry.svg" alt="" width="64">](https://sentry.io/welcome/) for providing us with free license to their software.
### License
* [GNU GPL v3](http://www.gnu.org/licenses/gpl.html)
* Copyright 2010-2021
* Copyright 2020-2021

674
UI/Web/LICENSE Normal file

@ -0,0 +1,674 @@
 GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.

27
UI/Web/README.md Normal file
@@ -0,0 +1,27 @@
# Kavita Webui
This project was generated with [Angular CLI](https://github.com/angular/angular-cli) version 11.0.0.
## Development server
Run `ng serve` for a dev server. Navigate to `http://localhost:4200/`. The app will automatically reload if you change any of the source files.
## Code scaffolding
Run `ng generate component component-name` to generate a new component. You can also use `ng generate directive|pipe|service|class|guard|interface|enum|module`.
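A rough sketch of the class that scaffolding generates under this project's schematics (scss styles); the component name is invented for illustration:

```typescript
// Approximation of `ng generate component series-card` output for an
// Angular 11 project configured with scss styles. Name is hypothetical.
import { Component, OnInit } from '@angular/core';

@Component({
  selector: 'app-series-card',
  templateUrl: './series-card.component.html',
  styleUrls: ['./series-card.component.scss']
})
export class SeriesCardComponent implements OnInit {

  constructor() { }

  ngOnInit(): void {
  }
}
```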
## Build
Run `ng build` to build the project. The build artifacts will be stored in the `dist/` directory. Use the `--prod` flag for a production build.
## Running unit tests
Run `ng test` to execute the unit tests via [Karma](https://karma-runner.github.io).
## Running end-to-end tests
Run `ng e2e` to execute the end-to-end tests via [Protractor](http://www.protractortest.org/).
## Further help
To get more help on the Angular CLI use `ng help` or go check out the [Angular CLI Overview and Command Reference](https://angular.io/cli) page.

139
UI/Web/angular.json Normal file
@@ -0,0 +1,139 @@
{
"$schema": "./node_modules/@angular/cli/lib/config/schema.json",
"version": 1,
"newProjectRoot": "projects",
"projects": {
"kavita-webui": {
"projectType": "application",
"schematics": {
"@schematics/angular:component": {
"style": "scss"
},
"@schematics/angular:application": {
"strict": true
}
},
"root": "",
"sourceRoot": "src",
"prefix": "app",
"architect": {
"build": {
"builder": "@angular-devkit/build-angular:browser",
"options": {
"outputPath": "dist",
"index": "src/index.html",
"main": "src/main.ts",
"polyfills": "src/polyfills.ts",
"tsConfig": "tsconfig.app.json",
"aot": true,
"assets": [
"src/assets",
"src/site.webmanifest"
],
"sourceMap": {
"hidden": false,
"scripts": true,
"styles": true
},
"styles": [
"src/styles.scss",
"node_modules/@fortawesome/fontawesome-free/css/all.min.css"
],
"scripts": []
},
"configurations": {
"production": {
"fileReplacements": [
{
"replace": "src/environments/environment.ts",
"with": "src/environments/environment.prod.ts"
}
],
"optimization": true,
"outputHashing": "all",
"namedChunks": false,
"extractLicenses": true,
"vendorChunk": true,
"buildOptimizer": true,
"budgets": [
{
"type": "initial",
"maximumWarning": "1mb",
"maximumError": "2mb"
},
{
"type": "anyComponentStyle",
"maximumWarning": "2kb",
"maximumError": "4kb"
}
]
}
}
},
"serve": {
"builder": "@angular-devkit/build-angular:dev-server",
"options": {
"sslKey": "./ssl/server.key",
"sslCert": "./ssl/server.crt",
"ssl": false,
"browserTarget": "kavita-webui:build"
},
"configurations": {
"production": {
"browserTarget": "kavita-webui:build:production"
}
}
},
"extract-i18n": {
"builder": "@angular-devkit/build-angular:extract-i18n",
"options": {
"browserTarget": "kavita-webui:build"
}
},
"test": {
"builder": "@angular-devkit/build-angular:karma",
"options": {
"main": "src/test.ts",
"polyfills": "src/polyfills.ts",
"tsConfig": "tsconfig.spec.json",
"karmaConfig": "karma.conf.js",
"assets": [
"src/assets",
"src/site.webmanifest"
],
"styles": [
"src/styles.scss"
],
"scripts": []
}
},
"lint": {
"builder": "@angular-devkit/build-angular:tslint",
"options": {
"tsConfig": [
"tsconfig.app.json",
"tsconfig.spec.json",
"e2e/tsconfig.json"
],
"exclude": [
"**/node_modules/**"
]
}
},
"e2e": {
"builder": "@angular-devkit/build-angular:protractor",
"options": {
"protractorConfig": "e2e/protractor.conf.js",
"devServerTarget": "kavita-webui:serve"
},
"configurations": {
"production": {
"devServerTarget": "kavita-webui:serve:production"
}
}
}
}
}
},
"defaultProject": "kavita-webui"
}

@@ -0,0 +1,37 @@
// @ts-check
// Protractor configuration file, see link for more information
// https://github.com/angular/protractor/blob/master/lib/config.ts
const { SpecReporter, StacktraceOption } = require('jasmine-spec-reporter');
/**
* @type { import("protractor").Config }
*/
exports.config = {
allScriptsTimeout: 11000,
specs: [
'./src/**/*.e2e-spec.ts'
],
capabilities: {
browserName: 'chrome'
},
directConnect: true,
SELENIUM_PROMISE_MANAGER: false,
baseUrl: 'http://localhost:4200/',
framework: 'jasmine',
jasmineNodeOpts: {
showColors: true,
defaultTimeoutInterval: 30000,
print: function() {}
},
onPrepare() {
require('ts-node').register({
project: require('path').join(__dirname, './tsconfig.json')
});
jasmine.getEnv().addReporter(new SpecReporter({
spec: {
displayStacktrace: StacktraceOption.PRETTY
}
}));
}
};

@@ -0,0 +1,23 @@
import { AppPage } from './app.po';
import { browser, logging } from 'protractor';
describe('workspace-project App', () => {
let page: AppPage;
beforeEach(() => {
page = new AppPage();
});
it('should display welcome message', async () => {
await page.navigateTo();
expect(await page.getTitleText()).toEqual('kavita-webui app is running!');
});
afterEach(async () => {
// Assert that there are no errors emitted from the browser
const logs = await browser.manage().logs().get(logging.Type.BROWSER);
expect(logs).not.toContain(jasmine.objectContaining({
level: logging.Level.SEVERE,
} as logging.Entry));
});
});

11
UI/Web/e2e/src/app.po.ts Normal file
@@ -0,0 +1,11 @@
import { browser, by, element } from 'protractor';
export class AppPage {
async navigateTo(): Promise<unknown> {
return browser.get(browser.baseUrl);
}
async getTitleText(): Promise<string> {
return element(by.css('app-root .content span')).getText();
}
}

13
UI/Web/e2e/tsconfig.json Normal file
@@ -0,0 +1,13 @@
/* To learn more about this file see: https://angular.io/config/tsconfig. */
{
"extends": "../tsconfig.json",
"compilerOptions": {
"outDir": "../out-tsc/e2e",
"module": "commonjs",
"target": "es2018",
"types": [
"jasmine",
"node"
]
}
}

37201
UI/Web/package-lock.json generated Normal file

File diff suppressed because it is too large

78
UI/Web/package.json Normal file
@@ -0,0 +1,78 @@
{
"name": "kavita-webui",
"version": "0.4.2",
"scripts": {
"ng": "ng",
"start": "ng serve",
"build": "ng build",
"prod": "ng build --prod",
"explore": "ng build --stats-json && webpack-bundle-analyzer ../kavita/API/wwwroot/stats.json",
"test": "jest",
"test:watch": "jest --watch",
"test:coverage": "jest --coverage",
"lint": "ng lint",
"e2e": "ng e2e"
},
"private": true,
"dependencies": {
"@angular-slider/ngx-slider": "^2.0.3",
"@angular/animations": "~11.0.0",
"@angular/common": "~11.0.0",
"@angular/compiler": "~11.0.0",
"@angular/core": "~11.0.0",
"@angular/forms": "~11.0.0",
"@angular/localize": "~11.0.0",
"@angular/platform-browser": "~11.0.0",
"@angular/platform-browser-dynamic": "~11.0.0",
"@angular/router": "~11.0.0",
"@fortawesome/fontawesome-free": "^5.15.1",
"@ng-bootstrap/ng-bootstrap": "^9.1.0",
"@ngx-lite/nav-drawer": "^0.4.6",
"@ngx-lite/util": "0.0.0",
"@sentry/angular": "^6.10.0",
"@sentry/integrations": "^6.10.0",
"@types/file-saver": "^2.0.1",
"angular-ng-autocomplete": "^2.0.5",
"bootstrap": "^4.5.0",
"bowser": "^2.11.0",
"file-saver": "^2.0.5",
"ng-lazyload-image": "^9.1.0",
"ng-sidebar": "^9.4.2",
"ngx-toastr": "^13.2.1",
"rxjs": "~6.6.0",
"swiper": "^6.5.8",
"tslib": "^2.0.0",
"zone.js": "~0.10.2"
},
"devDependencies": {
"@angular-devkit/build-angular": "~0.1100.0",
"@angular/cli": "^11.2.11",
"@angular/compiler-cli": "~11.0.0",
"@types/jest": "^26.0.20",
"@types/node": "^12.11.1",
"codelyzer": "^6.0.0",
"jest": "^26.6.3",
"jest-preset-angular": "^8.3.2",
"karma-coverage": "~2.0.3",
"protractor": "~7.0.0",
"ts-node": "~8.3.0",
"tslint": "^6.1.3",
"typescript": "~4.0.2"
},
"jest": {
"preset": "jest-preset-angular",
"setupFilesAfterEnv": [
"<rootDir>/setupJest.ts"
],
"testPathIgnorePatterns": [
"<rootDir>/node_modules/",
"<rootDir>/dist/"
],
"globals": {
"ts-jest": {
"tsConfig": "<rootDir>/tsconfig.spec.json",
"stringifyContentPathRegex": "\\.html$"
}
}
}
}
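The `jest` block above registers `jest-preset-angular` with a global setup file, so unit tests run under Jest rather than Karma. A minimal sketch of a spec that would run under this configuration; the `Pager` class is invented for illustration and is not part of this commit:

```typescript
// Hypothetical spec to exercise the Jest setup defined in package.json.
class Pager {
  constructor(private pageSize: number) {}

  // Number of pages needed for totalItems, never less than one.
  totalPages(totalItems: number): number {
    return Math.max(1, Math.ceil(totalItems / this.pageSize));
  }
}

describe('Pager', () => {
  it('rounds partial pages up', () => {
    expect(new Pager(20).totalPages(45)).toBe(3);
  });

  it('always reports at least one page', () => {
    expect(new Pager(20).totalPages(0)).toBe(1);
  });
});
```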

19
UI/Web/setupJest.ts Normal file
@@ -0,0 +1,19 @@
import 'jest-preset-angular';
/* global mocks for jsdom */
const mock = () => {
let storage: { [key: string]: string } = {};
return {
getItem: (key: string) => (key in storage ? storage[key] : null),
setItem: (key: string, value: string) => (storage[key] = value || ''),
removeItem: (key: string) => delete storage[key],
clear: () => (storage = {})
};
};
Object.defineProperty(window, 'localStorage', { value: mock() });
Object.defineProperty(window, 'sessionStorage', { value: mock() });
Object.defineProperty(window, 'getComputedStyle', {
value: () => ['-webkit-appearance'],
});
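Because the mock above replaces `window.localStorage` and `window.sessionStorage` for every spec, storage-dependent code can be tested without a browser. A small assumed example, not part of the commit:

```typescript
// Exercises the localStorage mock installed by setupJest.ts.
describe('localStorage mock', () => {
  it('acts as an in-memory key-value store', () => {
    localStorage.setItem('kavita--example', 'value');
    expect(localStorage.getItem('kavita--example')).toEqual('value');

    localStorage.clear();
    expect(localStorage.getItem('kavita--example')).toBeNull();
  });
});
```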

@@ -0,0 +1,28 @@
import { Injectable } from '@angular/core';
import { CanActivate } from '@angular/router';
import { ToastrService } from 'ngx-toastr';
import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';
import { User } from '../_models/user';
import { AccountService } from '../_services/account.service';
@Injectable({
providedIn: 'root'
})
export class AdminGuard implements CanActivate {
constructor(private accountService: AccountService, private toastr: ToastrService) {}
canActivate(): Observable<boolean> {
// The router subscribes to guard observables itself, so no manual subscription is needed here
return this.accountService.currentUser$.pipe(
map((user: User) => {
if (this.accountService.hasAdminRole(user)) {
return true;
}
this.toastr.error('You are not authorized to view this page.');
return false;
})
);
}
}
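A guard only runs once it is attached to a route. A hedged sketch of how `AdminGuard` might be wired up; the route path, lazy-loaded module, and import locations are assumptions, since the routing module is not part of this excerpt:

```typescript
// Hypothetical routing excerpt. canActivate() runs before the route
// activates, so non-admins are bounced with the toast above.
import { Routes } from '@angular/router';
import { AdminGuard } from './_guards/admin.guard'; // assumed path

const routes: Routes = [
  {
    path: 'admin',
    canActivate: [AdminGuard],
    // Invented lazy-loaded module, purely for illustration.
    loadChildren: () => import('./admin/admin.module').then(m => m.AdminModule)
  }
];
```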

@@ -0,0 +1,29 @@
import { Injectable } from '@angular/core';
import { CanActivate, Router } from '@angular/router';
import { ToastrService } from 'ngx-toastr';
import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';
import { User } from '../_models/user';
import { AccountService } from '../_services/account.service';
@Injectable({
providedIn: 'root'
})
export class AuthGuard implements CanActivate {
public urlKey: string = 'kavita--auth-intersection-url';
constructor(private accountService: AccountService, private router: Router, private toastr: ToastrService) {}
canActivate(): Observable<boolean> {
return this.accountService.currentUser$.pipe(
map((user: User) => {
if (user) {
return true;
}
this.toastr.error('You are not authorized to view this page.');
localStorage.setItem(this.urlKey, window.location.pathname);
this.router.navigateByUrl('/libraries');
return false;
})
);
}
}
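The guard stashes the attempted URL under `urlKey` before redirecting, which implies a companion step after login that restores it. A sketch of that counterpart, assuming the login flow has `Router` available; none of this is in the excerpt:

```typescript
// Hypothetical post-login redirect using the key saved by AuthGuard.
import { Router } from '@angular/router';

export function redirectAfterLogin(router: Router): void {
  const urlKey = 'kavita--auth-intersection-url';
  const savedUrl = localStorage.getItem(urlKey);
  if (savedUrl) {
    localStorage.removeItem(urlKey);
    router.navigateByUrl(savedUrl);
  } else {
    router.navigateByUrl('/libraries');
  }
}
```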

@@ -0,0 +1,17 @@
import { Injectable } from '@angular/core';
import { CanActivate, ActivatedRouteSnapshot, RouterStateSnapshot } from '@angular/router';
import { Observable } from 'rxjs';
import { MemberService } from '../_services/member.service';
@Injectable({
providedIn: 'root'
})
export class LibraryAccessGuard implements CanActivate {
constructor(private memberService: MemberService) {}
canActivate(next: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<boolean> {
const libraryId = parseInt(state.url.split('library/')[1], 10);
return this.memberService.hasLibraryAccess(libraryId);
}
}

@@ -0,0 +1,121 @@
import { Injectable } from '@angular/core';
import {
HttpRequest,
HttpHandler,
HttpEvent,
HttpInterceptor
} from '@angular/common/http';
import { Observable, throwError } from 'rxjs';
import { Router } from '@angular/router';
import { ToastrService } from 'ngx-toastr';
import { catchError, take } from 'rxjs/operators';
import { AccountService } from '../_services/account.service';
@Injectable()
export class ErrorInterceptor implements HttpInterceptor {
public urlKey: string = 'kavita--no-connection-url';
constructor(private router: Router, private toastr: ToastrService, private accountService: AccountService) {}
intercept(request: HttpRequest<unknown>, next: HttpHandler): Observable<HttpEvent<unknown>> {
return next.handle(request).pipe(
catchError(error => {
if (error === undefined || error === null) {
return throwError(error);
}
switch (error.status) {
case 400:
this.handleValidationError(error);
break;
case 401:
this.handleAuthError(error);
break;
case 404:
this.handleNotFound(error);
break;
case 500:
this.handleServerException(error);
break;
default:
// Don't stack multiple 'Something unexpected went wrong.' toasts
if (this.toastr.previousToastMessage !== 'Something unexpected went wrong.') {
this.toastr.error('Something unexpected went wrong.');
}
// If we are not already on /no-connection, save the current url and redirect there so a refresh can send the user back
if (this.router.url !== '/no-connection') {
localStorage.setItem(this.urlKey, this.router.url);
this.router.navigateByUrl('/no-connection');
}
break;
}
return throwError(error);
})
);
}
private handleValidationError(error: any) {
// A 400 can carry several payload shapes: an array of issues, a validation-errors map, or a plain bad-request message
if (Array.isArray(error.error)) {
const modalStateErrors: any[] = [];
if (error.error.length > 0 && error.error[0].hasOwnProperty('message')) {
error.error.forEach((issue: {status: string, details: string, message: string}) => {
modalStateErrors.push(issue.details);
});
} else {
error.error.forEach((issue: {code: string, description: string}) => {
modalStateErrors.push(issue.description);
});
}
throw modalStateErrors.flat();
} else if (error.error.errors) {
// Validation error
const modalStateErrors = [];
for (const key in error.error.errors) {
if (error.error.errors[key]) {
modalStateErrors.push(error.error.errors[key]);
}
}
throw modalStateErrors.flat();
} else {
console.error('error:', error);
if (error.statusText === 'Bad Request') {
this.toastr.error(error.error, error.status);
} else {
this.toastr.error(error.statusText === 'OK' ? error.error : error.statusText, error.status);
}
}
}
private handleNotFound(error: any) {
this.toastr.error('That URL does not exist.');
}
private handleServerException(error: any) {
const err = error.error;
if (err.hasOwnProperty('message') && err.message.trim() !== '') {
if (err.message !== 'User is not authenticated') {
console.log('500 error: ', error);
}
this.toastr.error(err.message);
} else {
this.toastr.error('There was an unknown critical error.');
console.error('500 error:', error);
}
}
private handleAuthError(error: any) {
// NOTE: Signin has error.error or error.statusText available.
// if statement is due to http/2 spec issue: https://github.com/angular/angular/issues/23334
this.accountService.currentUser$.pipe(take(1)).subscribe(user => {
if (user) {
this.toastr.error(error.statusText === 'OK' ? 'Unauthorized' : error.statusText, error.status);
}
this.accountService.logout();
});
}
}
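Because `handleValidationError` rethrows a flattened array of strings, a subscriber's error callback sees plain messages rather than an `HttpErrorResponse`. A sketch of a consumer under that assumption; the endpoint and payload shape are invented:

```typescript
// Hypothetical caller: the error callback receives string[] for 400s
// that carried validation issues, per the interceptor above.
import { HttpClient } from '@angular/common/http';
import { ToastrService } from 'ngx-toastr';

export function register(http: HttpClient, toastr: ToastrService,
                         model: {username: string, password: string}): void {
  http.post('/api/account/register', model).subscribe(
    () => toastr.success('Account created.'),
    (messages: string[]) => messages.forEach(m => toastr.error(m))
  );
}
```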

@@ -0,0 +1,36 @@
import { Injectable } from '@angular/core';
import {
HttpRequest,
HttpHandler,
HttpEvent,
HttpInterceptor
} from '@angular/common/http';
import { Observable } from 'rxjs';
import { AccountService } from '../_services/account.service';
import { User } from '../_models/user';
import { take } from 'rxjs/operators';
@Injectable()
export class JwtInterceptor implements HttpInterceptor {
constructor(private accountService: AccountService) {}
intercept(request: HttpRequest<unknown>, next: HttpHandler): Observable<HttpEvent<unknown>> {
let currentUser: User;
// take(1) emits once and then completes, so no manual unsubscribe is needed
this.accountService.currentUser$.pipe(take(1)).subscribe(user => {
currentUser = user;
if (currentUser) {
request = request.clone({
setHeaders: {
Authorization: `Bearer ${currentUser.token}`
}
});
}
});
return next.handle(request);
}
}
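Neither interceptor takes effect until it is provided under the `HTTP_INTERCEPTORS` token. A minimal sketch of that registration; the real AppModule is outside this excerpt and the import paths are assumed:

```typescript
// multi: true keeps every registered interceptor in the chain; they run
// in provider order for outgoing requests.
import { HTTP_INTERCEPTORS } from '@angular/common/http';
import { ErrorInterceptor } from './_interceptors/error.interceptor'; // assumed path
import { JwtInterceptor } from './_interceptors/jwt.interceptor';     // assumed path

export const httpInterceptorProviders = [
  { provide: HTTP_INTERCEPTORS, useClass: ErrorInterceptor, multi: true },
  { provide: HTTP_INTERCEPTORS, useClass: JwtInterceptor, multi: true },
];
```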

Some files were not shown because too many files have changed in this diff.