From 8175ddf517c11eb94d526694f230c4551ab7edd6 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Sat, 5 Jun 2021 16:30:50 -0500 Subject: [PATCH 01/55] Fixed a critical crash in Scan library where Series Metadata was getting regenerated and unique constraint failed. (#269) --- API/Services/Tasks/ScannerService.cs | 1 - 1 file changed, 1 deletion(-) diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index e22803c4b..12f30afad 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -228,7 +228,6 @@ namespace API.Services.Tasks existingSeries.NormalizedName = Parser.Parser.Normalize(existingSeries.Name); existingSeries.OriginalName ??= infos[0].Series; - existingSeries.Metadata ??= DbFactory.SeriesMetadata(new List()); } // Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series From f0f74e867e594930d6e07dd066acace95ddfe695 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Sun, 6 Jun 2021 16:13:07 -0500 Subject: [PATCH 02/55] Fixed the constraint issue with Metadata and ensure we log parameters on development modes. (#273) --- API/Data/LibraryRepository.cs | 2 ++ API/Extensions/ApplicationServiceExtensions.cs | 5 ++++- API/Services/Tasks/ScannerService.cs | 1 + API/Startup.cs | 6 ++++-- 4 files changed, 11 insertions(+), 3 deletions(-) diff --git a/API/Data/LibraryRepository.cs b/API/Data/LibraryRepository.cs index c065bface..0af1f6760 100644 --- a/API/Data/LibraryRepository.cs +++ b/API/Data/LibraryRepository.cs @@ -106,6 +106,8 @@ namespace API.Data .Where(x => x.Id == libraryId) .Include(f => f.Folders) .Include(l => l.Series) + .ThenInclude(s => s.Metadata) + .Include(l => l.Series) .ThenInclude(s => s.Volumes) .ThenInclude(v => v.Chapters) .ThenInclude(c => c.Files) diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs index a3406ae27..2169aeb67 100644 --- a/API/Extensions/ApplicationServiceExtensions.cs +++ b/API/Extensions/ApplicationServiceExtensions.cs @@ -4,16 +4,18 @@ using API.Interfaces; using API.Interfaces.Services; using API.Services; using API.Services.Tasks; +using Microsoft.AspNetCore.Hosting; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Logging; namespace API.Extensions { public static class ApplicationServiceExtensions { - public static IServiceCollection AddApplicationServices(this IServiceCollection services, IConfiguration config) + public static IServiceCollection AddApplicationServices(this IServiceCollection services, IConfiguration config, IWebHostEnvironment env) { services.AddAutoMapper(typeof(AutoMapperProfiles).Assembly); services.AddScoped(); @@ -32,6 +34,7 @@ namespace API.Extensions services.AddDbContext(options => { options.UseSqlite(config.GetConnectionString("DefaultConnection")); + options.EnableSensitiveDataLogging(env.IsDevelopment()); }); services.AddLogging(loggingBuilder => diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index 12f30afad..e22803c4b 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -228,6 +228,7 @@ namespace API.Services.Tasks existingSeries.NormalizedName = Parser.Parser.Normalize(existingSeries.Name); existingSeries.OriginalName ??= infos[0].Series; + existingSeries.Metadata ??= DbFactory.SeriesMetadata(new List()); } // 
Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series diff --git a/API/Startup.cs b/API/Startup.cs index 82fd667a3..97d64145e 100644 --- a/API/Startup.cs +++ b/API/Startup.cs @@ -24,16 +24,18 @@ namespace API public class Startup { private readonly IConfiguration _config; + private readonly IWebHostEnvironment _env; - public Startup(IConfiguration config) + public Startup(IConfiguration config, IWebHostEnvironment env) { _config = config; + _env = env; } // This method gets called by the runtime. Use this method to add services to the container. public void ConfigureServices(IServiceCollection services) { - services.AddApplicationServices(_config); + services.AddApplicationServices(_config, _env); services.AddControllers(); services.Configure(options => { From e4a9c468fe9055d2a59d49bc80443656b560ed34 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Mon, 7 Jun 2021 11:32:26 -0500 Subject: [PATCH 03/55] Version bump and reduced some sentry logs --- API/Services/ArchiveService.cs | 4 ++-- API/Services/BookService.cs | 12 ++++++------ Kavita.Common/Kavita.Common.csproj | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs index 9adb19c0c..4cd25d2d9 100644 --- a/API/Services/ArchiveService.cs +++ b/API/Services/ArchiveService.cs @@ -245,13 +245,13 @@ namespace API.Services { if (!File.Exists(archivePath)) { - _logger.LogError("Archive {ArchivePath} could not be found", archivePath); + _logger.LogWarning("Archive {ArchivePath} could not be found", archivePath); return false; } if (Parser.Parser.IsArchive(archivePath) || Parser.Parser.IsEpub(archivePath)) return true; - _logger.LogError("Archive {ArchivePath} is not a valid archive", archivePath); + _logger.LogWarning("Archive {ArchivePath} is not a valid archive", archivePath); return false; } diff --git a/API/Services/BookService.cs b/API/Services/BookService.cs index 08c4e2209..8be3bb0c8 100644 --- a/API/Services/BookService.cs +++ b/API/Services/BookService.cs @@ -167,7 +167,7 @@ namespace API.Services } catch (Exception ex) { - _logger.LogError(ex, "[BookService] There was an exception getting summary, defaulting to empty string"); + _logger.LogWarning(ex, "[BookService] There was an exception getting summary, defaulting to empty string"); } return string.Empty; @@ -177,13 +177,13 @@ namespace API.Services { if (!File.Exists(filePath)) { - _logger.LogError("[BookService] Book {EpubFile} could not be found", filePath); + _logger.LogWarning("[BookService] Book {EpubFile} could not be found", filePath); return false; } if (Parser.Parser.IsBook(filePath)) return true; - _logger.LogError("[BookService] Book {EpubFile} is not a valid EPUB", filePath); + _logger.LogWarning("[BookService] Book {EpubFile} is not a valid EPUB", filePath); return false; } @@ -198,7 +198,7 @@ namespace API.Services } catch (Exception ex) { - _logger.LogError(ex, "[BookService] There was an exception getting number of pages, defaulting to 0"); + _logger.LogWarning(ex, "[BookService] There was an exception getting number of pages, defaulting to 0"); } return 0; @@ -250,7 +250,7 @@ namespace API.Services } catch (Exception ex) { - _logger.LogError(ex, "[BookService] There was an exception when opening epub book: {FileName}", filePath); + _logger.LogWarning(ex, "[BookService] There was an exception when opening epub book: {FileName}", filePath); } return null; @@ -285,7 +285,7 @@ namespace API.Services } catch (Exception ex) { - 
_logger.LogError(ex, "[BookService] There was a critical error and prevented thumbnail generation on {BookFile}. Defaulting to no cover image", fileFilePath); + _logger.LogWarning(ex, "[BookService] There was a critical error and prevented thumbnail generation on {BookFile}. Defaulting to no cover image", fileFilePath); } return Array.Empty(); diff --git a/Kavita.Common/Kavita.Common.csproj b/Kavita.Common/Kavita.Common.csproj index 43fe7f53d..673974db1 100644 --- a/Kavita.Common/Kavita.Common.csproj +++ b/Kavita.Common/Kavita.Common.csproj @@ -4,7 +4,7 @@ net5.0 kareadita.github.io Kavita - 0.4.1.0 + 0.4.1.1 en From 4606b546037197d6cefeb80f5618a4ab3793bc35 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Mon, 7 Jun 2021 13:16:50 -0500 Subject: [PATCH 04/55] Removed the SeriesMetadata migration since users have updated to v0.4.1. Any other users will require a scan to get the SeriesMetadata generated. (#275) --- API/Data/Seed.cs | 17 ----------------- API/Program.cs | 2 -- 2 files changed, 19 deletions(-) diff --git a/API/Data/Seed.cs b/API/Data/Seed.cs index 2dfeb1c0a..ad0c09236 100644 --- a/API/Data/Seed.cs +++ b/API/Data/Seed.cs @@ -7,7 +7,6 @@ using API.Entities; using API.Entities.Enums; using API.Services; using Microsoft.AspNetCore.Identity; -using Microsoft.EntityFrameworkCore; namespace API.Data { @@ -56,21 +55,5 @@ namespace API.Data await context.SaveChangesAsync(); } - - public static async Task SeedSeriesMetadata(DataContext context) - { - await context.Database.EnsureCreatedAsync(); - - context.Database.EnsureCreated(); - var series = await context.Series - .Include(s => s.Metadata).ToListAsync(); - - foreach (var s in series) - { - s.Metadata ??= new SeriesMetadata(); - } - - await context.SaveChangesAsync(); - } } } \ No newline at end of file diff --git a/API/Program.cs b/API/Program.cs index b2612efbe..da444974e 100644 --- a/API/Program.cs +++ b/API/Program.cs @@ -61,8 +61,6 @@ namespace API await context.Database.MigrateAsync(); await Seed.SeedRoles(roleManager); await Seed.SeedSettings(context); - // TODO: Remove this in v0.4.2 - await Seed.SeedSeriesMetadata(context); } catch (Exception ex) { From 5259a1484ad68546f334fe1be738855e36bbc1fa Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Mon, 7 Jun 2021 14:36:35 -0500 Subject: [PATCH 05/55] Implemented the ability to configure the default port for non-docker users. Docker users will always be 5000. 
(#280) --- API/Controllers/SettingsController.cs | 8 +++-- API/Data/Seed.cs | 2 +- API/Program.cs | 7 ++-- API/appsettings.Development.json | 3 +- Kavita.Common/Configuration.cs | 47 +++++++++++++++++++++++++++ build.sh | 3 +- 6 files changed, 63 insertions(+), 7 deletions(-) diff --git a/API/Controllers/SettingsController.cs b/API/Controllers/SettingsController.cs index 33565af56..7ceb0443a 100644 --- a/API/Controllers/SettingsController.cs +++ b/API/Controllers/SettingsController.cs @@ -8,6 +8,7 @@ using API.Entities.Enums; using API.Extensions; using API.Helpers.Converters; using API.Interfaces; +using Kavita.Common; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Configuration; @@ -34,7 +35,9 @@ namespace API.Controllers [HttpGet("")] public async Task> GetSettings() { - return Ok(await _unitOfWork.SettingsRepository.GetSettingsDtoAsync()); + var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync(); + settingsDto.Port = Configuration.GetPort(Program.GetAppSettingFilename()); + return Ok(settingsDto); } [Authorize(Policy = "RequireAdminRole")] @@ -76,7 +79,8 @@ namespace API.Controllers if (setting.Key == ServerSettingKey.Port && updateSettingsDto.Port + "" != setting.Value) { setting.Value = updateSettingsDto.Port + ""; - Environment.SetEnvironmentVariable("KAVITA_PORT", setting.Value); + // Port is managed in appSetting.json + Configuration.UpdatePort(Program.GetAppSettingFilename(), updateSettingsDto.Port); _unitOfWork.SettingsRepository.Update(setting); } diff --git a/API/Data/Seed.cs b/API/Data/Seed.cs index ad0c09236..a80cba4bd 100644 --- a/API/Data/Seed.cs +++ b/API/Data/Seed.cs @@ -41,7 +41,7 @@ namespace API.Data //new () {Key = ServerSettingKey.LoggingLevel, Value = "Information"}, new () {Key = ServerSettingKey.TaskBackup, Value = "weekly"}, new () {Key = ServerSettingKey.BackupDirectory, Value = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "backups/"))}, - new () {Key = ServerSettingKey.Port, Value = "5000"}, + //new () {Key = ServerSettingKey.Port, Value = "5000"}, // TODO: Remove ServerSettingKey }; foreach (var defaultSetting in defaultSettings) diff --git a/API/Program.cs b/API/Program.cs index da444974e..8b43951a1 100644 --- a/API/Program.cs +++ b/API/Program.cs @@ -20,13 +20,13 @@ namespace API { public class Program { - private static readonly int HttpPort = 5000; + private static int HttpPort; protected Program() { } - private static string GetAppSettingFilename() + public static string GetAppSettingFilename() { var environment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT"); var isDevelopment = environment == Environments.Development; @@ -46,6 +46,9 @@ namespace API var base64 = Convert.ToBase64String(rBytes).Replace("/", ""); Configuration.UpdateJwtToken(GetAppSettingFilename(), base64); } + + // Get HttpPort from Config + HttpPort = Configuration.GetPort(GetAppSettingFilename()); var host = CreateHostBuilder(args).Build(); diff --git a/API/appsettings.Development.json b/API/appsettings.Development.json index 4dd015431..119a1eb46 100644 --- a/API/appsettings.Development.json +++ b/API/appsettings.Development.json @@ -17,5 +17,6 @@ "FileSizeLimitBytes": 0, "MaxRollingFiles": 0 } - } + }, + "Port": 5000 } diff --git a/Kavita.Common/Configuration.cs b/Kavita.Common/Configuration.cs index 02a01c9d8..46b23cf77 100644 --- a/Kavita.Common/Configuration.cs +++ b/Kavita.Common/Configuration.cs @@ -1,6 +1,7 @@ using System; using System.IO; using System.Text.Json; +using 
Kavita.Common.EnvironmentInfo; namespace Kavita.Common { @@ -28,6 +29,7 @@ namespace Kavita.Common return false; } + public static bool UpdateJwtToken(string filePath, string token) { @@ -42,5 +44,50 @@ namespace Kavita.Common return false; } } + + public static bool UpdatePort(string filePath, int port) + { + if (new OsInfo(Array.Empty()).IsDocker) + { + return true; + } + + try + { + var currentPort = GetPort(filePath); + var json = File.ReadAllText(filePath).Replace("\"Port\": " + currentPort, "\"Port\": " + port); + File.WriteAllText(filePath, json); + return true; + } + catch (Exception) + { + return false; + } + } + + public static int GetPort(string filePath) + { + const int defaultPort = 5000; + if (new OsInfo(Array.Empty()).IsDocker) + { + return defaultPort; + } + + try { + var json = File.ReadAllText(filePath); + var jsonObj = JsonSerializer.Deserialize(json); + const string key = "Port"; + + if (jsonObj.TryGetProperty(key, out JsonElement tokenElement)) + { + return tokenElement.GetInt32(); + } + } + catch (Exception ex) { + Console.WriteLine("Error writing app settings: " + ex.Message); + } + + return defaultPort; + } } } \ No newline at end of file diff --git a/build.sh b/build.sh index 043cb559f..d32da32dc 100644 --- a/build.sh +++ b/build.sh @@ -15,6 +15,7 @@ ProgressEnd() UpdateVersionNumber() { + # TODO: Enhance this to increment version number in KavitaCommon.csproj if [ "$KAVITAVERSION" != "" ]; then echo "Updating Version Info" sed -i'' -e "s/[0-9.*]\+<\/AssemblyVersion>/$KAVITAVERSION<\/AssemblyVersion>/g" src/Directory.Build.props @@ -31,7 +32,7 @@ Build() slnFile=Kavita.sln - dotnet clean $slnFile -c Debug + #dotnet clean $slnFile -c Debug dotnet clean $slnFile -c Release if [[ -z "$RID" ]]; From 41a5d1bf2c1f3b99554aec9f8712346c9916ae10 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Mon, 7 Jun 2021 14:49:38 -0500 Subject: [PATCH 06/55] =?UTF-8?q?Fixed=20a=20bug=20on=20getting=20next/pre?= =?UTF-8?q?vious=20chapters=20where=20if=20chapters=20were=20=E2=80=A6=20(?= =?UTF-8?q?#281)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fixed a bug on getting next/previous chapters where if chapters were not inserted into the DB in the natural order, then the next/prev chapter would be skewed. 
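The fix is easiest to see with a stripped-down sketch (a simplified Chapter record and a plain numeric sort, not Kavita's real entity or ChapterSortComparer): the old code walked the chapters in whatever order they were stored and matched on the database Id, while the replacement sorts by the parsed chapter Number and matches on Number, so insertion order no longer matters.

    using System;
    using System.Collections.Generic;
    using System.Linq;

    public static class NextChapterSketch
    {
        private sealed record Chapter(int Id, string Number);

        // Same idea as the static GetNextChapterId in the diff below:
        // return the Id of the chapter that follows currentNumber, or -1 at the end.
        private static int GetNextChapterId(IEnumerable<Chapter> chapters, string currentNumber)
        {
            var next = false;
            foreach (var chapter in chapters)
            {
                if (next) return chapter.Id;
                if (chapter.Number.Equals(currentNumber)) next = true;
            }
            return -1;
        }

        public static void Main()
        {
            // Ids reflect insertion order, which here disagrees with reading order.
            var chapters = new List<Chapter> { new(12, "3"), new(10, "1"), new(11, "2") };
            var ordered = chapters.OrderBy(c => double.Parse(c.Number));
            Console.WriteLine(GetNextChapterId(ordered, "1")); // prints 11, the chapter numbered "2"
        }
    }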
* Make GetNextChapterId static --- API/Controllers/ReaderController.cs | 26 ++++++++------------------ 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/API/Controllers/ReaderController.cs b/API/Controllers/ReaderController.cs index 5a39f354a..c364fac48 100644 --- a/API/Controllers/ReaderController.cs +++ b/API/Controllers/ReaderController.cs @@ -272,20 +272,10 @@ namespace API.Controllers var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); var volumes = await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, user.Id); var currentVolume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId); - + var currentChapter = await _unitOfWork.VolumeRepository.GetChapterAsync(currentChapterId); if (currentVolume.Number == 0) { - var next = false; - foreach (var chapter in currentVolume.Chapters) - { - if (next) - { - return Ok(chapter.Id); - } - if (currentChapterId == chapter.Id) next = true; - } - - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapterId); + var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapter.Number); if (chapterId > 0) return Ok(chapterId); } @@ -293,7 +283,7 @@ namespace API.Controllers { if (volume.Number == currentVolume.Number && volume.Chapters.Count > 1) { - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapterId); + var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer), currentChapter.Number); if (chapterId > 0) return Ok(chapterId); } @@ -305,7 +295,7 @@ namespace API.Controllers return Ok(-1); } - private int GetNextChapterId(IEnumerable chapters, int currentChapterId) + private static int GetNextChapterId(IEnumerable chapters, string currentChapterNumber) { var next = false; foreach (var chapter in chapters) @@ -314,7 +304,7 @@ namespace API.Controllers { return chapter.Id; } - if (currentChapterId == chapter.Id) next = true; + if (currentChapterNumber.Equals(chapter.Number)) next = true; } return -1; @@ -333,11 +323,11 @@ namespace API.Controllers var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); var volumes = await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, user.Id); var currentVolume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId); - + var currentChapter = await _unitOfWork.VolumeRepository.GetChapterAsync(currentChapterId); if (currentVolume.Number == 0) { - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapterId); + var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapter.Number); if (chapterId > 0) return Ok(chapterId); } @@ -345,7 +335,7 @@ namespace API.Controllers { if (volume.Number == currentVolume.Number) { - var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapterId); + var chapterId = GetNextChapterId(currentVolume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparer).Reverse(), currentChapter.Number); if (chapterId > 0) return Ok(chapterId); } if (volume.Number == currentVolume.Number - 1) From d00cf7de92cb6f11032a9a9f86f448ac70e33670 Mon Sep 17 00:00:00 2001 From: Kizaing 
Date: Mon, 7 Jun 2021 17:04:08 -0400 Subject: [PATCH 07/55] Merged Dockerfiles (#282) * Dockerfile re-imagined to handle all flavors in one script. --- Dockerfile | 34 +++++++--------- Dockerfile.alpine | 28 -------------- Dockerfile.arm | 27 ------------- build_target.sh | 27 ------------- copy_runtime.sh | 19 +++++++++ docker-build.sh | 98 +++++++++++++++++++++++++++++++++++++++++++++++ 6 files changed, 130 insertions(+), 103 deletions(-) delete mode 100644 Dockerfile.alpine delete mode 100644 Dockerfile.arm delete mode 100644 build_target.sh create mode 100644 copy_runtime.sh create mode 100644 docker-build.sh diff --git a/Dockerfile b/Dockerfile index 7f4e8ac71..62dfe4336 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,35 +1,27 @@ -#This Dockerfile pulls the latest git commit and builds Kavita from source -FROM mcr.microsoft.com/dotnet/sdk:5.0-focal AS builder - -MAINTAINER Chris P - -ENV DEBIAN_FRONTEND=noninteractive -ARG TARGETPLATFORM - -#Installs nodejs and npm -RUN curl -fsSL https://deb.nodesource.com/setup_14.x | bash - \ - && apt-get install -y nodejs \ - && rm -rf /var/lib/apt/lists/* - -#Builds app based on platform -COPY build_target.sh /build_target.sh -RUN /build_target.sh +#This Dockerfile creates a build for all architectures #Production image -FROM ubuntu:focal +FROM ubuntu:focal AS copytask -MAINTAINER Chris P +ARG TARGETPLATFORM #Move the output files to where they need to be -COPY --from=builder /Projects/Kavita/_output/build/Kavita /kavita +RUN mkdir /files +COPY _output/*.tar.gz /files/ +COPY copy_runtime.sh /copy_runtime.sh +RUN /copy_runtime.sh + +FROM ubuntu:focal + +COPY --from=copytask /kavita /kavita #Installs program dependencies RUN apt-get update \ && apt-get install -y libicu-dev libssl1.1 pwgen \ && rm -rf /var/lib/apt/lists/* -#Creates the manga storage directory -RUN mkdir /manga /kavita/data +#Creates the data directory +RUN mkdir /kavita/data RUN cp /kavita/appsettings.Development.json /kavita/appsettings.json \ && sed -i 's/Data source=kavita.db/Data source=data\/kavita.db/g' /kavita/appsettings.json diff --git a/Dockerfile.alpine b/Dockerfile.alpine deleted file mode 100644 index faacfa823..000000000 --- a/Dockerfile.alpine +++ /dev/null @@ -1,28 +0,0 @@ -#This Dockerfile is for the musl alpine build of Kavita. 
-FROM alpine:latest - -MAINTAINER Chris P - -#Installs the needed dependencies -RUN apk update && apk add --no-cache wget curl pwgen icu-dev bash - -#Downloads Kavita, unzips and moves the folders to where they need to be -RUN wget https://github.com/Kareadita/Kavita/releases/download/v0.3.7/kavita-linux-musl-x64.tar.gz \ - && tar -xzf kavita*.tar.gz \ - && mv Kavita/ /kavita/ \ - && rm kavita*.gz \ - && chmod +x /kavita/Kavita - -#Creates the needed folders -RUN mkdir /manga /kavita/data /kavita/temp /kavita/cache - -RUN sed -i 's/Data source=kavita.db/Data source=data\/kavita.db/g' /kavita/appsettings.json - -COPY entrypoint.sh /entrypoint.sh - -EXPOSE 5000 - -WORKDIR /kavita - -ENTRYPOINT ["/bin/bash"] -CMD ["/entrypoint.sh"] diff --git a/Dockerfile.arm b/Dockerfile.arm deleted file mode 100644 index e28430a38..000000000 --- a/Dockerfile.arm +++ /dev/null @@ -1,27 +0,0 @@ -#This Dockerfile pulls the latest git commit and builds Kavita from source - -#Production image -FROM ubuntu:focal - -#Move the output files to where they need to be -COPY Kavita /kavita - -#Installs program dependencies -RUN apt-get update \ - && apt-get install -y libicu-dev libssl1.1 pwgen \ - && rm -rf /var/lib/apt/lists/* - -#Creates the manga storage directory -RUN mkdir /kavita/data - -RUN cp /kavita/appsettings.Development.json /kavita/appsettings.json \ - && sed -i 's/Data source=kavita.db/Data source=data\/kavita.db/g' /kavita/appsettings.json - -COPY entrypoint.sh /entrypoint.sh - -EXPOSE 5000 - -WORKDIR /kavita - -ENTRYPOINT ["/bin/bash"] -CMD ["/entrypoint.sh"] diff --git a/build_target.sh b/build_target.sh deleted file mode 100644 index 56c54ba79..000000000 --- a/build_target.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash - -mkdir Projects - -cd Projects - -git clone https://github.com/Kareadita/Kavita.git -git clone https://github.com/Kareadita/Kavita-webui.git - -cd Kavita -chmod +x build.sh - -#Builds program based on the target platform - -if [ "$TARGETPLATFORM" == "linux/amd64" ] -then - ./build.sh linux-x64 - mv /Projects/Kavita/_output/linux-x64 /Projects/Kavita/_output/build -elif [ "$TARGETPLATFORM" == "linux/arm/v7" ] -then - ./build.sh linux-arm - mv /Projects/Kavita/_output/linux-arm /Projects/Kavita/_output/build -elif [ "$TARGETPLATFORM" == "linux/arm64" ] -then - ./build.sh linux-arm64 - mv /Projects/Kavita/_output/linux-arm64 /Projects/Kavita/_output/build -fi diff --git a/copy_runtime.sh b/copy_runtime.sh new file mode 100644 index 000000000..f46a720f1 --- /dev/null +++ b/copy_runtime.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +#Copies the correct version of Kavita into the image + +set -xv + +if [ "$TARGETPLATFORM" == "linux/amd64" ] +then + tar xf /files/kavita-linux-x64.tar.gz -C / + mv /Kavita /kavita +elif [ "$TARGETPLATFORM" == "linux/arm/v7" ] +then + tar xf /files/kavita-linux-arm.tar.gz -C / + mv /Kavita /kavita +elif [ "$TARGETPLATFORM" == "linux/arm64" ] +then + tar xf /files/kavita-linux-arm64.tar.gz -C / + mv /Kavita /kavita +fi diff --git a/docker-build.sh b/docker-build.sh new file mode 100644 index 000000000..05b05dda4 --- /dev/null +++ b/docker-build.sh @@ -0,0 +1,98 @@ +#! 
/bin/bash +set -e + +outputFolder='_output' + +ProgressStart() +{ + echo "Start '$1'" +} + +ProgressEnd() +{ + echo "Finish '$1'" +} + +Build() +{ + local RID="$1" + + ProgressStart 'Build for $RID' + + slnFile=Kavita.sln + + dotnet clean $slnFile -c Debug + dotnet clean $slnFile -c Release + + dotnet msbuild -restore $slnFile -p:Configuration=Release -p:Platform="Any CPU" -p:RuntimeIdentifiers=$RID + + ProgressEnd 'Build for $RID' +} + +BuildUI() +{ + ProgressStart 'Building UI' + cd ../Kavita-webui/ || exit + npm install + npm run prod + cd ../Kavita/ || exit + ProgressEnd 'Building UI' +} + +Package() +{ + local framework="$1" + local runtime="$2" + local lOutputFolder=../_output/"$runtime"/Kavita + + ProgressStart "Creating $runtime Package for $framework" + + # TODO: Use no-restore? Because Build should have already done it for us + echo "Building" + cd API + echo dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + + echo "Copying Install information" + cp ../INSTALL.txt "$lOutputFolder"/README.txt + + echo "Copying LICENSE" + cp ../LICENSE "$lOutputFolder"/LICENSE.txt + + echo "Renaming API -> Kavita" + mv "$lOutputFolder"/API "$lOutputFolder"/Kavita + + echo "Creating tar" + cd ../$outputFolder/"$runtime"/ + tar -czvf ../kavita-$runtime.tar.gz Kavita + + ProgressEnd "Creating $runtime Package for $framework" + +} + +dir=$PWD + +if [ -d _output ] +then + rm -r _output/ +fi + +BuildUI + +#Build for x64 +Build "linux-x64" +Package "net5.0" "linux-x64" +cd "$dir" + +#Build for arm +Build "linux-arm" +Package "net5.0" "linux-arm" +cd "$dir" + +#Build for arm64 +Build "linux-arm64" +Package "net5.0" "linux-arm64" +cd "$dir" + +#Builds Docker images +docker buildx build -t kizaing/kavita:nightly --platform linux/amd64,linux/arm/v7,linux/arm64 . --push \ No newline at end of file From 6f124b6f8a6f165e1cd67ea9e799af0b7c57fc4b Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Mon, 7 Jun 2021 16:12:07 -0500 Subject: [PATCH 08/55] Add try catch on Parser MinimumNumberFromRange in case something weird gets put in here. (#283) --- API/Parser/Parser.cs | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index e5c9226b3..050a6ec29 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -795,12 +795,20 @@ namespace API.Parser public static float MinimumNumberFromRange(string range) { - if (!Regex.IsMatch(range, @"^[\d-.]+$")) + try + { + if (!Regex.IsMatch(range, @"^[\d-.]+$")) + { + return (float) 0.0; + } + + var tokens = range.Replace("_", string.Empty).Split("-"); + return tokens.Min(float.Parse); + } + catch { return (float) 0.0; } - var tokens = range.Replace("_", string.Empty).Split("-"); - return tokens.Min(float.Parse); } public static string Normalize(string name) From 7dae1da92f131a372126fe6972e037e429048c1a Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Mon, 7 Jun 2021 17:52:11 -0500 Subject: [PATCH 09/55] Logging Enhancements (#285) * If the user is on Debug, allow logging DB params to the logger. Implemented the ability to change logger from UI and to keep the DB settings of LogLevel and Port in sync with appsettings. 
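The sync treats appsettings.json as the source of truth: Configuration.GetPort and the new Configuration.GetLogLevel read the file, and the seeder copies those values into the matching ServerSetting rows (see the Seed.cs hunk below). The lookup boils down to the following standalone sketch; the inlined JSON only mirrors the expected shape (the standard Logging > LogLevel > Default section plus the top-level Port key added earlier) and is not Kavita's actual settings file.

    using System;
    using System.Text.Json;

    public static class AppSettingsLookupSketch
    {
        public static void Main()
        {
            const string json = "{ \"Logging\": { \"LogLevel\": { \"Default\": \"Information\" } }, \"Port\": 5000 }";

            using var doc = JsonDocument.Parse(json);
            var root = doc.RootElement;

            // Logging -> LogLevel -> Default is the path GetLogLevel walks; Port is what GetPort reads.
            var level = root.GetProperty("Logging").GetProperty("LogLevel").GetProperty("Default").GetString();
            var port = root.GetProperty("Port").GetInt32();

            Console.WriteLine($"{level}, port {port}"); // Information, port 5000
        }
    }

The real helpers wrap the lookup in try/catch and fall back to "Information" and 5000 when the file or keys are unreadable, and the port helpers short-circuit to 5000 when running under Docker.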
* Exclude a lot more clutter from hitting Sentry * Removed github action * Small cleanup --- API/Controllers/SettingsController.cs | 4 +- API/Data/Seed.cs | 17 +++++-- .../ApplicationServiceExtensions.cs | 8 +-- API/Program.cs | 13 ++++- API/Services/ArchiveService.cs | 4 +- Kavita.Common/Configuration.cs | 51 +++++++++++++++++-- 6 files changed, 81 insertions(+), 16 deletions(-) diff --git a/API/Controllers/SettingsController.cs b/API/Controllers/SettingsController.cs index 7ceb0443a..b0bc941af 100644 --- a/API/Controllers/SettingsController.cs +++ b/API/Controllers/SettingsController.cs @@ -37,6 +37,7 @@ namespace API.Controllers { var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync(); settingsDto.Port = Configuration.GetPort(Program.GetAppSettingFilename()); + settingsDto.LoggingLevel = Configuration.GetLogLevel(Program.GetAppSettingFilename()); return Ok(settingsDto); } @@ -87,6 +88,7 @@ namespace API.Controllers if (setting.Key == ServerSettingKey.LoggingLevel && updateSettingsDto.LoggingLevel + "" != setting.Value) { setting.Value = updateSettingsDto.LoggingLevel + ""; + Configuration.UpdateLogLevel(Program.GetAppSettingFilename(), updateSettingsDto.LoggingLevel); _unitOfWork.SettingsRepository.Update(setting); } } @@ -120,7 +122,7 @@ namespace API.Controllers [HttpGet("log-levels")] public ActionResult> GetLogLevels() { - return Ok(new [] {"Trace", "Debug", "Information", "Warning", "Critical", "None"}); + return Ok(new [] {"Trace", "Debug", "Information", "Warning", "Critical"}); } } } \ No newline at end of file diff --git a/API/Data/Seed.cs b/API/Data/Seed.cs index a80cba4bd..511fb8c1c 100644 --- a/API/Data/Seed.cs +++ b/API/Data/Seed.cs @@ -6,6 +6,7 @@ using API.Constants; using API.Entities; using API.Entities.Enums; using API.Services; +using Kavita.Common; using Microsoft.AspNetCore.Identity; namespace API.Data @@ -38,12 +39,12 @@ namespace API.Data { new() {Key = ServerSettingKey.CacheDirectory, Value = CacheService.CacheDirectory}, new () {Key = ServerSettingKey.TaskScan, Value = "daily"}, - //new () {Key = ServerSettingKey.LoggingLevel, Value = "Information"}, + new () {Key = ServerSettingKey.LoggingLevel, Value = "Information"}, // Not used from DB, but DB is sync with appSettings.json new () {Key = ServerSettingKey.TaskBackup, Value = "weekly"}, new () {Key = ServerSettingKey.BackupDirectory, Value = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "backups/"))}, - //new () {Key = ServerSettingKey.Port, Value = "5000"}, // TODO: Remove ServerSettingKey + new () {Key = ServerSettingKey.Port, Value = "5000"}, // Not used from DB, but DB is sync with appSettings.json }; - + foreach (var defaultSetting in defaultSettings) { var existing = context.ServerSetting.FirstOrDefault(s => s.Key == defaultSetting.Key); @@ -54,6 +55,16 @@ namespace API.Data } await context.SaveChangesAsync(); + + // Port and LoggingLevel are managed in appSettings.json. 
Update the DB values to match + var configFile = Program.GetAppSettingFilename(); + context.ServerSetting.FirstOrDefault(s => s.Key == ServerSettingKey.Port).Value = + Configuration.GetPort(configFile) + ""; + context.ServerSetting.FirstOrDefault(s => s.Key == ServerSettingKey.LoggingLevel).Value = + Configuration.GetLogLevel(configFile); + + await context.SaveChangesAsync(); + } } } \ No newline at end of file diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs index 2169aeb67..b611cf4d6 100644 --- a/API/Extensions/ApplicationServiceExtensions.cs +++ b/API/Extensions/ApplicationServiceExtensions.cs @@ -1,9 +1,11 @@ -using API.Data; +using System; +using API.Data; using API.Helpers; using API.Interfaces; using API.Interfaces.Services; using API.Services; using API.Services.Tasks; +using Kavita.Common; using Microsoft.AspNetCore.Hosting; using Microsoft.EntityFrameworkCore; using Microsoft.Extensions.Configuration; @@ -29,12 +31,12 @@ namespace API.Extensions services.AddScoped(); services.AddScoped(); services.AddScoped(); - + services.AddDbContext(options => { options.UseSqlite(config.GetConnectionString("DefaultConnection")); - options.EnableSensitiveDataLogging(env.IsDevelopment()); + options.EnableSensitiveDataLogging(env.IsDevelopment() || Configuration.GetLogLevel(Program.GetAppSettingFilename()).Equals("Debug")); }); services.AddLogging(loggingBuilder => diff --git a/API/Program.cs b/API/Program.cs index 8b43951a1..b084c2ef3 100644 --- a/API/Program.cs +++ b/API/Program.cs @@ -64,6 +64,7 @@ namespace API await context.Database.MigrateAsync(); await Seed.SeedRoles(roleManager); await Seed.SeedSettings(context); + } catch (Exception ex) { @@ -107,8 +108,16 @@ namespace API options.BeforeSend = sentryEvent => { if (sentryEvent.Exception != null - && sentryEvent.Exception.Message.Contains("[GetCoverImage] This archive cannot be read:") - && sentryEvent.Exception.Message.Contains("[BookService] ")) + && sentryEvent.Exception.Message.StartsWith("[GetCoverImage]") + && sentryEvent.Exception.Message.StartsWith("[BookService]") + && sentryEvent.Exception.Message.StartsWith("[ExtractArchive]") + && sentryEvent.Exception.Message.StartsWith("[GetSummaryInfo]") + && sentryEvent.Exception.Message.StartsWith("[GetSummaryInfo]") + && sentryEvent.Exception.Message.StartsWith("[GetNumberOfPagesFromArchive]") + && sentryEvent.Exception.Message.Contains("EPUB parsing error") + && sentryEvent.Exception.Message.Contains("Unsupported EPUB version") + && sentryEvent.Exception.Message.Contains("Incorrect EPUB") + && sentryEvent.Exception.Message.Contains("Access is Denied")) { return null; // Don't send this event to Sentry } diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs index 4cd25d2d9..1f99334b7 100644 --- a/API/Services/ArchiveService.cs +++ b/API/Services/ArchiveService.cs @@ -230,7 +230,7 @@ namespace API.Services } catch (Exception ex) { - _logger.LogWarning(ex, "There was an error and prevented thumbnail generation on {EntryName}. Defaulting to no cover image", entryName); + _logger.LogWarning(ex, "[GetCoverImage] There was an error and prevented thumbnail generation on {EntryName}. 
Defaulting to no cover image", entryName); } return Array.Empty(); @@ -407,7 +407,7 @@ namespace API.Services } catch (Exception e) { - _logger.LogWarning(e, "There was a problem extracting {ArchivePath} to {ExtractPath}",archivePath, extractPath); + _logger.LogWarning(e, "[ExtractArchive] There was a problem extracting {ArchivePath} to {ExtractPath}",archivePath, extractPath); return; } _logger.LogDebug("Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds); diff --git a/Kavita.Common/Configuration.cs b/Kavita.Common/Configuration.cs index 46b23cf77..755e57743 100644 --- a/Kavita.Common/Configuration.cs +++ b/Kavita.Common/Configuration.cs @@ -7,7 +7,7 @@ namespace Kavita.Common { public static class Configuration { - + #region JWT Token public static bool CheckIfJwtTokenSet(string filePath) { try { @@ -29,8 +29,6 @@ namespace Kavita.Common return false; } - - public static bool UpdateJwtToken(string filePath, string token) { try @@ -44,7 +42,8 @@ namespace Kavita.Common return false; } } - + #endregion + #region Port public static bool UpdatePort(string filePath, int port) { if (new OsInfo(Array.Empty()).IsDocker) @@ -64,7 +63,6 @@ namespace Kavita.Common return false; } } - public static int GetPort(string filePath) { const int defaultPort = 5000; @@ -89,5 +87,48 @@ namespace Kavita.Common return defaultPort; } + #endregion + #region LogLevel + public static bool UpdateLogLevel(string filePath, string logLevel) + { + try + { + var currentLevel = GetLogLevel(filePath); + var json = File.ReadAllText(filePath).Replace($"\"Default\": \"{currentLevel}\"", $"\"Default\": \"{logLevel}\""); + File.WriteAllText(filePath, json); + return true; + } + catch (Exception) + { + return false; + } + } + public static string GetLogLevel(string filePath) + { + try { + var json = File.ReadAllText(filePath); + var jsonObj = JsonSerializer.Deserialize(json); + if (jsonObj.TryGetProperty("Logging", out JsonElement tokenElement)) + { + foreach (var property in tokenElement.EnumerateObject()) + { + if (!property.Name.Equals("LogLevel")) continue; + foreach (var logProperty in property.Value.EnumerateObject()) + { + if (logProperty.Name.Equals("Default")) + { + return logProperty.Value.GetString(); + } + } + } + } + } + catch (Exception ex) { + Console.WriteLine("Error writing app settings: " + ex.Message); + } + + return "Information"; + } + #endregion } } \ No newline at end of file From 4f29cf7cc7fada13aca41362dbe66e772b95683a Mon Sep 17 00:00:00 2001 From: Robbie Davis Date: Tue, 8 Jun 2021 09:59:47 -0400 Subject: [PATCH 10/55] Bugfix/manga reader pad zeros (#288) * Switched to using existing NaturalSortComparer for ordering filenames before we reprocess them to ensure they are in the correct natural reading order. 
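Concretely: without a natural comparison, "10.jpg" sorts before "2.jpg" under an ordinal string sort, and an unpadded index reintroduces the same problem after renaming, so the change both orders the input files naturally and zero-pads the generated index. A small illustration, assuming the project's API.Comparators.NaturalSortComparer (the comparer used in the diff below) and made-up file names:

    using System;
    using System.Linq;
    using API.Comparators;

    public static class NaturalOrderSketch
    {
        public static void Main()
        {
            var pages = new[] { "10.jpg", "2.jpg", "1.jpg" };

            // Ordinal sort: 1.jpg, 10.jpg, 2.jpg - pages come out in the wrong reading order.
            Console.WriteLine(string.Join(", ", pages.OrderBy(p => p, StringComparer.Ordinal)));

            // Natural sort: 1.jpg, 2.jpg, 10.jpg - the order the flattening code now renames files in.
            Console.WriteLine(string.Join(", ", pages.OrderBy(p => p, new NaturalSortComparer())));
        }
    }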
Co-authored-by: Joseph Milazzo --- API/Extensions/DirectoryInfoExtensions.cs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/API/Extensions/DirectoryInfoExtensions.cs b/API/Extensions/DirectoryInfoExtensions.cs index c41ca9f8b..0eaf6c00a 100644 --- a/API/Extensions/DirectoryInfoExtensions.cs +++ b/API/Extensions/DirectoryInfoExtensions.cs @@ -1,12 +1,12 @@ -using System; -using System.IO; +using System.IO; using System.Linq; -using API.Services; +using API.Comparators; namespace API.Extensions { public static class DirectoryInfoExtensions { + private static readonly NaturalSortComparer Comparer = new NaturalSortComparer(); public static void Empty(this DirectoryInfo directory) { foreach(FileInfo file in directory.EnumerateFiles()) file.Delete(); @@ -49,12 +49,13 @@ namespace API.Extensions if (!root.FullName.Equals(directory.FullName)) { var fileIndex = 1; - foreach (var file in directory.EnumerateFiles()) + + foreach (var file in directory.EnumerateFiles().OrderBy(file => file.FullName, Comparer)) { if (file.Directory == null) continue; var paddedIndex = Parser.Parser.PadZeros(directoryIndex + ""); // We need to rename the files so that after flattening, they are in the order we found them - var newName = $"{paddedIndex}_{fileIndex}.{file.Extension}"; + var newName = $"{paddedIndex}_{Parser.Parser.PadZeros(fileIndex + "")}{file.Extension}"; var newPath = Path.Join(root.FullName, newName); if (!File.Exists(newPath)) file.MoveTo(newPath); fileIndex++; From d261449275c8de349b4e2687282693647b84c9ad Mon Sep 17 00:00:00 2001 From: Robbie Davis Date: Tue, 8 Jun 2021 13:16:31 -0400 Subject: [PATCH 11/55] Adding releases to be sent to discord (#291) Uses: https://github.com/marketplace/actions/discord-styled-releases-patch-notes --- .github/workflows/discord-release-msg.yml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 .github/workflows/discord-release-msg.yml diff --git a/.github/workflows/discord-release-msg.yml b/.github/workflows/discord-release-msg.yml new file mode 100644 index 000000000..f3acd153e --- /dev/null +++ b/.github/workflows/discord-release-msg.yml @@ -0,0 +1,17 @@ +name: Release messages to discord announcement channel + +on: + release: + types: + - created + +jobs: + run_main: + runs-on: ubuntu-18.04 + name: Sends custom message + steps: + - name: Sending message + uses: nhevia/discord-styled-releases@main + with: + webhook_id: ${{ secrets.DISCORD_WEBHOOK_ID }} + webhook_token: ${{ secrets.DISCORD_WEBHOOK_TOKEN }} \ No newline at end of file From 8d8e7ce93b9680ed0bc6ac58b39c16409b15132e Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Tue, 8 Jun 2021 12:33:54 -0500 Subject: [PATCH 12/55] Book Grouping (#292) * Implemented the ability to group books by series if certain calibre tags are in the epub. 
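The calibre tags in question live in the EPUB's OPF package metadata. For the example used in the code comments below (series "The Dark Tower", book 5, "Wolves of the Calla"), calibre writes entries roughly like:

    <meta name="calibre:series" content="The Dark Tower"/>
    <meta name="calibre:series_index" content="5"/>
    <meta name="calibre:title_sort" content="Wolves of the Calla"/>

When all three are present, the parser maps calibre:series to the Series name, the integer part of calibre:series_index to Volumes, and calibre:title_sort to the Title, instead of falling back to the epub's dc:title.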
--- API/Services/BookService.cs | 49 +++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/API/Services/BookService.cs b/API/Services/BookService.cs index 8be3bb0c8..3d35919f0 100644 --- a/API/Services/BookService.cs +++ b/API/Services/BookService.cs @@ -234,6 +234,55 @@ namespace API.Services try { using var epubBook = EpubReader.OpenBook(filePath); + + // If the epub has the following tags, we can group the books as Volumes + // + // + // + // If all three are present, we can take that over dc:title and format as: + // Series = The Dark Tower, Volume = 5, Filename as "Wolves of the Calla" + try + { + string seriesIndex = string.Empty; + string series = string.Empty; + string specialName = string.Empty; + + foreach (var metadataItem in epubBook.Schema.Package.Metadata.MetaItems) + { + switch (metadataItem.Name) + { + case "calibre:series_index": + seriesIndex = metadataItem.Content; + break; + case "calibre:series": + series = metadataItem.Content; + break; + case "calibre:title_sort": + specialName = metadataItem.Content; + break; + } + } + + if (!string.IsNullOrEmpty(series) && !string.IsNullOrEmpty(seriesIndex) && !string.IsNullOrEmpty(specialName)) + { + return new ParserInfo() + { + Chapters = "0", + Edition = "", + Format = MangaFormat.Book, + Filename = Path.GetFileName(filePath), + Title = specialName, + FullFilePath = filePath, + IsSpecial = false, + Series = series, + Volumes = seriesIndex.Split(".")[0] + }; + } + } + catch (Exception) + { + // Swallow exception + } return new ParserInfo() { From 560b0c9cb580c447d68e149a3054e2fabc74f67f Mon Sep 17 00:00:00 2001 From: Kizaing Date: Tue, 8 Jun 2021 14:20:36 -0400 Subject: [PATCH 13/55] Feature/docker setup (#294) * Github Action for Docker Integration on merge into develop Co-authored-by: Chris Plaatjes --- .github/workflows/nightly-docker.yml | 35 ++++++++++-- action-build.sh | 83 ++++++++++++++++++++++++++++ copy_runtime.sh | 0 docker-build.sh | 6 +- 4 files changed, 117 insertions(+), 7 deletions(-) create mode 100755 action-build.sh mode change 100644 => 100755 copy_runtime.sh diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index c42f0a5eb..284a1cb68 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -13,12 +13,39 @@ jobs: - name: Check Out Repo uses: actions/checkout@v2 + - name: Check Out WebUI + uses: actions/checkout@v2 + with: + repository: Kareadita/Kavita-webui + ref: feature/sentry-release + path: Kavita-webui/ + + - name: NodeJS to Compile WebUI + uses: actions/setup-node@v2.1.5 + with: + node-version: '14' + - run: | + cd Kavita-webui/ || exit + npm install + npm run prod + mv dist/ ../API/wwwroot + cd ../ || exit + + - name: Compile dotnet app + uses: actions/setup-dotnet@v1 + with: + dotnet-version: '5.0.x' + - run: ./action-build.sh + - name: Login to Docker Hub uses: docker/login-action@v1 with: username: ${{ secrets.DOCKER_HUB_USERNAME }} password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - name: Set up Docker Buildx id: buildx uses: docker/setup-buildx-action@v1 @@ -27,10 +54,10 @@ jobs: id: docker_build uses: docker/build-push-action@v2 with: - context: ./ - file: ./Dockerfile + context: . 
+ platforms: linux/amd64,linux/arm/v7,linux/arm64 push: true - tags: kizaing/kavita:nightly-amd64 + tags: kizaing/kavita:nightly - name: Image digest - run: echo ${{ steps.docker_build.outputs.digest }} \ No newline at end of file + run: echo ${{ steps.docker_build.outputs.digest }} diff --git a/action-build.sh b/action-build.sh new file mode 100755 index 000000000..559765710 --- /dev/null +++ b/action-build.sh @@ -0,0 +1,83 @@ +#! /bin/bash +set -e + +outputFolder='_output' + +ProgressStart() +{ + echo "Start '$1'" +} + +ProgressEnd() +{ + echo "Finish '$1'" +} + +Build() +{ + local RID="$1" + + ProgressStart "Build for $RID" + + slnFile=Kavita.sln + + dotnet clean $slnFile -c Debug + dotnet clean $slnFile -c Release + + dotnet msbuild -restore $slnFile -p:Configuration=Release -p:Platform="Any CPU" -p:RuntimeIdentifiers=$RID + + ProgressEnd "Build for $RID" +} + +Package() +{ + local framework="$1" + local runtime="$2" + local lOutputFolder=../_output/"$runtime"/Kavita + + ProgressStart "Creating $runtime Package for $framework" + + # TODO: Use no-restore? Because Build should have already done it for us + echo "Building" + cd API + echo dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + + echo "Copying Install information" + cp ../INSTALL.txt "$lOutputFolder"/README.txt + + echo "Copying LICENSE" + cp ../LICENSE "$lOutputFolder"/LICENSE.txt + + echo "Renaming API -> Kavita" + mv "$lOutputFolder"/API "$lOutputFolder"/Kavita + + echo "Creating tar" + cd ../$outputFolder/"$runtime"/ + tar -czvf ../kavita-$runtime.tar.gz Kavita + + ProgressEnd "Creating $runtime Package for $framework" + +} + +dir=$PWD + +if [ -d _output ] +then + rm -r _output/ +fi + +#Build for x64 +Build "linux-x64" +Package "net5.0" "linux-x64" +cd "$dir" + +#Build for arm +Build "linux-arm" +Package "net5.0" "linux-arm" +cd "$dir" + +#Build for arm64 +Build "linux-arm64" +Package "net5.0" "linux-arm64" +cd "$dir" \ No newline at end of file diff --git a/copy_runtime.sh b/copy_runtime.sh old mode 100644 new mode 100755 diff --git a/docker-build.sh b/docker-build.sh index 05b05dda4..c97478910 100644 --- a/docker-build.sh +++ b/docker-build.sh @@ -15,7 +15,7 @@ ProgressEnd() Build() { - local RID="$1" + local RID="$1" ProgressStart 'Build for $RID' @@ -24,8 +24,8 @@ Build() dotnet clean $slnFile -c Debug dotnet clean $slnFile -c Release - dotnet msbuild -restore $slnFile -p:Configuration=Release -p:Platform="Any CPU" -p:RuntimeIdentifiers=$RID - + dotnet msbuild -restore $slnFile -p:Configuration=Release -p:Platform="Any CPU" -p:RuntimeIdentifiers=$RID + ProgressEnd 'Build for $RID' } From b61da7eb08d48f243573e58adbb7e89e8aac5cbb Mon Sep 17 00:00:00 2001 From: Robbie Davis Date: Tue, 8 Jun 2021 14:54:52 -0400 Subject: [PATCH 14/55] Update nightly-docker.yml (#295) Added discord notification action to nightly-docker workflow. 
https://github.com/marketplace/actions/discord-webhook-notify --- .github/workflows/nightly-docker.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index 284a1cb68..704b7aa50 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -61,3 +61,12 @@ jobs: - name: Image digest run: echo ${{ steps.docker_build.outputs.digest }} + + - name: Notify Discord + uses: rjstone/discord-webhook-notify@v1 + with: + severity: info + description: + details: 'https://hub.docker.com/r/kizaing/kavita/tags?page=1&ordering=last_updated' + text: A new nightly build has been released for docker. + webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }} \ No newline at end of file From 4ae9f078b0693bab2de8907b0e0dff40d807e95f Mon Sep 17 00:00:00 2001 From: Kizaing Date: Wed, 9 Jun 2021 11:10:06 -0400 Subject: [PATCH 15/55] Feature/docker stable setup (#296) * Added stable docker workflow * Changed webui pull to develop --- .github/workflows/nightly-docker.yml | 4 +- .github/workflows/stable-docker.yml | 63 ++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/stable-docker.yml diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index 704b7aa50..329006f14 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -17,7 +17,7 @@ jobs: uses: actions/checkout@v2 with: repository: Kareadita/Kavita-webui - ref: feature/sentry-release + ref: develop path: Kavita-webui/ - name: NodeJS to Compile WebUI @@ -69,4 +69,4 @@ jobs: description: details: 'https://hub.docker.com/r/kizaing/kavita/tags?page=1&ordering=last_updated' text: A new nightly build has been released for docker. - webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }} \ No newline at end of file + webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }} diff --git a/.github/workflows/stable-docker.yml b/.github/workflows/stable-docker.yml new file mode 100644 index 000000000..7b8a68c6e --- /dev/null +++ b/.github/workflows/stable-docker.yml @@ -0,0 +1,63 @@ +name: CI to Docker Hub + +on: + push: + branches: + - 'main' + +jobs: + docker: + runs-on: ubuntu-latest + steps: + + - name: Check Out Repo + uses: actions/checkout@v2 + + - name: Check Out WebUI + uses: actions/checkout@v2 + with: + repository: Kareadita/Kavita-webui + ref: main + path: Kavita-webui/ + + - name: NodeJS to Compile WebUI + uses: actions/setup-node@v2.1.5 + with: + node-version: '14' + - run: | + cd Kavita-webui/ || exit + npm install + npm run prod + mv dist/ ../API/wwwroot + cd ../ || exit + + - name: Compile dotnet app + uses: actions/setup-dotnet@v1 + with: + dotnet-version: '5.0.x' + - run: ./action-build.sh + + - name: Login to Docker Hub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKER_HUB_USERNAME }} + password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v1 + + - name: Build and push + id: docker_build + uses: docker/build-push-action@v2 + with: + context: . 
+ platforms: linux/amd64,linux/arm/v7,linux/arm64 + push: true + tags: kizaing/kavita:latest + + - name: Image digest + run: echo ${{ steps.docker_build.outputs.digest }} From 16a77fa8d696a3455bcec8af64931e25afd3c392 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Thu, 10 Jun 2021 07:47:35 -0500 Subject: [PATCH 16/55] Download Support (#298) * Implemented the ability to download files (series, volume, chapter) * Added RBS checks to ensure user is either an admin or has download role * Added the ability to change a users feature RBS. Changed the Role seed to use reflection --- .../Comparers/NaturalSortComparerTest.cs | 4 + API.Tests/Services/ArchiveServiceTests.cs | 7 +- API/Constants/PolicyConstants.cs | 4 + API/Controllers/AccountController.cs | 41 ++++++++ API/Controllers/DownloadController.cs | 96 +++++++++++++++++++ API/Controllers/ServerController.cs | 37 +++---- API/Controllers/SettingsController.cs | 12 +-- API/DTOs/UpdateRBSDto.cs | 10 ++ API/Data/Seed.cs | 13 ++- API/Data/SeriesRepository.cs | 11 +++ API/Data/VolumeRepository.cs | 12 +++ API/Extensions/IdentityServiceExtensions.cs | 1 + API/Interfaces/ISeriesRepository.cs | 1 + API/Interfaces/IVolumeRepository.cs | 1 + API/Interfaces/Services/IArchiveService.cs | 7 +- API/Services/ArchiveService.cs | 40 +++++++- API/Services/DirectoryService.cs | 10 ++ 17 files changed, 262 insertions(+), 45 deletions(-) create mode 100644 API/Controllers/DownloadController.cs create mode 100644 API/DTOs/UpdateRBSDto.cs diff --git a/API.Tests/Comparers/NaturalSortComparerTest.cs b/API.Tests/Comparers/NaturalSortComparerTest.cs index 099da0546..39bad2003 100644 --- a/API.Tests/Comparers/NaturalSortComparerTest.cs +++ b/API.Tests/Comparers/NaturalSortComparerTest.cs @@ -38,6 +38,10 @@ namespace API.Tests.Comparers new[] {"Batman - Black white vol 1 #04.cbr", "Batman - Black white vol 1 #03.cbr", "Batman - Black white vol 1 #01.cbr", "Batman - Black white vol 1 #02.cbr"}, new[] {"Batman - Black white vol 1 #01.cbr", "Batman - Black white vol 1 #02.cbr", "Batman - Black white vol 1 #03.cbr", "Batman - Black white vol 1 #04.cbr"} )] + [InlineData( + new[] {"3and4.cbz", "The World God Only Knows - Oneshot.cbz", "5.cbz", "1and2.cbz"}, + new[] {"1and2.cbz", "3and4.cbz", "5.cbz", "The World God Only Knows - Oneshot.cbz"} + )] public void TestNaturalSortComparer(string[] input, string[] expected) { Array.Sort(input, _nc); diff --git a/API.Tests/Services/ArchiveServiceTests.cs b/API.Tests/Services/ArchiveServiceTests.cs index d907ab75a..50d2d0673 100644 --- a/API.Tests/Services/ArchiveServiceTests.cs +++ b/API.Tests/Services/ArchiveServiceTests.cs @@ -16,11 +16,12 @@ namespace API.Tests.Services private readonly ITestOutputHelper _testOutputHelper; private readonly ArchiveService _archiveService; private readonly ILogger _logger = Substitute.For>(); + private readonly ILogger _directoryServiceLogger = Substitute.For>(); public ArchiveServiceTests(ITestOutputHelper testOutputHelper) { _testOutputHelper = testOutputHelper; - _archiveService = new ArchiveService(_logger); + _archiveService = new ArchiveService(_logger, new DirectoryService(_directoryServiceLogger)); } [Theory] @@ -154,7 +155,7 @@ namespace API.Tests.Services [InlineData("sorting.zip", "sorting.expected.jpg")] public void GetCoverImage_Default_Test(string inputFile, string expectedOutputFile) { - var archiveService = Substitute.For(_logger); + var archiveService = Substitute.For(_logger, new DirectoryService(_directoryServiceLogger)); var testDirectory = 
Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"); var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile)); archiveService.Configure().CanOpen(Path.Join(testDirectory, inputFile)).Returns(ArchiveLibrary.Default); @@ -174,7 +175,7 @@ namespace API.Tests.Services [InlineData("sorting.zip", "sorting.expected.jpg")] public void GetCoverImage_SharpCompress_Test(string inputFile, string expectedOutputFile) { - var archiveService = Substitute.For(_logger); + var archiveService = Substitute.For(_logger, new DirectoryService(_directoryServiceLogger)); var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages"); var expectedBytes = File.ReadAllBytes(Path.Join(testDirectory, expectedOutputFile)); diff --git a/API/Constants/PolicyConstants.cs b/API/Constants/PolicyConstants.cs index 6b6d93ae0..c76d71926 100644 --- a/API/Constants/PolicyConstants.cs +++ b/API/Constants/PolicyConstants.cs @@ -4,5 +4,9 @@ { public const string AdminRole = "Admin"; public const string PlebRole = "Pleb"; + /// + /// Used to give a user ability to download files from the server + /// + public const string DownloadRole = "Download"; } } \ No newline at end of file diff --git a/API/Controllers/AccountController.cs b/API/Controllers/AccountController.cs index 8c3c05c85..4959fc5f4 100644 --- a/API/Controllers/AccountController.cs +++ b/API/Controllers/AccountController.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Reflection; using System.Threading.Tasks; using API.Constants; using API.DTOs; @@ -150,5 +151,45 @@ namespace API.Controllers Preferences = _mapper.Map(user.UserPreferences) }; } + + [HttpGet("roles")] + public ActionResult> GetRoles() + { + return typeof(PolicyConstants) + .GetFields(BindingFlags.Public | BindingFlags.Static) + .Where(f => f.FieldType == typeof(string)) + .ToDictionary(f => f.Name, + f => (string) f.GetValue(null)).Values.ToList(); + } + + [HttpPost("update-rbs")] + public async Task UpdateRoles(UpdateRbsDto updateRbsDto) + { + var user = await _userManager.Users + .Include(u => u.UserPreferences) + //.Include(u => u.UserRoles) + .SingleOrDefaultAsync(x => x.NormalizedUserName == updateRbsDto.Username.ToUpper()); + if (updateRbsDto.Roles.Contains(PolicyConstants.AdminRole) || + updateRbsDto.Roles.Contains(PolicyConstants.PlebRole)) + { + return BadRequest("Invalid Roles"); + } + + var existingRoles = (await _userManager.GetRolesAsync(user)) + .Where(s => s != PolicyConstants.AdminRole && s != PolicyConstants.PlebRole) + .ToList(); + + // Find what needs to be added and what needs to be removed + var rolesToRemove = existingRoles.Except(updateRbsDto.Roles); + var result = await _userManager.AddToRolesAsync(user, updateRbsDto.Roles); + + if (!result.Succeeded) return BadRequest("Something went wrong, unable to update user's roles"); + if ((await _userManager.RemoveFromRolesAsync(user, rolesToRemove)).Succeeded) + { + return Ok(); + } + return BadRequest("Something went wrong, unable to update user's roles"); + + } } } \ No newline at end of file diff --git a/API/Controllers/DownloadController.cs b/API/Controllers/DownloadController.cs new file mode 100644 index 000000000..67d23ac8e --- /dev/null +++ b/API/Controllers/DownloadController.cs @@ -0,0 +1,96 @@ +using System; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using API.Extensions; +using API.Interfaces; +using 
API.Interfaces.Services; +using API.Services; +using Kavita.Common; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Mvc; + +namespace API.Controllers +{ + [Authorize(Policy = "RequireDownloadRole")] + public class DownloadController : BaseApiController + { + private readonly IUnitOfWork _unitOfWork; + private readonly IArchiveService _archiveService; + + public DownloadController(IUnitOfWork unitOfWork, IArchiveService archiveService) + { + _unitOfWork = unitOfWork; + _archiveService = archiveService; + } + + [HttpGet("volume-size")] + public async Task> GetVolumeSize(int volumeId) + { + var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId); + return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath))); + } + + [HttpGet("chapter-size")] + public async Task> GetChapterSize(int chapterId) + { + var files = await _unitOfWork.VolumeRepository.GetFilesForChapter(chapterId); + return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath))); + } + + [HttpGet("series-size")] + public async Task> GetSeriesSize(int seriesId) + { + var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId); + return Ok(DirectoryService.GetTotalSize(files.Select(c => c.FilePath))); + } + + [HttpGet("volume")] + public async Task DownloadVolume(int volumeId) + { + var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId); + try + { + var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files.Select(c => c.FilePath), + $"download_{User.GetUsername()}_v{volumeId}"); + return File(fileBytes, "application/zip", Path.GetFileName(zipPath)); + } + catch (KavitaException ex) + { + return BadRequest(ex.Message); + } + } + + [HttpGet("chapter")] + public async Task DownloadChapter(int chapterId) + { + var files = await _unitOfWork.VolumeRepository.GetFilesForChapter(chapterId); + try + { + var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files.Select(c => c.FilePath), + $"download_{User.GetUsername()}_c{chapterId}"); + return File(fileBytes, "application/zip", Path.GetFileName(zipPath)); + } + catch (KavitaException ex) + { + return BadRequest(ex.Message); + } + } + + [HttpGet("series")] + public async Task DownloadSeries(int seriesId) + { + var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId); + try + { + var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files.Select(c => c.FilePath), + $"download_{User.GetUsername()}_s{seriesId}"); + return File(fileBytes, "application/zip", Path.GetFileName(zipPath)); + } + catch (KavitaException ex) + { + return BadRequest(ex.Message); + } + } + } +} \ No newline at end of file diff --git a/API/Controllers/ServerController.cs b/API/Controllers/ServerController.cs index 475323e07..7bedceb3f 100644 --- a/API/Controllers/ServerController.cs +++ b/API/Controllers/ServerController.cs @@ -5,6 +5,7 @@ using System.Threading.Tasks; using API.Extensions; using API.Interfaces.Services; using API.Services; +using Kavita.Common; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using Microsoft.Extensions.Configuration; @@ -19,19 +20,19 @@ namespace API.Controllers private readonly IHostApplicationLifetime _applicationLifetime; private readonly ILogger _logger; private readonly IConfiguration _config; - private readonly IDirectoryService _directoryService; private readonly IBackupService _backupService; + private readonly IArchiveService _archiveService; public ServerController(IHostApplicationLifetime 
applicationLifetime, ILogger logger, IConfiguration config, - IDirectoryService directoryService, IBackupService backupService) + IBackupService backupService, IArchiveService archiveService) { _applicationLifetime = applicationLifetime; _logger = logger; _config = config; - _directoryService = directoryService; _backupService = backupService; + _archiveService = archiveService; } - + [HttpPost("restart")] public ActionResult RestartServer() { @@ -45,33 +46,17 @@ namespace API.Controllers public async Task GetLogs() { var files = _backupService.LogFiles(_config.GetMaxRollingFiles(), _config.GetLoggingFileName()); - - var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp"); - var dateString = DateTime.Now.ToShortDateString().Replace("/", "_"); - - var tempLocation = Path.Join(tempDirectory, "logs_" + dateString); - DirectoryService.ExistOrCreate(tempLocation); - if (!_directoryService.CopyFilesToDirectory(files, tempLocation)) - { - return BadRequest("Unable to copy files to temp directory for log download."); - } - - var zipPath = Path.Join(tempDirectory, $"kavita_logs_{dateString}.zip"); try { - ZipFile.CreateFromDirectory(tempLocation, zipPath); + var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files, "logs"); + return File(fileBytes, "application/zip", Path.GetFileName(zipPath)); } - catch (AggregateException ex) + catch (KavitaException ex) { - _logger.LogError(ex, "There was an issue when archiving library backup"); - return BadRequest("There was an issue when archiving library backup"); + return BadRequest(ex.Message); } - var fileBytes = await _directoryService.ReadFileAsync(zipPath); - - DirectoryService.ClearAndDeleteDirectory(tempLocation); - (new FileInfo(zipPath)).Delete(); - - return File(fileBytes, "application/zip", Path.GetFileName(zipPath)); } + + } } \ No newline at end of file diff --git a/API/Controllers/SettingsController.cs b/API/Controllers/SettingsController.cs index b0bc941af..b30d7fdd3 100644 --- a/API/Controllers/SettingsController.cs +++ b/API/Controllers/SettingsController.cs @@ -16,7 +16,7 @@ using Microsoft.Extensions.Logging; namespace API.Controllers { - [Authorize] + [Authorize(Policy = "RequireAdminRole")] public class SettingsController : BaseApiController { private readonly ILogger _logger; @@ -31,7 +31,7 @@ namespace API.Controllers _taskScheduler = taskScheduler; _configuration = configuration; } - + [HttpGet("")] public async Task> GetSettings() { @@ -40,8 +40,7 @@ namespace API.Controllers settingsDto.LoggingLevel = Configuration.GetLogLevel(Program.GetAppSettingFilename()); return Ok(settingsDto); } - - [Authorize(Policy = "RequireAdminRole")] + [HttpPost("")] public async Task> UpdateSettings(ServerSettingDto updateSettingsDto) { @@ -103,22 +102,19 @@ namespace API.Controllers _taskScheduler.ScheduleTasks(); return Ok(updateSettingsDto); } - - [Authorize(Policy = "RequireAdminRole")] + [HttpGet("task-frequencies")] public ActionResult> GetTaskFrequencies() { return Ok(CronConverter.Options); } - [Authorize(Policy = "RequireAdminRole")] [HttpGet("library-types")] public ActionResult> GetLibraryTypes() { return Ok(Enum.GetNames(typeof(LibraryType))); } - [Authorize(Policy = "RequireAdminRole")] [HttpGet("log-levels")] public ActionResult> GetLogLevels() { diff --git a/API/DTOs/UpdateRBSDto.cs b/API/DTOs/UpdateRBSDto.cs new file mode 100644 index 000000000..8bf37d314 --- /dev/null +++ b/API/DTOs/UpdateRBSDto.cs @@ -0,0 +1,10 @@ +using System.Collections.Generic; + +namespace API.DTOs +{ + public class UpdateRbsDto 
+ { + public string Username { get; init; } + public IList Roles { get; init; } + } +} \ No newline at end of file diff --git a/API/Data/Seed.cs b/API/Data/Seed.cs index 511fb8c1c..01befd20c 100644 --- a/API/Data/Seed.cs +++ b/API/Data/Seed.cs @@ -1,6 +1,7 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Reflection; using System.Threading.Tasks; using API.Constants; using API.Entities; @@ -15,11 +16,13 @@ namespace API.Data { public static async Task SeedRoles(RoleManager roleManager) { - var roles = new List - { - new() {Name = PolicyConstants.AdminRole}, - new() {Name = PolicyConstants.PlebRole} - }; + var roles = typeof(PolicyConstants) + .GetFields(BindingFlags.Public | BindingFlags.Static) + .Where(f => f.FieldType == typeof(string)) + .ToDictionary(f => f.Name, + f => (string) f.GetValue(null)).Values + .Select(policyName => new AppRole() {Name = policyName}) + .ToList(); foreach (var role in roles) { diff --git a/API/Data/SeriesRepository.cs b/API/Data/SeriesRepository.cs index c6575126b..0f725444b 100644 --- a/API/Data/SeriesRepository.cs +++ b/API/Data/SeriesRepository.cs @@ -411,5 +411,16 @@ namespace API.Data return await PagedList.CreateAsync(query, userParams.PageNumber, userParams.PageSize); } + + public async Task> GetFilesForSeries(int seriesId) + { + return await _context.Volume + .Where(v => v.SeriesId == seriesId) + .Include(v => v.Chapters) + .ThenInclude(c => c.Files) + .SelectMany(v => v.Chapters.SelectMany(c => c.Files)) + .AsNoTracking() + .ToListAsync(); + } } } \ No newline at end of file diff --git a/API/Data/VolumeRepository.cs b/API/Data/VolumeRepository.cs index 6b9e541ea..78a078e03 100644 --- a/API/Data/VolumeRepository.cs +++ b/API/Data/VolumeRepository.cs @@ -65,6 +65,8 @@ namespace API.Data .SingleOrDefaultAsync(); } + + public async Task GetChapterDtoAsync(int chapterId) { @@ -84,5 +86,15 @@ namespace API.Data .AsNoTracking() .ToListAsync(); } + + public async Task> GetFilesForVolume(int volumeId) + { + return await _context.Chapter + .Where(c => volumeId == c.VolumeId) + .Include(c => c.Files) + .SelectMany(c => c.Files) + .AsNoTracking() + .ToListAsync(); + } } } \ No newline at end of file diff --git a/API/Extensions/IdentityServiceExtensions.cs b/API/Extensions/IdentityServiceExtensions.cs index 2d2a235f5..5310cf2ef 100644 --- a/API/Extensions/IdentityServiceExtensions.cs +++ b/API/Extensions/IdentityServiceExtensions.cs @@ -39,6 +39,7 @@ namespace API.Extensions services.AddAuthorization(opt => { opt.AddPolicy("RequireAdminRole", policy => policy.RequireRole(PolicyConstants.AdminRole)); + opt.AddPolicy("RequireDownloadRole", policy => policy.RequireRole(PolicyConstants.DownloadRole, PolicyConstants.AdminRole)); }); return services; diff --git a/API/Interfaces/ISeriesRepository.cs b/API/Interfaces/ISeriesRepository.cs index 0b89d16b6..166ab05c3 100644 --- a/API/Interfaces/ISeriesRepository.cs +++ b/API/Interfaces/ISeriesRepository.cs @@ -61,5 +61,6 @@ namespace API.Interfaces Task> GetRecentlyAdded(int libraryId, int userId, UserParams userParams); Task GetSeriesMetadata(int seriesId); Task> GetSeriesDtoForCollectionAsync(int collectionId, int userId, UserParams userParams); + Task> GetFilesForSeries(int seriesId); } } \ No newline at end of file diff --git a/API/Interfaces/IVolumeRepository.cs b/API/Interfaces/IVolumeRepository.cs index faf18abb8..b5ac06087 100644 --- a/API/Interfaces/IVolumeRepository.cs +++ b/API/Interfaces/IVolumeRepository.cs @@ -13,5 +13,6 @@ namespace API.Interfaces Task> 
GetFilesForChapter(int chapterId); Task> GetChaptersAsync(int volumeId); Task GetChapterCoverImageAsync(int chapterId); + Task> GetFilesForVolume(int volumeId); } } \ No newline at end of file diff --git a/API/Interfaces/Services/IArchiveService.cs b/API/Interfaces/Services/IArchiveService.cs index aa5df49e2..f77784878 100644 --- a/API/Interfaces/Services/IArchiveService.cs +++ b/API/Interfaces/Services/IArchiveService.cs @@ -1,5 +1,9 @@ -using System.IO.Compression; +using System; +using System.Collections.Generic; +using System.IO.Compression; +using System.Threading.Tasks; using API.Archive; +using API.Entities; namespace API.Interfaces.Services { @@ -12,5 +16,6 @@ namespace API.Interfaces.Services string GetSummaryInfo(string archivePath); ArchiveLibrary CanOpen(string archivePath); bool ArchiveNeedsFlattening(ZipArchive archive); + Task> CreateZipForDownload(IEnumerable files, string tempFolder); } } \ No newline at end of file diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs index 1f99334b7..a90d429ed 100644 --- a/API/Services/ArchiveService.cs +++ b/API/Services/ArchiveService.cs @@ -4,12 +4,14 @@ using System.Diagnostics; using System.IO; using System.IO.Compression; using System.Linq; +using System.Threading.Tasks; using System.Xml.Serialization; using API.Archive; using API.Comparators; using API.Extensions; using API.Interfaces.Services; using API.Services.Tasks; +using Kavita.Common; using Microsoft.Extensions.Logging; using Microsoft.IO; using SharpCompress.Archives; @@ -25,13 +27,15 @@ namespace API.Services public class ArchiveService : IArchiveService { private readonly ILogger _logger; + private readonly IDirectoryService _directoryService; private const int ThumbnailWidth = 320; // 153w x 230h private static readonly RecyclableMemoryStreamManager StreamManager = new(); private readonly NaturalSortComparer _comparer; - public ArchiveService(ILogger logger) + public ArchiveService(ILogger logger, IDirectoryService directoryService) { _logger = logger; + _directoryService = directoryService; _comparer = new NaturalSortComparer(); } @@ -216,7 +220,39 @@ namespace API.Services !Path.HasExtension(archive.Entries.ElementAt(0).FullName) || archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !Parser.Parser.HasBlacklistedFolderInPath(e.FullName)); } - + + public async Task> CreateZipForDownload(IEnumerable files, string tempFolder) + { + var tempDirectory = Path.Join(Directory.GetCurrentDirectory(), "temp"); + var dateString = DateTime.Now.ToShortDateString().Replace("/", "_"); + + var tempLocation = Path.Join(tempDirectory, $"{tempFolder}_{dateString}"); + DirectoryService.ExistOrCreate(tempLocation); + if (!_directoryService.CopyFilesToDirectory(files, tempLocation)) + { + throw new KavitaException("Unable to copy files to temp directory archive download."); + } + + var zipPath = Path.Join(tempDirectory, $"kavita_{tempFolder}_{dateString}.zip"); + try + { + ZipFile.CreateFromDirectory(tempLocation, zipPath); + } + catch (AggregateException ex) + { + _logger.LogError(ex, "There was an issue creating temp archive"); + throw new KavitaException("There was an issue creating temp archive"); + } + + + var fileBytes = await _directoryService.ReadFileAsync(zipPath); + + DirectoryService.ClearAndDeleteDirectory(tempLocation); + (new FileInfo(zipPath)).Delete(); + + return Tuple.Create(fileBytes, zipPath); + } + private byte[] CreateThumbnail(string entryName, Stream stream, string formatExtension = ".jpg") { if 
(!formatExtension.StartsWith(".")) diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs index 40271ccd0..ac5f5ec5e 100644 --- a/API/Services/DirectoryService.cs +++ b/API/Services/DirectoryService.cs @@ -102,6 +102,16 @@ namespace API.Services return !Directory.Exists(path) ? Array.Empty() : Directory.GetFiles(path); } + /// + /// Returns the total number of bytes for a given set of full file paths + /// + /// + /// Total bytes + public static long GetTotalSize(IEnumerable paths) + { + return paths.Sum(path => new FileInfo(path).Length); + } + /// /// Returns true if the path exists and is a directory. If path does not exist, this will create it. Returns false in all fail cases. /// From f8aba21acda38a19a0e65df32248ae8a8c76c23c Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Mon, 14 Jun 2021 17:21:01 -0500 Subject: [PATCH 17/55] Removed "Anthology" from being a special parsing keyword as series are being found where "Anthology" is in the series name. (#304) --- API.Tests/Parser/MangaParserTests.cs | 2 +- API/Parser/Parser.cs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/API.Tests/Parser/MangaParserTests.cs b/API.Tests/Parser/MangaParserTests.cs index e09166585..bba6f60e8 100644 --- a/API.Tests/Parser/MangaParserTests.cs +++ b/API.Tests/Parser/MangaParserTests.cs @@ -241,7 +241,7 @@ namespace API.Tests.Parser [InlineData("Ani-Hina Art Collection.cbz", true)] [InlineData("Gifting The Wonderful World With Blessings! - 3 Side Stories [yuNS][Unknown]", true)] [InlineData("A Town Where You Live - Bonus Chapter.zip", true)] - [InlineData("Yuki Merry - 4-Komga Anthology", true)] + [InlineData("Yuki Merry - 4-Komga Anthology", false)] public void ParseMangaSpecialTest(string input, bool expected) { Assert.Equal(expected, !string.IsNullOrEmpty(API.Parser.Parser.ParseMangaSpecial(input))); diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index 050a6ec29..79fed5157 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -372,7 +372,7 @@ namespace API.Parser { // All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle. new Regex( - @"(?Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories|(?Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories|Bonus)", RegexOptions.IgnoreCase | RegexOptions.Compiled), }; From 46b60405b1e0d3330606c0a605b3dc59e1dba17f Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Mon, 14 Jun 2021 17:35:13 -0500 Subject: [PATCH 18/55] Special Markers (#305) * Removed "Anthology" from being a special parsing keyword as series are being found where "Anthology" is in the series name. * SP# is now a way to force the file to be a special rather than pushing it into a Specials folder. --- API.Tests/Parser/MangaParserTests.cs | 2 ++ API/Parser/Parser.cs | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/API.Tests/Parser/MangaParserTests.cs b/API.Tests/Parser/MangaParserTests.cs index bba6f60e8..2ec34318a 100644 --- a/API.Tests/Parser/MangaParserTests.cs +++ b/API.Tests/Parser/MangaParserTests.cs @@ -242,6 +242,8 @@ namespace API.Tests.Parser [InlineData("Gifting The Wonderful World With Blessings! 
- 3 Side Stories [yuNS][Unknown]", true)] [InlineData("A Town Where You Live - Bonus Chapter.zip", true)] [InlineData("Yuki Merry - 4-Komga Anthology", false)] + [InlineData("Beastars - SP01", true)] + [InlineData("Beastars SP01", true)] public void ParseMangaSpecialTest(string input, bool expected) { Assert.Equal(expected, !string.IsNullOrEmpty(API.Parser.Parser.ParseMangaSpecial(input))); diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index 79fed5157..d453a7586 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -374,6 +374,10 @@ namespace API.Parser new Regex( @"(?Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories|Bonus)", RegexOptions.IgnoreCase | RegexOptions.Compiled), + // If SP\d+ is in the filename, we force treat it as a special regardless if volume or chapter might have been found. + new Regex( + @"(?SP\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), }; From 584348c6ad68d4ff478fe7a594d31ead4701637f Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Mon, 14 Jun 2021 21:12:37 -0500 Subject: [PATCH 19/55] Special Marker Changes (#306) * SP# is now a way to force the file to be a special rather than pushing it into a Specials folder. * Made it so if there is a Special (for any Parse call), volume and chapters will be ignored. * Fixed a unit test missing Theory and fixed a regex case --- API.Tests/Parser/MangaParserTests.cs | 5 ++- API.Tests/Parser/ParserTest.cs | 10 +++++ API/Parser/Parser.cs | 59 +++++++++++++++++++++------- API/Parser/ParserInfo.cs | 2 +- API/Services/Tasks/ScannerService.cs | 2 +- 5 files changed, 60 insertions(+), 18 deletions(-) diff --git a/API.Tests/Parser/MangaParserTests.cs b/API.Tests/Parser/MangaParserTests.cs index 2ec34318a..1123404b0 100644 --- a/API.Tests/Parser/MangaParserTests.cs +++ b/API.Tests/Parser/MangaParserTests.cs @@ -145,6 +145,7 @@ namespace API.Tests.Parser [InlineData("X-Men v1 #201 (September 2007).cbz", "X-Men")] [InlineData("Kodoja #001 (March 2016)", "Kodoja")] [InlineData("Boku No Kokoro No Yabai Yatsu - Chapter 054 I Prayed At The Shrine (V0).cbz", "Boku No Kokoro No Yabai Yatsu")] + [InlineData("Kiss x Sis - Ch.36 - A Cold Home Visit.cbz", "Kiss x Sis")] public void ParseSeriesTest(string filename, string expected) { Assert.Equal(expected, API.Parser.Parser.ParseSeries(filename)); @@ -242,8 +243,8 @@ namespace API.Tests.Parser [InlineData("Gifting The Wonderful World With Blessings! 
- 3 Side Stories [yuNS][Unknown]", true)] [InlineData("A Town Where You Live - Bonus Chapter.zip", true)] [InlineData("Yuki Merry - 4-Komga Anthology", false)] - [InlineData("Beastars - SP01", true)] - [InlineData("Beastars SP01", true)] + [InlineData("Beastars - SP01", false)] + [InlineData("Beastars SP01", false)] public void ParseMangaSpecialTest(string input, bool expected) { Assert.Equal(expected, !string.IsNullOrEmpty(API.Parser.Parser.ParseMangaSpecial(input))); diff --git a/API.Tests/Parser/ParserTest.cs b/API.Tests/Parser/ParserTest.cs index 314c7cd11..4a1a1babd 100644 --- a/API.Tests/Parser/ParserTest.cs +++ b/API.Tests/Parser/ParserTest.cs @@ -5,6 +5,16 @@ namespace API.Tests.Parser { public class ParserTests { + + [Theory] + [InlineData("Beastars - SP01", true)] + [InlineData("Beastars SP01", true)] + [InlineData("Beastars Special 01", false)] + [InlineData("Beastars Extra 01", false)] + public void HasSpecialTest(string input, bool expected) + { + Assert.Equal(expected, HasSpecialMarker(input)); + } [Theory] [InlineData("0001", "1")] diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index d453a7586..149d5a940 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -9,9 +9,12 @@ namespace API.Parser { public static class Parser { - public static readonly string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|.cb7"; - public static readonly string BookFileExtensions = @"\.epub"; - public static readonly string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg)"; + public const string DefaultChapter = "0"; + public const string DefaultVolume = "0"; + + public const string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|.cb7"; + public const string BookFileExtensions = @"\.epub"; + public const string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg)"; public static readonly Regex FontSrcUrlRegex = new Regex("(src:url\\(\"?'?)([a-z0-9/\\._]+)(\"?'?\\))", RegexOptions.IgnoreCase | RegexOptions.Compiled); public static readonly Regex CssImportUrlRegex = new Regex("(@import\\s[\"|'])(?[\\w\\d/\\._-]+)([\"|'];?)", RegexOptions.IgnoreCase | RegexOptions.Compiled); @@ -92,7 +95,7 @@ namespace API.Parser RegexOptions.IgnoreCase | RegexOptions.Compiled), // Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb) new Regex( - @"(?.*) (\b|_|-)v", + @"(?.*) (\b|_|-)(v|ch\.?|c)\d+", RegexOptions.IgnoreCase | RegexOptions.Compiled), //Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip must be before [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip // due to duplicate version identifiers in file. @@ -374,12 +377,14 @@ namespace API.Parser new Regex( @"(?Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories|Bonus)", RegexOptions.IgnoreCase | RegexOptions.Compiled), - // If SP\d+ is in the filename, we force treat it as a special regardless if volume or chapter might have been found. - new Regex( - @"(?SP\d+)", - RegexOptions.IgnoreCase | RegexOptions.Compiled), }; + // If SP\d+ is in the filename, we force treat it as a special regardless if volume or chapter might have been found. + private static readonly Regex SpecialMarkerRegex = new Regex( + @"(?SP\d+)", + RegexOptions.IgnoreCase | RegexOptions.Compiled + ); + /// /// Parses information out of a file path. 
Will fallback to using directory name if Series couldn't be parsed @@ -428,7 +433,7 @@ namespace API.Parser { var folder = fallbackFolders[i]; if (!string.IsNullOrEmpty(ParseMangaSpecial(folder))) continue; - if (ParseVolume(folder) != "0" || ParseChapter(folder) != "0") continue; + if (ParseVolume(folder) != DefaultVolume || ParseChapter(folder) != DefaultChapter) continue; var series = ParseSeries(folder); @@ -457,10 +462,17 @@ namespace API.Parser var isSpecial = ParseMangaSpecial(fileName); // We must ensure that we can only parse a special out. As some files will have v20 c171-180+Omake and that // could cause a problem as Omake is a special term, but there is valid volume/chapter information. - if (ret.Chapters == "0" && ret.Volumes == "0" && !string.IsNullOrEmpty(isSpecial)) + if (ret.Chapters == DefaultChapter && ret.Volumes == DefaultVolume && !string.IsNullOrEmpty(isSpecial)) { ret.IsSpecial = true; } + + if (HasSpecialMarker(fileName)) + { + ret.IsSpecial = true; + ret.Chapters = DefaultChapter; + ret.Volumes = DefaultVolume; + } @@ -495,6 +507,25 @@ namespace API.Parser return string.Empty; } + /// + /// If the file has SP marker. + /// + /// + /// + public static bool HasSpecialMarker(string filePath) + { + var matches = SpecialMarkerRegex.Matches(filePath); + foreach (Match match in matches) + { + if (match.Groups["Special"].Success && match.Groups["Special"].Value != string.Empty) + { + return true; + } + } + + return false; + } + public static string ParseMangaSpecial(string filePath) { foreach (var regex in MangaSpecialRegex) @@ -564,7 +595,7 @@ namespace API.Parser } } - return "0"; + return DefaultVolume; } public static string ParseComicVolume(string filename) @@ -586,7 +617,7 @@ namespace API.Parser } } - return "0"; + return DefaultVolume; } public static string ParseChapter(string filename) @@ -614,7 +645,7 @@ namespace API.Parser } } - return "0"; + return DefaultChapter; } private static string AddChapterPart(string value) @@ -652,7 +683,7 @@ namespace API.Parser } } - return "0"; + return DefaultChapter; } private static string RemoveEditionTagHolders(string title) diff --git a/API/Parser/ParserInfo.cs b/API/Parser/ParserInfo.cs index e49d87e74..a2c4a9c51 100644 --- a/API/Parser/ParserInfo.cs +++ b/API/Parser/ParserInfo.cs @@ -3,7 +3,7 @@ namespace API.Parser { /// - /// This represents a single file + /// This represents all parsed information from a single file /// public class ParserInfo { diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index e22803c4b..91e873d13 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -466,7 +466,7 @@ namespace API.Services.Tasks return; } - if (type == LibraryType.Book && Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != "0") + if (type == LibraryType.Book && Parser.Parser.IsEpub(path) && Parser.Parser.ParseVolume(info.Series) != Parser.Parser.DefaultVolume) { info = Parser.Parser.Parse(path, rootPath, type); var info2 = _bookService.ParseInfo(path); From d02d2d3cb560d470be69bd6f49e856a2b20582b5 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Tue, 15 Jun 2021 09:51:37 -0500 Subject: [PATCH 20/55] Epub 3.2 Collection Tag support (#308) * Hooked up logic for collections based on EPUB3.2 Spec and Fixed improper tags in EPUBs since it is XML and we are using HTML to parse it. 
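
For context on the collection hookup described above: EPUB 3.2 expresses series membership through "belongs-to-collection" refinements in the OPF package document. The sketch below is only an illustration of that mapping; the method name, tuple shape, and the flattened (Property, Content) input are assumptions for the example and not Kavita's or VersOne.Epub's API. The real implementation in the diff further down reads the same values from epubBook.Schema.Package.Metadata.MetaItems.

    // Typical EPUB 3.2 collection metadata in the OPF file (per the EPUB 3.2 spec):
    //   <meta property="belongs-to-collection" id="c01">The Lord of the Rings</meta>
    //   <meta refines="#c01" property="collection-type">set</meta>
    //   <meta refines="#c01" property="group-position">2</meta>
    // Illustrative mapping of those properties onto series information.
    using System.Collections.Generic;

    public static class Epub32CollectionExample
    {
        public static (string Series, string SeriesIndex, string CollectionType) MapCollectionMeta(
            IEnumerable<(string Property, string Content)> metaItems)
        {
            string series = string.Empty, seriesIndex = string.Empty, collectionType = string.Empty;
            foreach (var (property, content) in metaItems)
            {
                switch (property)
                {
                    case "belongs-to-collection": series = content; break;         // set/series name
                    case "group-position":        seriesIndex = content; break;    // position within the set
                    case "collection-type":       collectionType = content; break; // "series" or "set"
                }
            }
            return (series, seriesIndex, collectionType);
        }
    }
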
* Fixed a bug with src:url url replacing so that it's much cleaner regex --- API/Controllers/BookController.cs | 3 +++ API/Parser/Parser.cs | 2 +- API/Services/BookService.cs | 45 +++++++++++++++++++++++++++---- 3 files changed, 44 insertions(+), 6 deletions(-) diff --git a/API/Controllers/BookController.cs b/API/Controllers/BookController.cs index a2af28ab6..e5a980467 100644 --- a/API/Controllers/BookController.cs +++ b/API/Controllers/BookController.cs @@ -186,6 +186,9 @@ namespace API.Controllers var content = await contentFileRef.ReadContentAsync(); if (contentFileRef.ContentType != EpubContentType.XHTML_1_1) return Ok(content); + // In more cases than not, due to this being XML not HTML, we need to escape the script tags. + content = BookService.EscapeTags(content); + doc.LoadHtml(content); var body = doc.DocumentNode.SelectSingleNode("//body"); diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index 149d5a940..efcb6c550 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -15,7 +15,7 @@ namespace API.Parser public const string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|\.tar.gz|\.7zip|\.7z|.cb7"; public const string BookFileExtensions = @"\.epub"; public const string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg)"; - public static readonly Regex FontSrcUrlRegex = new Regex("(src:url\\(\"?'?)([a-z0-9/\\._]+)(\"?'?\\))", RegexOptions.IgnoreCase | RegexOptions.Compiled); + public static readonly Regex FontSrcUrlRegex = new Regex(@"(src:url\(.{1})" + "([^\"']*)" + @"(.{1}\))", RegexOptions.IgnoreCase | RegexOptions.Compiled); public static readonly Regex CssImportUrlRegex = new Regex("(@import\\s[\"|'])(?[\\w\\d/\\._-]+)([\"|'];?)", RegexOptions.IgnoreCase | RegexOptions.Compiled); private static readonly string XmlRegexExtensions = @"\.xml"; diff --git a/API/Services/BookService.cs b/API/Services/BookService.cs index 3d35919f0..b0252f122 100644 --- a/API/Services/BookService.cs +++ b/API/Services/BookService.cs @@ -23,7 +23,7 @@ namespace API.Services private const int ThumbnailWidth = 320; // 153w x 230h private readonly StylesheetParser _cssParser = new (); - + public BookService(ILogger logger) { _logger = logger; @@ -204,6 +204,13 @@ namespace API.Services return 0; } + public static string EscapeTags(string content) + { + content = Regex.Replace(content, @")", ""); + content = Regex.Replace(content, @")", ""); + return content; + } + public static string CleanContentKeys(string key) { return key.Replace("../", string.Empty); @@ -241,14 +248,23 @@ namespace API.Services // // If all three are present, we can take that over dc:title and format as: // Series = The Dark Tower, Volume = 5, Filename as "Wolves of the Calla" + // In addition, the following can exist and should parse as a series (EPUB 3.2 spec) + // + // The Lord of the Rings + // + // set + // 2 try { - string seriesIndex = string.Empty; - string series = string.Empty; - string specialName = string.Empty; + var seriesIndex = string.Empty; + var series = string.Empty; + var specialName = string.Empty; + var groupPosition = string.Empty; + foreach (var metadataItem in epubBook.Schema.Package.Metadata.MetaItems) { + // EPUB 2 and 3 switch (metadataItem.Name) { case "calibre:series_index": @@ -261,10 +277,29 @@ namespace API.Services specialName = metadataItem.Content; break; } + + // EPUB 3.2+ only + switch (metadataItem.Property) + { + case "group-position": + seriesIndex = metadataItem.Content; + break; + case "belongs-to-collection": + series = metadataItem.Content; + break; + case 
"collection-type": + groupPosition = metadataItem.Content; + break; + } } - if (!string.IsNullOrEmpty(series) && !string.IsNullOrEmpty(seriesIndex) && !string.IsNullOrEmpty(specialName)) + if (!string.IsNullOrEmpty(series) && !string.IsNullOrEmpty(seriesIndex) && + (!string.IsNullOrEmpty(specialName) || groupPosition.Equals("series") || groupPosition.Equals("set"))) { + if (string.IsNullOrEmpty(specialName)) + { + specialName = epubBook.Title; + } return new ParserInfo() { Chapters = "0", From becf2ec7a6cfafb1df72d8994bae8cb8056030bc Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Wed, 16 Jun 2021 10:58:50 -0500 Subject: [PATCH 21/55] Changed how series parsing works. Now at the end of the Parse() call, if we still haven't figured out the Series, we will default to taking the file name and cleaning it. This allows files that have no numbers to be picked up. (#310) Series Parsing now, at the end of the Parse() call if we still haven't figured out the Series, will default to taking the file name and cleaning it. This allows files that have no numbers to be picked up. --- API/Parser/Parser.cs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index efcb6c550..e6ac20a1f 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -473,8 +473,11 @@ namespace API.Parser ret.Chapters = DefaultChapter; ret.Volumes = DefaultVolume; } - - + + if (string.IsNullOrEmpty(ret.Series)) + { + ret.Series = CleanTitle(fileName); + } return ret.Series == string.Empty ? null : ret; } From 95db721508063f1107ec6f351e34503c8683ac29 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Thu, 17 Jun 2021 18:06:53 -0500 Subject: [PATCH 22/55] Create FUNDING.yml --- .github/FUNDING.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 .github/FUNDING.yml diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 000000000..05704ad08 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +#github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] +#patreon: # Replace with a single Patreon username +#open_collective: # Replace with a single Open Collective username +#ko_fi: # Replace with a single Ko-fi username +#tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel +#community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +#liberapay: # Replace with a single Liberapay username +#issuehunt: # Replace with a single IssueHunt username +#otechie: # Replace with a single Otechie username +custom: ["https://paypal.me/majora2007?locale.x=en_US"] From a9db07e7ffe00a92b12ce0379dc538a6a7d724b5 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Thu, 17 Jun 2021 18:07:51 -0500 Subject: [PATCH 23/55] Create FUNDING.yml Updated link to be shorter --- FUNDING.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 FUNDING.yml diff --git a/FUNDING.yml b/FUNDING.yml new file mode 100644 index 000000000..a5717dec4 --- /dev/null +++ b/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel +community_bridge: 
# Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: ["https://paypal.me/majora2007"] From d2e444910d96330b660c8adbb9c0e4d9478c94dc Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Thu, 17 Jun 2021 18:08:22 -0500 Subject: [PATCH 24/55] Update FUNDING.yml Updated link --- .github/FUNDING.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index 05704ad08..a5717dec4 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1,12 +1,12 @@ # These are supported funding model platforms -#github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] -#patreon: # Replace with a single Patreon username -#open_collective: # Replace with a single Open Collective username -#ko_fi: # Replace with a single Ko-fi username -#tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel -#community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry -#liberapay: # Replace with a single Liberapay username -#issuehunt: # Replace with a single IssueHunt username -#otechie: # Replace with a single Otechie username -custom: ["https://paypal.me/majora2007?locale.x=en_US"] +github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: ["https://paypal.me/majora2007"] From 6e1b227e65811b49c2968f8c8ef261987c97373a Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Fri, 18 Jun 2021 07:37:48 -0500 Subject: [PATCH 25/55] Transaction Support (#309) * Added transactions to UnitOfWork and refactored code to use it. 
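
In condensed form, the controllers refactored in this patch follow a commit-or-rollback shape around the unit of work. The sketch below is a pattern summary only: UpdateSomething and SomeRepository are placeholders rather than real Kavita members, and the method assumes it lives in a controller with _unitOfWork injected and the usual controller usings, as the controllers in the diffs below do.

    // Pattern sketch: mutate via repositories, commit once, and roll back (which disposes
    // the tracked context) if anything throws so partial changes are never flushed later.
    public async Task<ActionResult> UpdateSomething()
    {
        try
        {
            // _unitOfWork.SomeRepository.Update(entity);  // placeholder for the real mutations
            if (!_unitOfWork.HasChanges()) return Ok("No changes to save");
            if (await _unitOfWork.CommitAsync()) return Ok("Successfully updated");
        }
        catch (Exception)
        {
            await _unitOfWork.RollbackAsync();
        }
        return BadRequest("Could not update");
    }
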
* This included blank UI fix from Kavita-webui --- API/Controllers/AccountController.cs | 88 ++++++++++++++--------- API/Controllers/CollectionController.cs | 61 ++++++++-------- API/Controllers/LibraryController.cs | 6 +- API/Controllers/ReaderController.cs | 8 +-- API/Controllers/SeriesController.cs | 95 +++++++++++++------------ API/Controllers/SettingsController.cs | 2 +- API/Controllers/UsersController.cs | 4 +- API/Data/UnitOfWork.cs | 17 ++++- API/Interfaces/IUnitOfWork.cs | 5 +- API/Services/MetadataService.cs | 4 +- API/Services/Tasks/ScannerService.cs | 2 +- 11 files changed, 168 insertions(+), 124 deletions(-) diff --git a/API/Controllers/AccountController.cs b/API/Controllers/AccountController.cs index 4959fc5f4..876cecf84 100644 --- a/API/Controllers/AccountController.cs +++ b/API/Controllers/AccountController.cs @@ -83,42 +83,55 @@ namespace API.Controllers [HttpPost("register")] public async Task> Register(RegisterDto registerDto) { - if (await _userManager.Users.AnyAsync(x => x.NormalizedUserName == registerDto.Username.ToUpper())) + try { - return BadRequest("Username is taken."); - } - - var user = _mapper.Map(registerDto); - user.UserPreferences ??= new AppUserPreferences(); - - var result = await _userManager.CreateAsync(user, registerDto.Password); - - if (!result.Succeeded) return BadRequest(result.Errors); - - var role = registerDto.IsAdmin ? PolicyConstants.AdminRole : PolicyConstants.PlebRole; - var roleResult = await _userManager.AddToRoleAsync(user, role); - - if (!roleResult.Succeeded) return BadRequest(result.Errors); - - // When we register an admin, we need to grant them access to all Libraries. - if (registerDto.IsAdmin) - { - _logger.LogInformation("{UserName} is being registered as admin. Granting access to all libraries", user.UserName); - var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesAsync()).ToList(); - foreach (var lib in libraries) + if (await _userManager.Users.AnyAsync(x => x.NormalizedUserName == registerDto.Username.ToUpper())) { - lib.AppUsers ??= new List(); - lib.AppUsers.Add(user); + return BadRequest("Username is taken."); } - if (libraries.Any() && !await _unitOfWork.Complete()) _logger.LogError("There was an issue granting library access. Please do this manually"); + + var user = _mapper.Map(registerDto); + user.UserPreferences ??= new AppUserPreferences(); + + var result = await _userManager.CreateAsync(user, registerDto.Password); + + if (!result.Succeeded) return BadRequest(result.Errors); + + var role = registerDto.IsAdmin ? PolicyConstants.AdminRole : PolicyConstants.PlebRole; + var roleResult = await _userManager.AddToRoleAsync(user, role); + + if (!roleResult.Succeeded) return BadRequest(result.Errors); + + // When we register an admin, we need to grant them access to all Libraries. + if (registerDto.IsAdmin) + { + _logger.LogInformation("{UserName} is being registered as admin. Granting access to all libraries", + user.UserName); + var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesAsync()).ToList(); + foreach (var lib in libraries) + { + lib.AppUsers ??= new List(); + lib.AppUsers.Add(user); + } + + if (libraries.Any() && !await _unitOfWork.CommitAsync()) + _logger.LogError("There was an issue granting library access. 
Please do this manually"); + } + + return new UserDto + { + Username = user.UserName, + Token = await _tokenService.CreateToken(user), + Preferences = _mapper.Map(user.UserPreferences) + }; + } + catch (Exception ex) + { + _logger.LogError(ex, "Something went wrong when registering user"); + await _unitOfWork.RollbackAsync(); } - return new UserDto - { - Username = user.UserName, - Token = await _tokenService.CreateToken(user), - Preferences = _mapper.Map(user.UserPreferences) - }; + return BadRequest("Something went wrong when registering user"); } [HttpPost("login")] @@ -140,7 +153,7 @@ namespace API.Controllers user.UserPreferences ??= new AppUserPreferences(); _unitOfWork.UserRepository.Update(user); - await _unitOfWork.Complete(); + await _unitOfWork.CommitAsync(); _logger.LogInformation("{UserName} logged in at {Time}", user.UserName, user.LastActive); @@ -167,7 +180,6 @@ namespace API.Controllers { var user = await _userManager.Users .Include(u => u.UserPreferences) - //.Include(u => u.UserRoles) .SingleOrDefaultAsync(x => x.NormalizedUserName == updateRbsDto.Username.ToUpper()); if (updateRbsDto.Roles.Contains(PolicyConstants.AdminRole) || updateRbsDto.Roles.Contains(PolicyConstants.PlebRole)) @@ -178,16 +190,22 @@ namespace API.Controllers var existingRoles = (await _userManager.GetRolesAsync(user)) .Where(s => s != PolicyConstants.AdminRole && s != PolicyConstants.PlebRole) .ToList(); - + // Find what needs to be added and what needs to be removed var rolesToRemove = existingRoles.Except(updateRbsDto.Roles); var result = await _userManager.AddToRolesAsync(user, updateRbsDto.Roles); - if (!result.Succeeded) return BadRequest("Something went wrong, unable to update user's roles"); + if (!result.Succeeded) + { + await _unitOfWork.RollbackAsync(); + return BadRequest("Something went wrong, unable to update user's roles"); + } if ((await _userManager.RemoveFromRolesAsync(user, rolesToRemove)).Succeeded) { return Ok(); } + + await _unitOfWork.RollbackAsync(); return BadRequest("Something went wrong, unable to update user's roles"); } diff --git a/API/Controllers/CollectionController.cs b/API/Controllers/CollectionController.cs index 27455a283..6ad5fdbaf 100644 --- a/API/Controllers/CollectionController.cs +++ b/API/Controllers/CollectionController.cs @@ -1,4 +1,5 @@ -using System.Collections.Generic; +using System; +using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using API.Constants; @@ -33,11 +34,7 @@ namespace API.Controllers { return await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync(); } - else - { - return await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(); - } - + return await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(); } [Authorize(Policy = "RequireAdminRole")] @@ -64,7 +61,7 @@ namespace API.Controllers if (_unitOfWork.HasChanges()) { - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok("Tag updated successfully"); } @@ -81,38 +78,42 @@ namespace API.Controllers [HttpPost("update-series")] public async Task UpdateSeriesForTag(UpdateSeriesForTagDto updateSeriesForTagDto) { - var tag = await _unitOfWork.CollectionTagRepository.GetFullTagAsync(updateSeriesForTagDto.Tag.Id); - if (tag == null) return BadRequest("Not a valid Tag"); - tag.SeriesMetadatas ??= new List(); - - // Check if Tag has updated (Summary) - if (tag.Summary == null || !tag.Summary.Equals(updateSeriesForTagDto.Tag.Summary)) + try { - tag.Summary = updateSeriesForTagDto.Tag.Summary; - 
_unitOfWork.CollectionTagRepository.Update(tag); - } + var tag = await _unitOfWork.CollectionTagRepository.GetFullTagAsync(updateSeriesForTagDto.Tag.Id); + if (tag == null) return BadRequest("Not a valid Tag"); + tag.SeriesMetadatas ??= new List(); - foreach (var seriesIdToRemove in updateSeriesForTagDto.SeriesIdsToRemove) - { - tag.SeriesMetadatas.Remove(tag.SeriesMetadatas.Single(sm => sm.SeriesId == seriesIdToRemove)); - } - + // Check if Tag has updated (Summary) + if (tag.Summary == null || !tag.Summary.Equals(updateSeriesForTagDto.Tag.Summary)) + { + tag.Summary = updateSeriesForTagDto.Tag.Summary; + _unitOfWork.CollectionTagRepository.Update(tag); + } - if (tag.SeriesMetadatas.Count == 0) - { - _unitOfWork.CollectionTagRepository.Remove(tag); - } + foreach (var seriesIdToRemove in updateSeriesForTagDto.SeriesIdsToRemove) + { + tag.SeriesMetadatas.Remove(tag.SeriesMetadatas.Single(sm => sm.SeriesId == seriesIdToRemove)); + } - if (_unitOfWork.HasChanges() && await _unitOfWork.Complete()) + + if (tag.SeriesMetadatas.Count == 0) + { + _unitOfWork.CollectionTagRepository.Remove(tag); + } + + if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync()) + { + return Ok("Tag updated"); + } + } + catch (Exception) { - return Ok("Tag updated"); + await _unitOfWork.RollbackAsync(); } return BadRequest("Something went wrong. Please try again."); } - - - } } \ No newline at end of file diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs index 72a91f1fb..352b9f873 100644 --- a/API/Controllers/LibraryController.cs +++ b/API/Controllers/LibraryController.cs @@ -67,7 +67,7 @@ namespace API.Controllers } - if (!await _unitOfWork.Complete()) return BadRequest("There was a critical issue. Please try again."); + if (!await _unitOfWork.CommitAsync()) return BadRequest("There was a critical issue. 
Please try again."); _logger.LogInformation("Created a new library: {LibraryName}", library.Name); _taskScheduler.ScanLibrary(library.Id); @@ -133,7 +133,7 @@ namespace API.Controllers return Ok(_mapper.Map(user)); } - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { _logger.LogInformation("Added: {SelectedLibraries} to {Username}",libraryString, updateLibraryForUserDto.Username); return Ok(_mapper.Map(user)); @@ -199,7 +199,7 @@ namespace API.Controllers _unitOfWork.LibraryRepository.Update(library); - if (!await _unitOfWork.Complete()) return BadRequest("There was a critical issue updating the library."); + if (!await _unitOfWork.CommitAsync()) return BadRequest("There was a critical issue updating the library."); if (differenceBetweenFolders.Any()) { _taskScheduler.ScanLibrary(library.Id, true); diff --git a/API/Controllers/ReaderController.cs b/API/Controllers/ReaderController.cs index c364fac48..2ac3d51fe 100644 --- a/API/Controllers/ReaderController.cs +++ b/API/Controllers/ReaderController.cs @@ -116,7 +116,7 @@ namespace API.Controllers _unitOfWork.UserRepository.Update(user); - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok(); } @@ -157,7 +157,7 @@ namespace API.Controllers _unitOfWork.UserRepository.Update(user); - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok(); } @@ -198,7 +198,7 @@ namespace API.Controllers _unitOfWork.UserRepository.Update(user); - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok(); } @@ -251,7 +251,7 @@ namespace API.Controllers _unitOfWork.UserRepository.Update(user); - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok(); } diff --git a/API/Controllers/SeriesController.cs b/API/Controllers/SeriesController.cs index caa55b229..3780538ad 100644 --- a/API/Controllers/SeriesController.cs +++ b/API/Controllers/SeriesController.cs @@ -114,7 +114,7 @@ namespace API.Controllers _unitOfWork.UserRepository.Update(user); - if (!await _unitOfWork.Complete()) return BadRequest("There was a critical error."); + if (!await _unitOfWork.CommitAsync()) return BadRequest("There was a critical error."); return Ok(); } @@ -139,7 +139,7 @@ namespace API.Controllers _unitOfWork.SeriesRepository.Update(series); - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok(); } @@ -190,61 +190,68 @@ namespace API.Controllers [HttpPost("metadata")] public async Task UpdateSeriesMetadata(UpdateSeriesMetadataDto updateSeriesMetadataDto) { - var seriesId = updateSeriesMetadataDto.SeriesMetadata.SeriesId; - var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId); - if (series.Metadata == null) + try { - series.Metadata = DbFactory.SeriesMetadata(updateSeriesMetadataDto.Tags - .Select(dto => DbFactory.CollectionTag(dto.Id, dto.Title, dto.Summary, dto.Promoted)).ToList()); - } - else - { - series.Metadata.CollectionTags ??= new List(); - var newTags = new List(); - - // I want a union of these 2 lists. 
Return only elements that are in both lists, but the list types are different - var existingTags = series.Metadata.CollectionTags.ToList(); - foreach (var existing in existingTags) + var seriesId = updateSeriesMetadataDto.SeriesMetadata.SeriesId; + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId); + if (series.Metadata == null) { - if (updateSeriesMetadataDto.Tags.SingleOrDefault(t => t.Id == existing.Id) == null) + series.Metadata = DbFactory.SeriesMetadata(updateSeriesMetadataDto.Tags + .Select(dto => DbFactory.CollectionTag(dto.Id, dto.Title, dto.Summary, dto.Promoted)).ToList()); + } + else + { + series.Metadata.CollectionTags ??= new List(); + var newTags = new List(); + + // I want a union of these 2 lists. Return only elements that are in both lists, but the list types are different + var existingTags = series.Metadata.CollectionTags.ToList(); + foreach (var existing in existingTags) { - // Remove tag - series.Metadata.CollectionTags.Remove(existing); + if (updateSeriesMetadataDto.Tags.SingleOrDefault(t => t.Id == existing.Id) == null) + { + // Remove tag + series.Metadata.CollectionTags.Remove(existing); + } + } + + // At this point, all tags that aren't in dto have been removed. + foreach (var tag in updateSeriesMetadataDto.Tags) + { + var existingTag = series.Metadata.CollectionTags.SingleOrDefault(t => t.Title == tag.Title); + if (existingTag != null) + { + // Update existingTag + existingTag.Promoted = tag.Promoted; + existingTag.Title = tag.Title; + existingTag.NormalizedTitle = Parser.Parser.Normalize(tag.Title).ToUpper(); + } + else + { + // Add new tag + newTags.Add(DbFactory.CollectionTag(tag.Id, tag.Title, tag.Summary, tag.Promoted)); + } + } + + foreach (var tag in newTags) + { + series.Metadata.CollectionTags.Add(tag); } } - // At this point, all tags that aren't in dto have been removed. - foreach (var tag in updateSeriesMetadataDto.Tags) + if (!_unitOfWork.HasChanges()) { - var existingTag = series.Metadata.CollectionTags.SingleOrDefault(t => t.Title == tag.Title); - if (existingTag != null) - { - // Update existingTag - existingTag.Promoted = tag.Promoted; - existingTag.Title = tag.Title; - existingTag.NormalizedTitle = Parser.Parser.Normalize(tag.Title).ToUpper(); - } - else - { - // Add new tag - newTags.Add(DbFactory.CollectionTag(tag.Id, tag.Title, tag.Summary, tag.Promoted)); - } + return Ok("No changes to save"); } - foreach (var tag in newTags) + if (await _unitOfWork.CommitAsync()) { - series.Metadata.CollectionTags.Add(tag); + return Ok("Successfully updated"); } } - - if (!_unitOfWork.HasChanges()) + catch (Exception) { - return Ok("No changes to save"); - } - - if (await _unitOfWork.Complete()) - { - return Ok("Successfully updated"); + await _unitOfWork.RollbackAsync(); } return BadRequest("Could not update metadata"); diff --git a/API/Controllers/SettingsController.cs b/API/Controllers/SettingsController.cs index b30d7fdd3..ce68e4e5a 100644 --- a/API/Controllers/SettingsController.cs +++ b/API/Controllers/SettingsController.cs @@ -95,7 +95,7 @@ namespace API.Controllers _configuration.GetSection("Logging:LogLevel:Default").Value = updateSettingsDto.LoggingLevel + ""; if (!_unitOfWork.HasChanges()) return Ok("Nothing was updated"); - if (!_unitOfWork.HasChanges() || !await _unitOfWork.Complete()) + if (!_unitOfWork.HasChanges() || !await _unitOfWork.CommitAsync()) return BadRequest("There was a critical issue. 
Please try again."); _logger.LogInformation("Server Settings updated"); diff --git a/API/Controllers/UsersController.cs b/API/Controllers/UsersController.cs index 10d6d3e07..3a9a44d6c 100644 --- a/API/Controllers/UsersController.cs +++ b/API/Controllers/UsersController.cs @@ -26,7 +26,7 @@ namespace API.Controllers var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(username); _unitOfWork.UserRepository.Delete(user); - if (await _unitOfWork.Complete()) return Ok(); + if (await _unitOfWork.CommitAsync()) return Ok(); return BadRequest("Could not delete the user."); } @@ -71,7 +71,7 @@ namespace API.Controllers _unitOfWork.UserRepository.Update(existingPreferences); - if (await _unitOfWork.Complete()) + if (await _unitOfWork.CommitAsync()) { return Ok(preferencesDto); } diff --git a/API/Data/UnitOfWork.cs b/API/Data/UnitOfWork.cs index 178136e3a..ba89d0612 100644 --- a/API/Data/UnitOfWork.cs +++ b/API/Data/UnitOfWork.cs @@ -30,7 +30,11 @@ namespace API.Data public IAppUserProgressRepository AppUserProgressRepository => new AppUserProgressRepository(_context); public ICollectionTagRepository CollectionTagRepository => new CollectionTagRepository(_context, _mapper); - public async Task Complete() + public bool Commit() + { + return _context.SaveChanges() > 0; + } + public async Task CommitAsync() { return await _context.SaveChangesAsync() > 0; } @@ -39,5 +43,16 @@ namespace API.Data { return _context.ChangeTracker.HasChanges(); } + + public async Task RollbackAsync() + { + await _context.DisposeAsync(); + return true; + } + public bool Rollback() + { + _context.Dispose(); + return true; + } } } \ No newline at end of file diff --git a/API/Interfaces/IUnitOfWork.cs b/API/Interfaces/IUnitOfWork.cs index 8f4b53c8f..df326c3e2 100644 --- a/API/Interfaces/IUnitOfWork.cs +++ b/API/Interfaces/IUnitOfWork.cs @@ -11,7 +11,10 @@ namespace API.Interfaces ISettingsRepository SettingsRepository { get; } IAppUserProgressRepository AppUserProgressRepository { get; } ICollectionTagRepository CollectionTagRepository { get; } - Task Complete(); + bool Commit(); + Task CommitAsync(); bool HasChanges(); + bool Rollback(); + Task RollbackAsync(); } } \ No newline at end of file diff --git a/API/Services/MetadataService.cs b/API/Services/MetadataService.cs index 122fc90c6..6ee2856ab 100644 --- a/API/Services/MetadataService.cs +++ b/API/Services/MetadataService.cs @@ -158,7 +158,7 @@ namespace API.Services } - if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.Complete()).Result) + if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result) { _logger.LogInformation("Updated metadata for {LibraryName} in {ElapsedMilliseconds} milliseconds", library.Name, sw.ElapsedMilliseconds); } @@ -191,7 +191,7 @@ namespace API.Services _unitOfWork.SeriesRepository.Update(series); - if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.Complete()).Result) + if (_unitOfWork.HasChanges() && Task.Run(() => _unitOfWork.CommitAsync()).Result) { _logger.LogInformation("Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds); } diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs index 91e873d13..232e8fce0 100644 --- a/API/Services/Tasks/ScannerService.cs +++ b/API/Services/Tasks/ScannerService.cs @@ -89,7 +89,7 @@ namespace API.Services.Tasks UpdateLibrary(library, series); _unitOfWork.LibraryRepository.Update(library); - if (Task.Run(() => _unitOfWork.Complete()).Result) + if (Task.Run(() => 
_unitOfWork.CommitAsync()).Result) { _logger.LogInformation("Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, series.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, library.Name); } From 1036c731ad855e4a21d2214c0c60ff4c27afd0de Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Fri, 18 Jun 2021 09:49:19 -0500 Subject: [PATCH 26/55] When performing a download, if there is only 1 file, don't zip it and send back the raw file. (#315) --- API/Controllers/DownloadController.cs | 45 ++++++++++++++++++++++++--- 1 file changed, 40 insertions(+), 5 deletions(-) diff --git a/API/Controllers/DownloadController.cs b/API/Controllers/DownloadController.cs index 67d23ac8e..f0a006120 100644 --- a/API/Controllers/DownloadController.cs +++ b/API/Controllers/DownloadController.cs @@ -1,7 +1,9 @@ using System; +using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading.Tasks; +using API.Entities; using API.Extensions; using API.Interfaces; using API.Interfaces.Services; @@ -9,6 +11,7 @@ using API.Services; using Kavita.Common; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.StaticFiles; namespace API.Controllers { @@ -17,11 +20,13 @@ namespace API.Controllers { private readonly IUnitOfWork _unitOfWork; private readonly IArchiveService _archiveService; + private readonly IDirectoryService _directoryService; - public DownloadController(IUnitOfWork unitOfWork, IArchiveService archiveService) + public DownloadController(IUnitOfWork unitOfWork, IArchiveService archiveService, IDirectoryService directoryService) { _unitOfWork = unitOfWork; _archiveService = archiveService; + _directoryService = directoryService; } [HttpGet("volume-size")] @@ -51,25 +56,51 @@ namespace API.Controllers var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId); try { + if (files.Count == 1) + { + return await GetFirstFileDownload(files); + } var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files.Select(c => c.FilePath), $"download_{User.GetUsername()}_v{volumeId}"); - return File(fileBytes, "application/zip", Path.GetFileName(zipPath)); + return File(fileBytes, "application/zip", Path.GetFileNameWithoutExtension(zipPath) + ".zip"); } catch (KavitaException ex) { return BadRequest(ex.Message); } } - + + private async Task GetFirstFileDownload(IEnumerable files) + { + var firstFile = files.Select(c => c.FilePath).First(); + var fileProvider = new FileExtensionContentTypeProvider(); + // Figures out what the content type should be based on the file name. 
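                // TryGetContentType consults FileExtensionContentTypeProvider's built-in extension-to-MIME map;
                // comic archive extensions such as .cbz and .cbr are not in that default map, so the fallback
                // switch below supplies content types for archives served un-zipped as a single file.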
+ if (!fileProvider.TryGetContentType(firstFile, out var contentType)) + { + contentType = Path.GetExtension(firstFile).ToLowerInvariant() switch + { + ".cbz" => "application/zip", + ".cbr" => "application/vnd.rar", + _ => contentType + }; + } + + return File(await _directoryService.ReadFileAsync(firstFile), contentType, Path.GetFileNameWithoutExtension(firstFile)); + } + [HttpGet("chapter")] public async Task DownloadChapter(int chapterId) { var files = await _unitOfWork.VolumeRepository.GetFilesForChapter(chapterId); try { + if (files.Count == 1) + { + return await GetFirstFileDownload(files); + } var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files.Select(c => c.FilePath), $"download_{User.GetUsername()}_c{chapterId}"); - return File(fileBytes, "application/zip", Path.GetFileName(zipPath)); + return File(fileBytes, "application/zip", Path.GetFileNameWithoutExtension(zipPath) + ".zip"); } catch (KavitaException ex) { @@ -83,9 +114,13 @@ namespace API.Controllers var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId); try { + if (files.Count == 1) + { + return await GetFirstFileDownload(files); + } var (fileBytes, zipPath) = await _archiveService.CreateZipForDownload(files.Select(c => c.FilePath), $"download_{User.GetUsername()}_s{seriesId}"); - return File(fileBytes, "application/zip", Path.GetFileName(zipPath)); + return File(fileBytes, "application/zip", Path.GetFileNameWithoutExtension(zipPath) + ".zip"); } catch (KavitaException ex) { From 0c88d91278562eb4e99fb9883714cdc5a8731cef Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Fri, 18 Jun 2021 09:49:30 -0500 Subject: [PATCH 27/55] Ignore .DS_Store and @eaDir folders when scanning directories. (#314) --- API/Services/DirectoryService.cs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs index ac5f5ec5e..f823d2066 100644 --- a/API/Services/DirectoryService.cs +++ b/API/Services/DirectoryService.cs @@ -13,6 +13,9 @@ namespace API.Services public class DirectoryService : IDirectoryService { private readonly ILogger _logger; + private static readonly Regex ExcludeDirectories = new Regex( + @"@eaDir|\.DS_Store", + RegexOptions.Compiled | RegexOptions.IgnoreCase); public DirectoryService(ILogger logger) { @@ -222,6 +225,7 @@ namespace API.Services /// Directory to scan /// Action to apply on file path /// Regex pattern to search against + /// /// public static int TraverseTreeParallelForEach(string root, Action action, string searchPattern, ILogger logger) { @@ -241,11 +245,11 @@ namespace API.Services while (dirs.Count > 0) { var currentDir = dirs.Pop(); - string[] subDirs; + IEnumerable subDirs; string[] files; try { - subDirs = Directory.GetDirectories(currentDir); + subDirs = Directory.GetDirectories(currentDir).Where(path => ExcludeDirectories.Matches(path).Count == 0); } // Thrown if we do not have discovery permission on the directory. catch (UnauthorizedAccessException e) { @@ -316,7 +320,7 @@ namespace API.Services // Push the subdirectories onto the stack for traversal. // This could also be done before handing the files. 
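                    // Note that subDirs was already filtered through ExcludeDirectories above, so Synology
                    // @eaDir folders (and any path matching the .DS_Store pattern) are never pushed or traversed.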
- foreach (string str in subDirs) + foreach (var str in subDirs) dirs.Push(str); } From 8f4d42d13ffb8d9505fe0af9bcb2a91086a4e5c7 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Sun, 20 Jun 2021 15:15:34 -0500 Subject: [PATCH 28/55] Add noreferrer noopener on links to external resources for epub reading (#318) --- API/Services/BookService.cs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/API/Services/BookService.cs b/API/Services/BookService.cs index b0252f122..f36455ff6 100644 --- a/API/Services/BookService.cs +++ b/API/Services/BookService.cs @@ -89,7 +89,8 @@ namespace API.Services } else { - anchor.Attributes.Add("target", "_blank"); + anchor.Attributes.Add("target", "_blank"); + anchor.Attributes.Add("rel", "noreferrer noopener"); } return; From b25335acbdda8862f40b5a1e6fc4a57a0ee25d71 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Sun, 20 Jun 2021 16:07:18 -0500 Subject: [PATCH 29/55] Single Download Failed on Epubs (#319) * Fixed a missed case where downloading a single file that is an epub (or cb7, zip, 7z) file would cause a critical error and the download would fail without any information. --- API/Controllers/DownloadController.cs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/API/Controllers/DownloadController.cs b/API/Controllers/DownloadController.cs index f0a006120..6e7408fa4 100644 --- a/API/Controllers/DownloadController.cs +++ b/API/Controllers/DownloadController.cs @@ -81,6 +81,10 @@ namespace API.Controllers { ".cbz" => "application/zip", ".cbr" => "application/vnd.rar", + ".cb7" => "application/x-compressed", + ".epub" => "application/epub+zip", + ".7z" => "application/x-7z-compressed", + ".7zip" => "application/x-7z-compressed", _ => contentType }; } From 1c9b2572ae25c390903252a3219fb07f4ec099a5 Mon Sep 17 00:00:00 2001 From: Leonardo Dias Date: Sun, 20 Jun 2021 19:26:35 -0300 Subject: [PATCH 30/55] Feat/usage stats collection (#317) * feat: implement anonymous usage data collection Co-authored-by: Joseph Milazzo --- .gitignore | 3 +- API/Controllers/SettingsController.cs | 19 +- API/Controllers/StatsController.cs | 40 ++++ API/DTOs/ClientInfoDto.cs | 36 ++++ API/DTOs/ServerInfoDto.cs | 12 ++ API/DTOs/ServerSettingDTO.cs | 1 + API/DTOs/UsageInfoDto.cs | 24 +++ API/DTOs/UsageStatisticsDto.cs | 33 ++++ API/Data/FileRepository.cs | 35 ++++ API/Data/Seed.cs | 1 + API/Entities/Enums/ServerSettingKey.cs | 5 +- .../ApplicationServiceExtensions.cs | 31 ++- API/Extensions/ServiceCollectionExtensions.cs | 16 +- .../Converters/ServerSettingConverter.cs | 3 + API/Interfaces/IFileRepository.cs | 10 + API/Interfaces/ITaskScheduler.cs | 2 + API/Interfaces/Services/IStatsService.cs | 13 ++ API/Services/Clients/StatsApiClient.cs | 55 ++++++ .../StartupTasksHostedService.cs | 54 +++++ API/Services/StatsService.cs | 186 ++++++++++++++++++ API/Services/TaskScheduler.cs | 33 +++- API/Startup.cs | 13 +- API/appsettings.Development.json | 5 + 23 files changed, 613 insertions(+), 17 deletions(-) create mode 100644 API/Controllers/StatsController.cs create mode 100644 API/DTOs/ClientInfoDto.cs create mode 100644 API/DTOs/ServerInfoDto.cs create mode 100644 API/DTOs/UsageInfoDto.cs create mode 100644 API/DTOs/UsageStatisticsDto.cs create mode 100644 API/Data/FileRepository.cs create mode 100644 API/Interfaces/IFileRepository.cs create mode 100644 API/Interfaces/Services/IStatsService.cs create mode 100644 API/Services/Clients/StatsApiClient.cs create mode 100644 API/Services/HostedServices/StartupTasksHostedService.cs create mode 100644 
API/Services/StatsService.cs diff --git a/.gitignore b/.gitignore index 59b1114f5..8db0960dc 100644 --- a/.gitignore +++ b/.gitignore @@ -453,4 +453,5 @@ cache/ /API/cache/ /API/temp/ _temp/ -_output/ \ No newline at end of file +_output/ +stats/ \ No newline at end of file diff --git a/API/Controllers/SettingsController.cs b/API/Controllers/SettingsController.cs index ce68e4e5a..8677074ab 100644 --- a/API/Controllers/SettingsController.cs +++ b/API/Controllers/SettingsController.cs @@ -90,14 +90,31 @@ namespace API.Controllers Configuration.UpdateLogLevel(Program.GetAppSettingFilename(), updateSettingsDto.LoggingLevel); _unitOfWork.SettingsRepository.Update(setting); } + + if (setting.Key == ServerSettingKey.AllowStatCollection && updateSettingsDto.AllowStatCollection + "" != setting.Value) + { + setting.Value = updateSettingsDto.AllowStatCollection + ""; + _unitOfWork.SettingsRepository.Update(setting); + if (!updateSettingsDto.AllowStatCollection) + { + _taskScheduler.CancelStatsTasks(); + } + else + { + _taskScheduler.ScheduleStatsTasks(); + } + } } _configuration.GetSection("Logging:LogLevel:Default").Value = updateSettingsDto.LoggingLevel + ""; if (!_unitOfWork.HasChanges()) return Ok("Nothing was updated"); if (!_unitOfWork.HasChanges() || !await _unitOfWork.CommitAsync()) + { + await _unitOfWork.RollbackAsync(); return BadRequest("There was a critical issue. Please try again."); - + } + _logger.LogInformation("Server Settings updated"); _taskScheduler.ScheduleTasks(); return Ok(updateSettingsDto); diff --git a/API/Controllers/StatsController.cs b/API/Controllers/StatsController.cs new file mode 100644 index 000000000..f35552eec --- /dev/null +++ b/API/Controllers/StatsController.cs @@ -0,0 +1,40 @@ +using System; +using System.Threading.Tasks; +using API.DTOs; +using API.Interfaces.Services; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; + +namespace API.Controllers +{ + public class StatsController : BaseApiController + { + private readonly ILogger _logger; + private readonly IStatsService _statsService; + + public StatsController(ILogger logger, IStatsService statsService) + { + _logger = logger; + _statsService = statsService; + } + + [AllowAnonymous] + [HttpPost("client-info")] + public async Task AddClientInfo([FromBody] ClientInfoDto clientInfoDto) + { + try + { + await _statsService.PathData(clientInfoDto); + + return Ok(); + } + catch (Exception e) + { + _logger.LogError(e, "Error updating the usage statistics"); + Console.WriteLine(e); + throw; + } + } + } +} \ No newline at end of file diff --git a/API/DTOs/ClientInfoDto.cs b/API/DTOs/ClientInfoDto.cs new file mode 100644 index 000000000..7070e64d7 --- /dev/null +++ b/API/DTOs/ClientInfoDto.cs @@ -0,0 +1,36 @@ +using System; + +namespace API.DTOs +{ + public class ClientInfoDto + { + public ClientInfoDto() + { + CollectedAt = DateTime.UtcNow; + } + + public string KavitaUiVersion { get; set; } + public string ScreenResolution { get; set; } + public string PlatformType { get; set; } + public DetailsVersion Browser { get; set; } + public DetailsVersion Os { get; set; } + + public DateTime? CollectedAt { get; set; } + + public bool IsTheSameDevice(ClientInfoDto clientInfoDto) + { + return (clientInfoDto.ScreenResolution ?? "").Equals(ScreenResolution) && + (clientInfoDto.PlatformType ?? "").Equals(PlatformType) && + (clientInfoDto.Browser?.Name ?? "").Equals(Browser?.Name) && + (clientInfoDto.Os?.Name ?? 
"").Equals(Os?.Name) && + clientInfoDto.CollectedAt.GetValueOrDefault().ToString("yyyy-MM-dd") + .Equals(CollectedAt.GetValueOrDefault().ToString("yyyy-MM-dd")); + } + } + + public class DetailsVersion + { + public string Name { get; set; } + public string Version { get; set; } + } +} \ No newline at end of file diff --git a/API/DTOs/ServerInfoDto.cs b/API/DTOs/ServerInfoDto.cs new file mode 100644 index 000000000..0f4a86d64 --- /dev/null +++ b/API/DTOs/ServerInfoDto.cs @@ -0,0 +1,12 @@ +namespace API.DTOs +{ + public class ServerInfoDto + { + public string Os { get; set; } + public string DotNetVersion { get; set; } + public string RunTimeVersion { get; set; } + public string KavitaVersion { get; set; } + public string BuildBranch { get; set; } + public string Culture { get; set; } + } +} \ No newline at end of file diff --git a/API/DTOs/ServerSettingDTO.cs b/API/DTOs/ServerSettingDTO.cs index a1617ff11..9a52f9c09 100644 --- a/API/DTOs/ServerSettingDTO.cs +++ b/API/DTOs/ServerSettingDTO.cs @@ -7,5 +7,6 @@ public string LoggingLevel { get; set; } public string TaskBackup { get; set; } public int Port { get; set; } + public bool AllowStatCollection { get; set; } } } \ No newline at end of file diff --git a/API/DTOs/UsageInfoDto.cs b/API/DTOs/UsageInfoDto.cs new file mode 100644 index 000000000..ba4b06b41 --- /dev/null +++ b/API/DTOs/UsageInfoDto.cs @@ -0,0 +1,24 @@ +using System.Collections.Generic; +using API.Entities.Enums; + +namespace API.DTOs +{ + public class UsageInfoDto + { + public UsageInfoDto() + { + FileTypes = new HashSet(); + LibraryTypesCreated = new HashSet(); + } + + public int UsersCount { get; set; } + public IEnumerable FileTypes { get; set; } + public IEnumerable LibraryTypesCreated { get; set; } + } + + public class LibInfo + { + public LibraryType Type { get; set; } + public int Count { get; set; } + } +} \ No newline at end of file diff --git a/API/DTOs/UsageStatisticsDto.cs b/API/DTOs/UsageStatisticsDto.cs new file mode 100644 index 000000000..1180401c3 --- /dev/null +++ b/API/DTOs/UsageStatisticsDto.cs @@ -0,0 +1,33 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace API.DTOs +{ + public class UsageStatisticsDto + { + public UsageStatisticsDto() + { + MarkAsUpdatedNow(); + ClientsInfo = new List(); + } + + public string InstallId { get; set; } + public DateTime LastUpdate { get; set; } + public UsageInfoDto UsageInfo { get; set; } + public ServerInfoDto ServerInfo { get; set; } + public List ClientsInfo { get; set; } + + public void MarkAsUpdatedNow() + { + LastUpdate = DateTime.UtcNow; + } + + public void AddClientInfo(ClientInfoDto clientInfoDto) + { + if (ClientsInfo.Any(x => x.IsTheSameDevice(clientInfoDto))) return; + + ClientsInfo.Add(clientInfoDto); + } + } +} \ No newline at end of file diff --git a/API/Data/FileRepository.cs b/API/Data/FileRepository.cs new file mode 100644 index 000000000..a90ff4df5 --- /dev/null +++ b/API/Data/FileRepository.cs @@ -0,0 +1,35 @@ +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using API.Interfaces; +using Microsoft.EntityFrameworkCore; + +namespace API.Data +{ + public class FileRepository : IFileRepository + { + private readonly DataContext _dbContext; + + public FileRepository(DataContext context) + { + _dbContext = context; + } + + public async Task> GetFileExtensions() + { + var fileExtensions = await _dbContext.MangaFile + .AsNoTracking() + .Select(x => x.FilePath) + .Distinct() + .ToArrayAsync(); + + var uniqueFileTypes = 
fileExtensions + .Select(Path.GetExtension) + .Where(x => x is not null) + .Distinct(); + + return uniqueFileTypes; + } + } +} \ No newline at end of file diff --git a/API/Data/Seed.cs b/API/Data/Seed.cs index 01befd20c..2c7eb373b 100644 --- a/API/Data/Seed.cs +++ b/API/Data/Seed.cs @@ -46,6 +46,7 @@ namespace API.Data new () {Key = ServerSettingKey.TaskBackup, Value = "weekly"}, new () {Key = ServerSettingKey.BackupDirectory, Value = Path.GetFullPath(Path.Join(Directory.GetCurrentDirectory(), "backups/"))}, new () {Key = ServerSettingKey.Port, Value = "5000"}, // Not used from DB, but DB is sync with appSettings.json + new () {Key = ServerSettingKey.AllowStatCollection, Value = "true"}, }; foreach (var defaultSetting in defaultSettings) diff --git a/API/Entities/Enums/ServerSettingKey.cs b/API/Entities/Enums/ServerSettingKey.cs index 0aa5563f2..28378d4d1 100644 --- a/API/Entities/Enums/ServerSettingKey.cs +++ b/API/Entities/Enums/ServerSettingKey.cs @@ -15,6 +15,9 @@ namespace API.Entities.Enums [Description("Port")] Port = 4, [Description("BackupDirectory")] - BackupDirectory = 5 + BackupDirectory = 5, + [Description("AllowStatCollection")] + AllowStatCollection = 6, + } } \ No newline at end of file diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs index b611cf4d6..c3db5c08a 100644 --- a/API/Extensions/ApplicationServiceExtensions.cs +++ b/API/Extensions/ApplicationServiceExtensions.cs @@ -20,6 +20,7 @@ namespace API.Extensions public static IServiceCollection AddApplicationServices(this IServiceCollection services, IConfiguration config, IWebHostEnvironment env) { services.AddAutoMapper(typeof(AutoMapperProfiles).Assembly); + services.AddScoped(); services.AddScoped(); services.AddScoped(); services.AddScoped(); @@ -32,12 +33,8 @@ namespace API.Extensions services.AddScoped(); services.AddScoped(); - - services.AddDbContext(options => - { - options.UseSqlite(config.GetConnectionString("DefaultConnection")); - options.EnableSensitiveDataLogging(env.IsDevelopment() || Configuration.GetLogLevel(Program.GetAppSettingFilename()).Equals("Debug")); - }); + services.AddSqLite(config, env); + services.ConfigRepositories(); services.AddLogging(loggingBuilder => { @@ -47,7 +44,27 @@ namespace API.Extensions return services; } - + + private static IServiceCollection AddSqLite(this IServiceCollection services, IConfiguration config, + IWebHostEnvironment env) + { + services.AddDbContext(options => + { + options.UseSqlite(config.GetConnectionString("DefaultConnection")); + options.EnableSensitiveDataLogging(env.IsDevelopment() || Configuration.GetLogLevel(Program.GetAppSettingFilename()).Equals("Debug")); + }); + + return services; + } + + private static IServiceCollection ConfigRepositories(this IServiceCollection services) + { + services.AddScoped(); + services.AddScoped(); + + return services; + } + public static IServiceCollection AddStartupTask(this IServiceCollection services) where T : class, IStartupTask => services.AddTransient(); diff --git a/API/Extensions/ServiceCollectionExtensions.cs b/API/Extensions/ServiceCollectionExtensions.cs index d3cae4191..a9d12b471 100644 --- a/API/Extensions/ServiceCollectionExtensions.cs +++ b/API/Extensions/ServiceCollectionExtensions.cs @@ -1,4 +1,7 @@ -using API.Interfaces.Services; +using System; +using API.Interfaces.Services; +using API.Services.Clients; +using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; namespace API.Extensions @@ -8,5 +11,16 @@ 
namespace API.Extensions public static IServiceCollection AddStartupTask(this IServiceCollection services) where T : class, IStartupTask => services.AddTransient(); + + public static IServiceCollection AddStatsClient(this IServiceCollection services, IConfiguration configuration) + { + services.AddHttpClient(client => + { + client.BaseAddress = new Uri("http://stats.kavitareader.com"); + client.DefaultRequestHeaders.Add("api-key", "MsnvA2DfQqxSK5jh"); + }); + + return services; + } } } \ No newline at end of file diff --git a/API/Helpers/Converters/ServerSettingConverter.cs b/API/Helpers/Converters/ServerSettingConverter.cs index 27d1cbbae..261c1bff1 100644 --- a/API/Helpers/Converters/ServerSettingConverter.cs +++ b/API/Helpers/Converters/ServerSettingConverter.cs @@ -30,6 +30,9 @@ namespace API.Helpers.Converters case ServerSettingKey.Port: destination.Port = int.Parse(row.Value); break; + case ServerSettingKey.AllowStatCollection: + destination.AllowStatCollection = bool.Parse(row.Value); + break; } } diff --git a/API/Interfaces/IFileRepository.cs b/API/Interfaces/IFileRepository.cs new file mode 100644 index 000000000..cde587855 --- /dev/null +++ b/API/Interfaces/IFileRepository.cs @@ -0,0 +1,10 @@ +using System.Collections.Generic; +using System.Threading.Tasks; + +namespace API.Interfaces +{ + public interface IFileRepository + { + Task> GetFileExtensions(); + } +} \ No newline at end of file diff --git a/API/Interfaces/ITaskScheduler.cs b/API/Interfaces/ITaskScheduler.cs index 75f70c1fa..4f3aba6f8 100644 --- a/API/Interfaces/ITaskScheduler.cs +++ b/API/Interfaces/ITaskScheduler.cs @@ -11,5 +11,7 @@ void RefreshMetadata(int libraryId, bool forceUpdate = true); void CleanupTemp(); void RefreshSeriesMetadata(int libraryId, int seriesId); + void ScheduleStatsTasks(); + void CancelStatsTasks(); } } \ No newline at end of file diff --git a/API/Interfaces/Services/IStatsService.cs b/API/Interfaces/Services/IStatsService.cs new file mode 100644 index 000000000..f91a4e522 --- /dev/null +++ b/API/Interfaces/Services/IStatsService.cs @@ -0,0 +1,13 @@ +using System.Threading.Tasks; +using API.DTOs; + +namespace API.Interfaces.Services +{ + public interface IStatsService + { + Task PathData(ClientInfoDto clientInfoDto); + Task FinalizeStats(); + Task CollectRelevantData(); + Task CollectAndSendStatsData(); + } +} \ No newline at end of file diff --git a/API/Services/Clients/StatsApiClient.cs b/API/Services/Clients/StatsApiClient.cs new file mode 100644 index 000000000..10b7ba543 --- /dev/null +++ b/API/Services/Clients/StatsApiClient.cs @@ -0,0 +1,55 @@ +using System; +using System.Net.Http; +using System.Net.Http.Json; +using System.Threading.Tasks; +using API.DTOs; +using Microsoft.Extensions.Logging; + +namespace API.Services.Clients +{ + public class StatsApiClient + { + private readonly HttpClient _client; + private readonly ILogger _logger; + + public StatsApiClient(HttpClient client, ILogger logger) + { + _client = client; + _logger = logger; + } + + public async Task SendDataToStatsServer(UsageStatisticsDto data) + { + var responseContent = string.Empty; + + try + { + var response = await _client.PostAsJsonAsync("/api/InstallationStats", data); + + responseContent = await response.Content.ReadAsStringAsync(); + + response.EnsureSuccessStatusCode(); + } + catch (HttpRequestException e) + { + var info = new + { + dataSent = data, + response = responseContent + }; + + _logger.LogError(e, "The StatsServer did not respond successfully. 
{Content}", info); + + Console.WriteLine(e); + throw; + } + catch (Exception e) + { + _logger.LogError(e, "An error happened during the request to the Stats Server"); + + Console.WriteLine(e); + throw; + } + } + } +} \ No newline at end of file diff --git a/API/Services/HostedServices/StartupTasksHostedService.cs b/API/Services/HostedServices/StartupTasksHostedService.cs new file mode 100644 index 000000000..dcdb22cca --- /dev/null +++ b/API/Services/HostedServices/StartupTasksHostedService.cs @@ -0,0 +1,54 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using API.Interfaces; +using API.Interfaces.Services; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + +namespace API.Services.HostedServices +{ + public class StartupTasksHostedService : IHostedService + { + private readonly IServiceProvider _provider; + + public StartupTasksHostedService(IServiceProvider serviceProvider) + { + _provider = serviceProvider; + } + + public async Task StartAsync(CancellationToken cancellationToken) + { + using var scope = _provider.CreateScope(); + + var taskScheduler = scope.ServiceProvider.GetRequiredService(); + taskScheduler.ScheduleTasks(); + + try + { + await ManageStartupStatsTasks(scope, taskScheduler); + } + catch (Exception e) + { + //If stats startup fail the user can keep using the app + } + } + + private async Task ManageStartupStatsTasks(IServiceScope serviceScope, ITaskScheduler taskScheduler) + { + var settingsRepository = serviceScope.ServiceProvider.GetRequiredService(); + + var settingsDto = await settingsRepository.GetSettingsDtoAsync(); + + if (!settingsDto.AllowStatCollection) return; + + taskScheduler.ScheduleStatsTasks(); + + var statsService = serviceScope.ServiceProvider.GetRequiredService(); + + await statsService.CollectAndSendStatsData(); + } + + public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; + } +} \ No newline at end of file diff --git a/API/Services/StatsService.cs b/API/Services/StatsService.cs new file mode 100644 index 000000000..4d5e3a315 --- /dev/null +++ b/API/Services/StatsService.cs @@ -0,0 +1,186 @@ +using System; +using System.IO; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using API.Data; +using API.DTOs; +using API.Interfaces; +using API.Interfaces.Services; +using API.Services.Clients; +using Kavita.Common; +using Kavita.Common.EnvironmentInfo; +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Logging; + +namespace API.Services +{ + public class StatsService : IStatsService + { + private const string TempFilePath = "stats/"; + private const string TempFileName = "app_stats.json"; + + private readonly StatsApiClient _client; + private readonly DataContext _dbContext; + private readonly ILogger _logger; + private readonly IFileRepository _fileRepository; + + public StatsService(StatsApiClient client, DataContext dbContext, ILogger logger, + IFileRepository fileRepository) + { + _client = client; + _dbContext = dbContext; + _logger = logger; + _fileRepository = fileRepository; + } + + private static string FinalPath => Path.Combine(Directory.GetCurrentDirectory(), TempFilePath, TempFileName); + private static bool FileExists => File.Exists(FinalPath); + + public async Task PathData(ClientInfoDto clientInfoDto) + { + _logger.LogInformation("Pathing client data to the file"); + + var statisticsDto = await GetData(); + + 
statisticsDto.AddClientInfo(clientInfoDto); + + await SaveFile(statisticsDto); + } + + public async Task CollectRelevantData() + { + _logger.LogInformation("Collecting data from the server and database"); + + _logger.LogInformation("Collecting usage info"); + var usageInfo = await GetUsageInfo(); + + _logger.LogInformation("Collecting server info"); + var serverInfo = GetServerInfo(); + + await PathData(serverInfo, usageInfo); + } + + public async Task FinalizeStats() + { + try + { + _logger.LogInformation("Finalizing Stats collection flow"); + + var data = await GetExistingData(); + + _logger.LogInformation("Sending data to the Stats server"); + await _client.SendDataToStatsServer(data); + + _logger.LogInformation("Deleting the file from disk"); + if (FileExists) File.Delete(FinalPath); + } + catch (Exception e) + { + _logger.LogError("Error Finalizing Stats collection flow", e); + throw; + } + } + + public async Task CollectAndSendStatsData() + { + await CollectRelevantData(); + await FinalizeStats(); + } + + private async Task PathData(ServerInfoDto serverInfoDto, UsageInfoDto usageInfoDto) + { + _logger.LogInformation("Pathing server and usage info to the file"); + + var data = await GetData(); + + data.ServerInfo = serverInfoDto; + data.UsageInfo = usageInfoDto; + + data.MarkAsUpdatedNow(); + + await SaveFile(data); + } + + private async ValueTask GetData() + { + if (!FileExists) return new UsageStatisticsDto {InstallId = HashUtil.AnonymousToken()}; + + return await GetExistingData(); + } + + private async Task GetUsageInfo() + { + var usersCount = await _dbContext.Users.CountAsync(); + + var libsCountByType = await _dbContext.Library + .AsNoTracking() + .GroupBy(x => x.Type) + .Select(x => new LibInfo {Type = x.Key, Count = x.Count()}) + .ToArrayAsync(); + + var uniqueFileTypes = await _fileRepository.GetFileExtensions(); + + var usageInfo = new UsageInfoDto + { + UsersCount = usersCount, + LibraryTypesCreated = libsCountByType, + FileTypes = uniqueFileTypes + }; + + return usageInfo; + } + + private static ServerInfoDto GetServerInfo() + { + var serverInfo = new ServerInfoDto + { + Os = RuntimeInformation.OSDescription, + DotNetVersion = Environment.Version.ToString(), + RunTimeVersion = RuntimeInformation.FrameworkDescription, + KavitaVersion = BuildInfo.Version.ToString(), + Culture = Thread.CurrentThread.CurrentCulture.Name, + BuildBranch = BuildInfo.Branch + }; + + return serverInfo; + } + + private async Task GetExistingData() + { + _logger.LogInformation("Fetching existing data from file"); + var existingDataJson = await GetFileDataAsString(); + + _logger.LogInformation("Deserializing data from file to object"); + var existingData = JsonSerializer.Deserialize(existingDataJson); + + return existingData; + } + + private async Task GetFileDataAsString() + { + _logger.LogInformation("Reading file from disk"); + return await File.ReadAllTextAsync(FinalPath); + } + + private async Task SaveFile(UsageStatisticsDto statisticsDto) + { + _logger.LogInformation("Saving file"); + + var finalDirectory = FinalPath.Replace(TempFileName, string.Empty); + if (!Directory.Exists(finalDirectory)) + { + _logger.LogInformation("Creating tmp directory"); + Directory.CreateDirectory(finalDirectory); + } + + _logger.LogInformation("Serializing data to write"); + var dataJson = JsonSerializer.Serialize(statisticsDto); + + _logger.LogInformation("Writing file to the disk"); + await File.WriteAllTextAsync(FinalPath, dataJson); + } + } +} \ No newline at end of file diff --git 
a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs index b284fd9f7..61ee114b3 100644 --- a/API/Services/TaskScheduler.cs +++ b/API/Services/TaskScheduler.cs @@ -19,11 +19,14 @@ namespace API.Services private readonly IBackupService _backupService; private readonly ICleanupService _cleanupService; + private readonly IStatsService _statsService; + public static BackgroundJobServer Client => new BackgroundJobServer(); public TaskScheduler(ICacheService cacheService, ILogger logger, IScannerService scannerService, - IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService) + IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, + ICleanupService cleanupService, IStatsService statsService) { _cacheService = cacheService; _logger = logger; @@ -32,6 +35,7 @@ namespace API.Services _metadataService = metadataService; _backupService = backupService; _cleanupService = cleanupService; + _statsService = statsService; } public void ScheduleTasks() @@ -65,6 +69,33 @@ namespace API.Services RecurringJob.AddOrUpdate("cleanup", () => _cleanupService.Cleanup(), Cron.Daily); } + #region StatsTasks + + private const string SendDataTask = "finalize-stats"; + public void ScheduleStatsTasks() + { + var allowStatCollection = bool.Parse(Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.AllowStatCollection)).GetAwaiter().GetResult().Value); + if (!allowStatCollection) + { + _logger.LogDebug("User has opted out of stat collection, not registering tasks"); + return; + } + + _logger.LogDebug("Adding StatsTasks"); + + _logger.LogDebug("Scheduling Send data to the Stats server {Setting}", nameof(Cron.Daily)); + RecurringJob.AddOrUpdate(SendDataTask, () => _statsService.CollectAndSendStatsData(), Cron.Daily); + } + + public void CancelStatsTasks() + { + _logger.LogDebug("Cancelling/Removing StatsTasks"); + + RecurringJob.RemoveIfExists(SendDataTask); + } + + #endregion + public void ScanLibrary(int libraryId, bool forceUpdate = false) { _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId); diff --git a/API/Startup.cs b/API/Startup.cs index 97d64145e..f2b648d24 100644 --- a/API/Startup.cs +++ b/API/Startup.cs @@ -2,9 +2,9 @@ using System; using System.IO.Compression; using System.Linq; using API.Extensions; -using API.Interfaces; using API.Middleware; using API.Services; +using API.Services.HostedServices; using Hangfire; using Hangfire.MemoryStorage; using Kavita.Common.EnvironmentInfo; @@ -64,6 +64,8 @@ namespace API services.AddResponseCaching(); + services.AddStatsClient(_config); + services.AddHangfire(configuration => configuration .UseSimpleAssemblyNameTypeSerializer() .UseRecommendedSerializerSettings() @@ -71,11 +73,15 @@ namespace API // Add the processing server as IHostedService services.AddHangfireServer(); + + // Add IHostedService for startup tasks + // Any services that should be bootstrapped go here + services.AddHostedService(); } // This method gets called by the runtime. Use this method to configure the HTTP request pipeline. 
public void Configure(IApplicationBuilder app, IBackgroundJobClient backgroundJobs, IWebHostEnvironment env, - IHostApplicationLifetime applicationLifetime, ITaskScheduler taskScheduler) + IHostApplicationLifetime applicationLifetime) { app.UseMiddleware(); @@ -137,9 +143,6 @@ namespace API { Console.WriteLine($"Kavita - v{BuildInfo.Version}"); }); - - // Any services that should be bootstrapped go here - taskScheduler.ScheduleTasks(); } private void OnShutdown() diff --git a/API/appsettings.Development.json b/API/appsettings.Development.json index 119a1eb46..35e9218b9 100644 --- a/API/appsettings.Development.json +++ b/API/appsettings.Development.json @@ -3,6 +3,11 @@ "DefaultConnection": "Data source=kavita.db" }, "TokenKey": "super secret unguessable key", + "StatsOptions": { + "ServerUrl": "http://localhost:5002", + "ServerSecret": "here's where the api key goes", + "SendDataAt": "23:50" + }, "Logging": { "LogLevel": { "Default": "Debug", From 568544f65274b101174a12f40b769d09439404a1 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Thu, 24 Jun 2021 12:25:59 -0500 Subject: [PATCH 31/55] Update FUNDING.yml Added opencollective sponsorship --- .github/FUNDING.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index a5717dec4..7ec0eb972 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1,8 +1,8 @@ # These are supported funding model platforms -github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] +github: [majora2007] # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] patreon: # Replace with a single Patreon username -open_collective: # Replace with a single Open Collective username +open_collective: kavita # Replace with a single Open Collective username ko_fi: # Replace with a single Ko-fi username tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry From b95834239493c0912acf8322a635bebe9882f1a5 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Thu, 24 Jun 2021 12:26:37 -0500 Subject: [PATCH 32/55] Update FUNDING.yml Removed user from sponsorship --- .github/FUNDING.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index 7ec0eb972..8bca3c1ef 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1,6 +1,6 @@ # These are supported funding model platforms -github: [majora2007] # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] +github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] patreon: # Replace with a single Patreon username open_collective: kavita # Replace with a single Open Collective username ko_fi: # Replace with a single Ko-fi username From be2b78fa5a968be0878d67daa3f666d23fb6bc9c Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Thu, 24 Jun 2021 19:31:42 -0500 Subject: [PATCH 33/55] Manga Redesign (#321) * Code cleanup, refactored FileRepository into Unit of Work. * Added AutoCloseMenu and ReaderMode user perferences to match UI * Added extra information to ChapterInfo * Build changes * Updated the readme to have open collective information and thanks to sponsors * Fixed an issue with UnitOfWork refactor and how stats was bootsrapped. Replaced stats.kavitareader with a temp url to test out redirection bug. 
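
For reference, the "refactored FileRepository into Unit of Work" item follows the same shape as the other repositories: consumers stop injecting IFileRepository directly and resolve it through IUnitOfWork instead. The sketch below only illustrates that shape; the IFileRepository signature matches the interface added earlier in this series, while the in-memory repository, the FileRepository property on the unit of work, and the Demo entry point are illustrative assumptions rather than Kavita code.

    using System;
    using System.Collections.Generic;
    using System.Threading.Tasks;

    public interface IFileRepository
    {
        // Same shape as the interface introduced in this patch series.
        Task<IEnumerable<string>> GetFileExtensions();
    }

    // Illustrative stand-in; the real implementation queries MangaFile paths via EF Core.
    public class InMemoryFileRepository : IFileRepository
    {
        public Task<IEnumerable<string>> GetFileExtensions() =>
            Task.FromResult<IEnumerable<string>>(new[] { ".cbz", ".epub" });
    }

    public interface IUnitOfWork
    {
        // Assumed property: repositories hang off the unit of work rather than being
        // registered and injected individually.
        IFileRepository FileRepository { get; }
    }

    public class UnitOfWork : IUnitOfWork
    {
        public IFileRepository FileRepository { get; } = new InMemoryFileRepository();
    }

    public static class Demo
    {
        public static async Task Main()
        {
            IUnitOfWork unitOfWork = new UnitOfWork();
            foreach (var ext in await unitOfWork.FileRepository.GetFileExtensions())
            {
                Console.WriteLine(ext);
            }
        }
    }

Presumably StatsService now asks the unit of work for FileRepository instead of taking IFileRepository in its constructor, keeping repository construction in one place; the actual wiring is in the UnitOfWork.cs and StatsService.cs hunks of this patch.
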
--- .../Comparers/NaturalSortComparerTest.cs | 4 + API/API.csproj | 124 +++ API/Comparators/StringLogicalComparer.cs | 20 +- .../CustomOptions/StatsOptions.cs | 30 + API/Controllers/CollectionController.cs | 1 - API/Controllers/ReaderController.cs | 25 +- API/Controllers/ServerController.cs | 2 - API/Controllers/UsersController.cs | 2 + API/DTOs/CollectionTagDto.cs | 4 +- API/DTOs/Reader/ChapterInfoDto.cs | 16 + API/DTOs/UpdateSeriesMetadataDto.cs | 1 - API/DTOs/UserPreferencesDto.cs | 2 + ...10622164318_NewUserPreferences.Designer.cs | 869 ++++++++++++++++++ .../20210622164318_NewUserPreferences.cs | 35 + .../Migrations/DataContextModelSnapshot.cs | 6 + API/Data/SeriesRepository.cs | 2 +- API/Data/UnitOfWork.cs | 3 +- API/Entities/AppUserPreferences.cs | 13 +- API/Entities/Enums/ReaderMode.cs | 14 + API/Entities/Series.cs | 2 +- .../ApplicationServiceExtensions.cs | 16 +- API/Extensions/ServiceCollectionExtensions.cs | 2 +- API/Interfaces/IUnitOfWork.cs | 1 + API/Interfaces/Services/IArchiveService.cs | 1 - API/Program.cs | 8 +- API/Services/CacheService.cs | 4 - API/Services/Clients/StatsApiClient.cs | 7 +- API/Services/ComicInfo.cs | 1 + .../StartupTasksHostedService.cs | 6 +- API/Services/StatsService.cs | 12 +- CONTRIBUTING.md | 56 ++ Kavita.Common/Kavita.Common.csproj | 2 +- Logo/dottrace.svg | 33 + Logo/jetbrains.svg | 66 ++ Logo/kavita.svg | 124 +++ Logo/resharper.svg | 50 + Logo/rider.svg | 42 + Logo/sentry.svg | 1 + README.md | 79 +- build.sh | 9 + 40 files changed, 1608 insertions(+), 87 deletions(-) create mode 100644 API/Configurations/CustomOptions/StatsOptions.cs create mode 100644 API/DTOs/Reader/ChapterInfoDto.cs create mode 100644 API/Data/Migrations/20210622164318_NewUserPreferences.Designer.cs create mode 100644 API/Data/Migrations/20210622164318_NewUserPreferences.cs create mode 100644 API/Entities/Enums/ReaderMode.cs create mode 100644 CONTRIBUTING.md create mode 100644 Logo/dottrace.svg create mode 100644 Logo/jetbrains.svg create mode 100644 Logo/kavita.svg create mode 100644 Logo/resharper.svg create mode 100644 Logo/rider.svg create mode 100644 Logo/sentry.svg diff --git a/API.Tests/Comparers/NaturalSortComparerTest.cs b/API.Tests/Comparers/NaturalSortComparerTest.cs index 39bad2003..d7c58d45a 100644 --- a/API.Tests/Comparers/NaturalSortComparerTest.cs +++ b/API.Tests/Comparers/NaturalSortComparerTest.cs @@ -42,6 +42,10 @@ namespace API.Tests.Comparers new[] {"3and4.cbz", "The World God Only Knows - Oneshot.cbz", "5.cbz", "1and2.cbz"}, new[] {"1and2.cbz", "3and4.cbz", "5.cbz", "The World God Only Knows - Oneshot.cbz"} )] + [InlineData( + new[] {"Solo Leveling - c000 (v01) - p000 [Cover] [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p001 [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p002 [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p003 [dig] [Yen Press] [LuCaZ].jpg"}, + new[] {"Solo Leveling - c000 (v01) - p000 [Cover] [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p001 [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p002 [dig] [Yen Press] [LuCaZ].jpg", "Solo Leveling - c000 (v01) - p003 [dig] [Yen Press] [LuCaZ].jpg"} + )] public void TestNaturalSortComparer(string[] input, string[] expected) { Array.Sort(input, _nc); diff --git a/API/API.csproj b/API/API.csproj index 458830ca1..50a464d0b 100644 --- a/API/API.csproj +++ b/API/API.csproj @@ -64,23 +64,147 @@ + + + + <_ContentIncludedByDefault Remove="logs\kavita.json" /> + <_ContentIncludedByDefault Remove="wwwroot\3rdpartylicenses.txt" 
/> + <_ContentIncludedByDefault Remove="wwwroot\6.d9925ea83359bb4c7278.js" /> + <_ContentIncludedByDefault Remove="wwwroot\6.d9925ea83359bb4c7278.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\7.860cdd6fd9d758e6c210.js" /> + <_ContentIncludedByDefault Remove="wwwroot\7.860cdd6fd9d758e6c210.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\8.028f6737a2f0621d40c7.js" /> + <_ContentIncludedByDefault Remove="wwwroot\8.028f6737a2f0621d40c7.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\EBGarmond\EBGaramond-Italic-VariableFont_wght.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\EBGarmond\EBGaramond-VariableFont_wght.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\EBGarmond\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Black.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-BlackItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-BoldItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ExtraBold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ExtraBoldItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ExtraLight.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ExtraLightItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Italic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Light.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-LightItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Medium.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-MediumItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-SemiBold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-SemiBoldItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-Thin.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\FiraSans-ThinItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Fira_Sans\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Black.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-BlackItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-BoldItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Italic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Light.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-LightItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-Thin.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\Lato-ThinItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Lato\OFL.txt" /> + <_ContentIncludedByDefault 
Remove="wwwroot\assets\fonts\Libre_Baskerville\LibreBaskerville-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Baskerville\LibreBaskerville-Italic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Baskerville\LibreBaskerville-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Baskerville\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Caslon\LibreCaslonText-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Caslon\LibreCaslonText-Italic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Caslon\LibreCaslonText-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Libre_Caslon\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Black.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-BlackItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-BoldItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Italic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Light.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-LightItalic.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\Merriweather-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Merriweather\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Nanum_Gothic\NanumGothic-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Nanum_Gothic\NanumGothic-ExtraBold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Nanum_Gothic\NanumGothic-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Nanum_Gothic\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\Oswald-VariableFont_wght.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\README.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-Bold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-ExtraLight.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-Light.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-Medium.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\Oswald\static\Oswald-SemiBold.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\RocknRoll_One\OFL.txt" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\fonts\RocknRoll_One\RocknRollOne-Regular.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder-min.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder2-min.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder2.dark-min.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\error-placeholder2.dark.png" /> + <_ContentIncludedByDefault 
Remove="wwwroot\assets\images\error-placeholder2.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\image-placeholder-min.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\image-placeholder.dark-min.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\image-placeholder.dark.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\image-placeholder.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\images\preset-light.png" /> + <_ContentIncludedByDefault Remove="wwwroot\assets\themes\dark.scss" /> + <_ContentIncludedByDefault Remove="wwwroot\common.ad975892146299f80adb.js" /> + <_ContentIncludedByDefault Remove="wwwroot\common.ad975892146299f80adb.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\EBGaramond-VariableFont_wght.2a1da2dbe7a28d63f8cb.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.0fea24969112a781acd2.eot" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.c967a94cfbe2b06627ff.woff2" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.dc2cbadd690e1d4b2c9c.woff" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.e33e2cf6e02cac2ccb77.svg" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-brands-400.ec82f282c7f54b637098.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.06b9d19ced8d17f3d5cb.svg" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.08f9891a6f44d9546678.eot" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.1008b5226941c24f4468.woff2" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.1069ea55beaa01060302.woff" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-regular-400.1495f578452eb676f730.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.10ecefc282f2761808bf.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.371dbce0dd46bd4d2033.svg" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.3a24a60e7f9c6574864a.eot" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.3ceb50e7bcafb577367c.woff2" /> + <_ContentIncludedByDefault Remove="wwwroot\fa-solid-900.46fdbd2d897f8824e63c.woff" /> + <_ContentIncludedByDefault Remove="wwwroot\favicon.ico" /> + <_ContentIncludedByDefault Remove="wwwroot\FiraSans-Regular.1c0bf0728b51cb9f2ddc.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\index.html" /> + <_ContentIncludedByDefault Remove="wwwroot\Lato-Regular.9919edff6283018571ad.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\LibreBaskerville-Regular.a27f99ca45522bb3d56d.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\main.44f5c0973044295d8be0.js" /> + <_ContentIncludedByDefault Remove="wwwroot\main.44f5c0973044295d8be0.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\Merriweather-Regular.55c73e48e04ec926ebfe.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\NanumGothic-Regular.6c84540de7730f833d6c.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\polyfills.348e08e9d0e910a15938.js" /> + <_ContentIncludedByDefault Remove="wwwroot\polyfills.348e08e9d0e910a15938.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\RocknRollOne-Regular.c75da4712d1e65ed1f69.ttf" /> + <_ContentIncludedByDefault Remove="wwwroot\runtime.ea545c6916f85411478f.js" /> + <_ContentIncludedByDefault Remove="wwwroot\runtime.ea545c6916f85411478f.js.map" /> + <_ContentIncludedByDefault Remove="wwwroot\styles.4bd902bb3037f36f2c64.css" /> + <_ContentIncludedByDefault Remove="wwwroot\styles.4bd902bb3037f36f2c64.css.map" /> + <_ContentIncludedByDefault 
Remove="wwwroot\vendor.6b2a0912ae80e6fd297f.js" /> + <_ContentIncludedByDefault Remove="wwwroot\vendor.6b2a0912ae80e6fd297f.js.map" /> diff --git a/API/Comparators/StringLogicalComparer.cs b/API/Comparators/StringLogicalComparer.cs index fe930c45c..67aa72225 100644 --- a/API/Comparators/StringLogicalComparer.cs +++ b/API/Comparators/StringLogicalComparer.cs @@ -2,7 +2,7 @@ // Version 2 // Taken from: https://www.codeproject.com/Articles/11016/Numeric-String-Sort-in-C -using System; +using static System.Char; namespace API.Comparators { @@ -20,26 +20,26 @@ namespace API.Comparators if (string.IsNullOrEmpty(s2)) return -1; //WE style, special case - var sp1 = Char.IsLetterOrDigit(s1, 0); - var sp2 = Char.IsLetterOrDigit(s2, 0); + var sp1 = IsLetterOrDigit(s1, 0); + var sp2 = IsLetterOrDigit(s2, 0); if(sp1 && !sp2) return 1; if(!sp1 && sp2) return -1; int i1 = 0, i2 = 0; //current index while(true) { - var c1 = Char.IsDigit(s1, i1); - var c2 = Char.IsDigit(s2, i2); + var c1 = IsDigit(s1, i1); + var c2 = IsDigit(s2, i2); int r; // temp result if(!c1 && !c2) { - bool letter1 = Char.IsLetter(s1, i1); - bool letter2 = Char.IsLetter(s2, i2); + bool letter1 = IsLetter(s1, i1); + bool letter2 = IsLetter(s2, i2); if((letter1 && letter2) || (!letter1 && !letter2)) { if(letter1 && letter2) { - r = Char.ToLower(s1[i1]).CompareTo(Char.ToLower(s2[i2])); + r = ToLower(s1[i1]).CompareTo(ToLower(s2[i2])); } else { @@ -114,8 +114,8 @@ namespace API.Comparators { nzStart = start; end = start; - bool countZeros = true; - while(Char.IsDigit(s, end)) + var countZeros = true; + while(IsDigit(s, end)) { if(countZeros && s[end].Equals('0')) { diff --git a/API/Configurations/CustomOptions/StatsOptions.cs b/API/Configurations/CustomOptions/StatsOptions.cs new file mode 100644 index 000000000..ac0cd0ac5 --- /dev/null +++ b/API/Configurations/CustomOptions/StatsOptions.cs @@ -0,0 +1,30 @@ +using System; + +namespace API.Configurations.CustomOptions +{ + public class StatsOptions + { + public string ServerUrl { get; set; } + public string ServerSecret { get; set; } + public string SendDataAt { get; set; } + + private const char Separator = ':'; + + public short SendDataHour => GetValueFromSendAt(0); + public short SendDataMinute => GetValueFromSendAt(1); + + // The expected SendDataAt format is: Hour:Minute. Ex: 19:45 + private short GetValueFromSendAt(int index) + { + var key = $"{nameof(StatsOptions)}:{nameof(SendDataAt)}"; + + if (string.IsNullOrEmpty(SendDataAt)) + throw new InvalidOperationException($"{key} is invalid. Check the app settings file"); + + if (short.TryParse(SendDataAt.Split(Separator)[index], out var parsedValue)) + return parsedValue; + + throw new InvalidOperationException($"Could not parse {key}. 
Check the app settings file"); + } + } +} \ No newline at end of file diff --git a/API/Controllers/CollectionController.cs b/API/Controllers/CollectionController.cs index 6ad5fdbaf..e09f8592a 100644 --- a/API/Controllers/CollectionController.cs +++ b/API/Controllers/CollectionController.cs @@ -10,7 +10,6 @@ using API.Interfaces; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Mvc; -using Microsoft.Extensions.Logging; namespace API.Controllers { diff --git a/API/Controllers/ReaderController.cs b/API/Controllers/ReaderController.cs index 2ac3d51fe..b9bc15fb7 100644 --- a/API/Controllers/ReaderController.cs +++ b/API/Controllers/ReaderController.cs @@ -5,6 +5,7 @@ using System.Linq; using System.Threading.Tasks; using API.Comparators; using API.DTOs; +using API.DTOs.Reader; using API.Entities; using API.Extensions; using API.Interfaces; @@ -49,15 +50,27 @@ namespace API.Controllers return File(content, "image/" + format); } - - [HttpGet("chapter-path")] - public async Task> GetImagePath(int chapterId) + + [HttpGet("chapter-info")] + public async Task> GetChapterInfo(int chapterId) { var chapter = await _cacheService.Ensure(chapterId); - if (chapter == null) return BadRequest("There was an issue finding image file for reading"); - + if (chapter == null) return BadRequest("Could not find Chapter"); + var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(chapter.VolumeId); + if (volume == null) return BadRequest("Could not find Volume"); var (_, mangaFile) = await _cacheService.GetCachedPagePath(chapter, 0); - return Ok(mangaFile.FilePath); + var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId); + + return Ok(new ChapterInfoDto() + { + ChapterNumber = chapter.Range, + VolumeNumber = volume.Number + string.Empty, + VolumeId = volume.Id, + FileName = Path.GetFileName(mangaFile.FilePath), + SeriesName = series?.Name, + IsSpecial = chapter.IsSpecial, + Pages = chapter.Pages, + }); } [HttpGet("get-bookmark")] diff --git a/API/Controllers/ServerController.cs b/API/Controllers/ServerController.cs index 7bedceb3f..398de3efc 100644 --- a/API/Controllers/ServerController.cs +++ b/API/Controllers/ServerController.cs @@ -1,10 +1,8 @@ using System; using System.IO; -using System.IO.Compression; using System.Threading.Tasks; using API.Extensions; using API.Interfaces.Services; -using API.Services; using Kavita.Common; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; diff --git a/API/Controllers/UsersController.cs b/API/Controllers/UsersController.cs index 3a9a44d6c..ee4c9ac66 100644 --- a/API/Controllers/UsersController.cs +++ b/API/Controllers/UsersController.cs @@ -61,6 +61,8 @@ namespace API.Controllers existingPreferences.ReadingDirection = preferencesDto.ReadingDirection; existingPreferences.ScalingOption = preferencesDto.ScalingOption; existingPreferences.PageSplitOption = preferencesDto.PageSplitOption; + existingPreferences.AutoCloseMenu = preferencesDto.AutoCloseMenu; + existingPreferences.ReaderMode = preferencesDto.ReaderMode; existingPreferences.BookReaderMargin = preferencesDto.BookReaderMargin; existingPreferences.BookReaderLineSpacing = preferencesDto.BookReaderLineSpacing; existingPreferences.BookReaderFontFamily = preferencesDto.BookReaderFontFamily; diff --git a/API/DTOs/CollectionTagDto.cs b/API/DTOs/CollectionTagDto.cs index 72027e84a..26f256562 100644 --- a/API/DTOs/CollectionTagDto.cs +++ b/API/DTOs/CollectionTagDto.cs @@ -1,6 +1,4 @@ -using 
System.Collections.Generic; - -namespace API.DTOs +namespace API.DTOs { public class CollectionTagDto { diff --git a/API/DTOs/Reader/ChapterInfoDto.cs b/API/DTOs/Reader/ChapterInfoDto.cs new file mode 100644 index 000000000..850149016 --- /dev/null +++ b/API/DTOs/Reader/ChapterInfoDto.cs @@ -0,0 +1,16 @@ +namespace API.DTOs.Reader +{ + public class ChapterInfoDto + { + + public string ChapterNumber { get; set; } + public string VolumeNumber { get; set; } + public int VolumeId { get; set; } + public string SeriesName { get; set; } + public string ChapterTitle { get; set; } = ""; + public int Pages { get; set; } + public string FileName { get; set; } + public bool IsSpecial { get; set; } + + } +} \ No newline at end of file diff --git a/API/DTOs/UpdateSeriesMetadataDto.cs b/API/DTOs/UpdateSeriesMetadataDto.cs index fd71526b7..a9c852632 100644 --- a/API/DTOs/UpdateSeriesMetadataDto.cs +++ b/API/DTOs/UpdateSeriesMetadataDto.cs @@ -1,5 +1,4 @@ using System.Collections.Generic; -using API.Entities; namespace API.DTOs { diff --git a/API/DTOs/UserPreferencesDto.cs b/API/DTOs/UserPreferencesDto.cs index 0d8f3ae68..03dbeaa5e 100644 --- a/API/DTOs/UserPreferencesDto.cs +++ b/API/DTOs/UserPreferencesDto.cs @@ -7,6 +7,8 @@ namespace API.DTOs public ReadingDirection ReadingDirection { get; set; } public ScalingOption ScalingOption { get; set; } public PageSplitOption PageSplitOption { get; set; } + public ReaderMode ReaderMode { get; set; } + public bool AutoCloseMenu { get; set; } public bool BookReaderDarkMode { get; set; } = false; public int BookReaderMargin { get; set; } public int BookReaderLineSpacing { get; set; } diff --git a/API/Data/Migrations/20210622164318_NewUserPreferences.Designer.cs b/API/Data/Migrations/20210622164318_NewUserPreferences.Designer.cs new file mode 100644 index 000000000..2797f05ab --- /dev/null +++ b/API/Data/Migrations/20210622164318_NewUserPreferences.Designer.cs @@ -0,0 +1,869 @@ +// +using System; +using API.Data; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; + +namespace API.Data.Migrations +{ + [DbContext(typeof(DataContext))] + [Migration("20210622164318_NewUserPreferences")] + partial class NewUserPreferences + { + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder + .HasAnnotation("ProductVersion", "5.0.4"); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Name") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedName") + .IsUnique() + .HasDatabaseName("RoleNameIndex"); + + b.ToTable("AspNetRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AccessFailedCount") + .HasColumnType("INTEGER"); + + b.Property("ConcurrencyStamp") + .IsConcurrencyToken() + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("Email") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("EmailConfirmed") + .HasColumnType("INTEGER"); + + b.Property("LastActive") + .HasColumnType("TEXT"); + + 
b.Property("LockoutEnabled") + .HasColumnType("INTEGER"); + + b.Property("LockoutEnd") + .HasColumnType("TEXT"); + + b.Property("NormalizedEmail") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("NormalizedUserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.Property("PasswordHash") + .HasColumnType("TEXT"); + + b.Property("PhoneNumber") + .HasColumnType("TEXT"); + + b.Property("PhoneNumberConfirmed") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SecurityStamp") + .HasColumnType("TEXT"); + + b.Property("TwoFactorEnabled") + .HasColumnType("INTEGER"); + + b.Property("UserName") + .HasMaxLength(256) + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("NormalizedEmail") + .HasDatabaseName("EmailIndex"); + + b.HasIndex("NormalizedUserName") + .IsUnique() + .HasDatabaseName("UserNameIndex"); + + b.ToTable("AspNetUsers"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("AutoCloseMenu") + .HasColumnType("INTEGER"); + + b.Property("BookReaderDarkMode") + .HasColumnType("INTEGER"); + + b.Property("BookReaderFontFamily") + .HasColumnType("TEXT"); + + b.Property("BookReaderFontSize") + .HasColumnType("INTEGER"); + + b.Property("BookReaderLineSpacing") + .HasColumnType("INTEGER"); + + b.Property("BookReaderMargin") + .HasColumnType("INTEGER"); + + b.Property("BookReaderReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("BookReaderTapToPaginate") + .HasColumnType("INTEGER"); + + b.Property("PageSplitOption") + .HasColumnType("INTEGER"); + + b.Property("ReaderMode") + .HasColumnType("INTEGER"); + + b.Property("ReadingDirection") + .HasColumnType("INTEGER"); + + b.Property("ScalingOption") + .HasColumnType("INTEGER"); + + b.Property("SiteDarkMode") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId") + .IsUnique(); + + b.ToTable("AppUserPreferences"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("BookScrollId") + .HasColumnType("TEXT"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("PagesRead") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserProgresses"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AppUserId") + .HasColumnType("INTEGER"); + + b.Property("Rating") + .HasColumnType("INTEGER"); + + b.Property("Review") + .HasColumnType("TEXT"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("AppUserId"); + + b.ToTable("AppUserRating"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("UserId", "RoleId"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetUserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + 
b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("IsSpecial") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("Range") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.Property("VolumeId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("VolumeId"); + + b.ToTable("Chapter"); + }); + + modelBuilder.Entity("API.Entities.CollectionTag", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("NormalizedTitle") + .HasColumnType("TEXT"); + + b.Property("Promoted") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.Property("Title") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("Id", "Promoted") + .IsUnique(); + + b.ToTable("CollectionTag"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("LastScanned") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("Path") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.ToTable("FolderPath"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("TEXT"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Type") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ChapterId") + .HasColumnType("INTEGER"); + + b.Property("FilePath") + .HasColumnType("TEXT"); + + b.Property("Format") + .HasColumnType("INTEGER"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("ChapterId"); + + b.ToTable("MangaFile"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("LibraryId") + .HasColumnType("INTEGER"); + + b.Property("LocalizedName") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("NormalizedName") + .HasColumnType("TEXT"); + + b.Property("OriginalName") + .HasColumnType("TEXT"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SortName") + .HasColumnType("TEXT"); + + b.Property("Summary") + .HasColumnType("TEXT"); + + b.HasKey("Id"); + + b.HasIndex("LibraryId"); + + b.HasIndex("Name", "NormalizedName", "LocalizedName", "LibraryId") + .IsUnique(); + + b.ToTable("Series"); + }); + + modelBuilder.Entity("API.Entities.SeriesMetadata", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + 
.HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId") + .IsUnique(); + + b.HasIndex("Id", "SeriesId") + .IsUnique(); + + b.ToTable("SeriesMetadata"); + }); + + modelBuilder.Entity("API.Entities.ServerSetting", b => + { + b.Property("Key") + .HasColumnType("INTEGER"); + + b.Property("RowVersion") + .IsConcurrencyToken() + .HasColumnType("INTEGER"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("Key"); + + b.ToTable("ServerSetting"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CoverImage") + .HasColumnType("BLOB"); + + b.Property("Created") + .HasColumnType("TEXT"); + + b.Property("LastModified") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Number") + .HasColumnType("INTEGER"); + + b.Property("Pages") + .HasColumnType("INTEGER"); + + b.Property("SeriesId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("SeriesId"); + + b.ToTable("Volume"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.Property("AppUsersId") + .HasColumnType("INTEGER"); + + b.Property("LibrariesId") + .HasColumnType("INTEGER"); + + b.HasKey("AppUsersId", "LibrariesId"); + + b.HasIndex("LibrariesId"); + + b.ToTable("AppUserLibrary"); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.Property("CollectionTagsId") + .HasColumnType("INTEGER"); + + b.Property("SeriesMetadatasId") + .HasColumnType("INTEGER"); + + b.HasKey("CollectionTagsId", "SeriesMetadatasId"); + + b.HasIndex("SeriesMetadatasId"); + + b.ToTable("CollectionTagSeriesMetadata"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("RoleId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RoleId"); + + b.ToTable("AspNetRoleClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("ClaimType") + .HasColumnType("TEXT"); + + b.Property("ClaimValue") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserClaims"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("ProviderKey") + .HasColumnType("TEXT"); + + b.Property("ProviderDisplayName") + .HasColumnType("TEXT"); + + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.HasKey("LoginProvider", "ProviderKey"); + + b.HasIndex("UserId"); + + b.ToTable("AspNetUserLogins"); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.Property("UserId") + .HasColumnType("INTEGER"); + + b.Property("LoginProvider") + .HasColumnType("TEXT"); + + b.Property("Name") + .HasColumnType("TEXT"); + + b.Property("Value") + .HasColumnType("TEXT"); + + b.HasKey("UserId", "LoginProvider", "Name"); + + b.ToTable("AspNetUserTokens"); + }); + + modelBuilder.Entity("API.Entities.AppUserPreferences", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + 
.WithOne("UserPreferences") + .HasForeignKey("API.Entities.AppUserPreferences", "AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserProgress", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Progresses") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRating", b => + { + b.HasOne("API.Entities.AppUser", "AppUser") + .WithMany("Ratings") + .HasForeignKey("AppUserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("AppUser"); + }); + + modelBuilder.Entity("API.Entities.AppUserRole", b => + { + b.HasOne("API.Entities.AppRole", "Role") + .WithMany("UserRoles") + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.AppUser", "User") + .WithMany("UserRoles") + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Role"); + + b.Navigation("User"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.HasOne("API.Entities.Volume", "Volume") + .WithMany("Chapters") + .HasForeignKey("VolumeId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Volume"); + }); + + modelBuilder.Entity("API.Entities.FolderPath", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Folders") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.MangaFile", b => + { + b.HasOne("API.Entities.Chapter", "Chapter") + .WithMany("Files") + .HasForeignKey("ChapterId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Chapter"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.HasOne("API.Entities.Library", "Library") + .WithMany("Series") + .HasForeignKey("LibraryId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Library"); + }); + + modelBuilder.Entity("API.Entities.SeriesMetadata", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithOne("Metadata") + .HasForeignKey("API.Entities.SeriesMetadata", "SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.HasOne("API.Entities.Series", "Series") + .WithMany("Volumes") + .HasForeignKey("SeriesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("AppUserLibrary", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("AppUsersId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.Library", null) + .WithMany() + .HasForeignKey("LibrariesId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("CollectionTagSeriesMetadata", b => + { + b.HasOne("API.Entities.CollectionTag", null) + .WithMany() + .HasForeignKey("CollectionTagsId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + + b.HasOne("API.Entities.SeriesMetadata", null) + .WithMany() + .HasForeignKey("SeriesMetadatasId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityRoleClaim", b => + { + b.HasOne("API.Entities.AppRole", null) + .WithMany() + .HasForeignKey("RoleId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + 
modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserClaim", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserLogin", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("Microsoft.AspNetCore.Identity.IdentityUserToken", b => + { + b.HasOne("API.Entities.AppUser", null) + .WithMany() + .HasForeignKey("UserId") + .OnDelete(DeleteBehavior.Cascade) + .IsRequired(); + }); + + modelBuilder.Entity("API.Entities.AppRole", b => + { + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.AppUser", b => + { + b.Navigation("Progresses"); + + b.Navigation("Ratings"); + + b.Navigation("UserPreferences"); + + b.Navigation("UserRoles"); + }); + + modelBuilder.Entity("API.Entities.Chapter", b => + { + b.Navigation("Files"); + }); + + modelBuilder.Entity("API.Entities.Library", b => + { + b.Navigation("Folders"); + + b.Navigation("Series"); + }); + + modelBuilder.Entity("API.Entities.Series", b => + { + b.Navigation("Metadata"); + + b.Navigation("Volumes"); + }); + + modelBuilder.Entity("API.Entities.Volume", b => + { + b.Navigation("Chapters"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/API/Data/Migrations/20210622164318_NewUserPreferences.cs b/API/Data/Migrations/20210622164318_NewUserPreferences.cs new file mode 100644 index 000000000..bd75d5b2c --- /dev/null +++ b/API/Data/Migrations/20210622164318_NewUserPreferences.cs @@ -0,0 +1,35 @@ +using Microsoft.EntityFrameworkCore.Migrations; + +namespace API.Data.Migrations +{ + public partial class NewUserPreferences : Migration + { + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.AddColumn( + name: "AutoCloseMenu", + table: "AppUserPreferences", + type: "INTEGER", + nullable: false, + defaultValue: false); + + migrationBuilder.AddColumn( + name: "ReaderMode", + table: "AppUserPreferences", + type: "INTEGER", + nullable: false, + defaultValue: 0); + } + + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropColumn( + name: "AutoCloseMenu", + table: "AppUserPreferences"); + + migrationBuilder.DropColumn( + name: "ReaderMode", + table: "AppUserPreferences"); + } + } +} diff --git a/API/Data/Migrations/DataContextModelSnapshot.cs b/API/Data/Migrations/DataContextModelSnapshot.cs index f14402ece..c6d49fc2a 100644 --- a/API/Data/Migrations/DataContextModelSnapshot.cs +++ b/API/Data/Migrations/DataContextModelSnapshot.cs @@ -127,6 +127,9 @@ namespace API.Data.Migrations b.Property("AppUserId") .HasColumnType("INTEGER"); + b.Property("AutoCloseMenu") + .HasColumnType("INTEGER"); + b.Property("BookReaderDarkMode") .HasColumnType("INTEGER"); @@ -151,6 +154,9 @@ namespace API.Data.Migrations b.Property("PageSplitOption") .HasColumnType("INTEGER"); + b.Property("ReaderMode") + .HasColumnType("INTEGER"); + b.Property("ReadingDirection") .HasColumnType("INTEGER"); diff --git a/API/Data/SeriesRepository.cs b/API/Data/SeriesRepository.cs index 0f725444b..07d7102e1 100644 --- a/API/Data/SeriesRepository.cs +++ b/API/Data/SeriesRepository.cs @@ -289,7 +289,7 @@ namespace API.Data /// /// /// Library to restrict to, if 0, will apply to all libraries - /// How many series to pick. 
+ /// Contains pagination information /// public async Task> GetRecentlyAdded(int libraryId, int userId, UserParams userParams) { diff --git a/API/Data/UnitOfWork.cs b/API/Data/UnitOfWork.cs index ba89d0612..394e6fed1 100644 --- a/API/Data/UnitOfWork.cs +++ b/API/Data/UnitOfWork.cs @@ -29,7 +29,8 @@ namespace API.Data public IAppUserProgressRepository AppUserProgressRepository => new AppUserProgressRepository(_context); public ICollectionTagRepository CollectionTagRepository => new CollectionTagRepository(_context, _mapper); - + public IFileRepository FileRepository => new FileRepository(_context); + public bool Commit() { return _context.SaveChanges() > 0; diff --git a/API/Entities/AppUserPreferences.cs b/API/Entities/AppUserPreferences.cs index fb5fe9bc2..149512e00 100644 --- a/API/Entities/AppUserPreferences.cs +++ b/API/Entities/AppUserPreferences.cs @@ -17,7 +17,18 @@ namespace API.Entities /// Manga Reader Option: Which side of a split image should we show first /// public PageSplitOption PageSplitOption { get; set; } = PageSplitOption.SplitRightToLeft; - + /// + /// Manga Reader Option: How the manga reader should perform paging or reading of the file + /// + /// Webtoon uses scrolling to page, MANGA_LR uses paging by clicking left/right side of reader, MANGA_UD uses paging + /// by clicking top/bottom sides of reader. + /// + /// + public ReaderMode ReaderMode { get; set; } + /// + /// Manga Reader Option: Allow the menu to close after 6 seconds without interaction + /// + public bool AutoCloseMenu { get; set; } /// /// Book Reader Option: Should the background color be dark /// diff --git a/API/Entities/Enums/ReaderMode.cs b/API/Entities/Enums/ReaderMode.cs new file mode 100644 index 000000000..04156df24 --- /dev/null +++ b/API/Entities/Enums/ReaderMode.cs @@ -0,0 +1,14 @@ +using System.ComponentModel; + +namespace API.Entities.Enums +{ + public enum ReaderMode + { + [Description("Left and Right")] + MANGA_LR = 0, + [Description("Up and Down")] + MANGA_UP = 1, + [Description("Webtoon")] + WEBTOON = 2 + } +} \ No newline at end of file diff --git a/API/Entities/Series.cs b/API/Entities/Series.cs index 4ea8f1cf4..4d8a48be4 100644 --- a/API/Entities/Series.cs +++ b/API/Entities/Series.cs @@ -32,7 +32,7 @@ namespace API.Entities /// /// Summary information related to the Series /// - public string Summary { get; set; } // TODO: Migrate into SeriesMetdata + public string Summary { get; set; } // TODO: Migrate into SeriesMetdata (with Metadata update) public DateTime Created { get; set; } public DateTime LastModified { get; set; } public byte[] CoverImage { get; set; } diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs index c3db5c08a..e713acbe1 100644 --- a/API/Extensions/ApplicationServiceExtensions.cs +++ b/API/Extensions/ApplicationServiceExtensions.cs @@ -1,5 +1,4 @@ -using System; -using API.Data; +using API.Data; using API.Helpers; using API.Interfaces; using API.Interfaces.Services; @@ -34,7 +33,6 @@ namespace API.Extensions services.AddScoped(); services.AddSqLite(config, env); - services.ConfigRepositories(); services.AddLogging(loggingBuilder => { @@ -56,17 +54,5 @@ namespace API.Extensions return services; } - - private static IServiceCollection ConfigRepositories(this IServiceCollection services) - { - services.AddScoped(); - services.AddScoped(); - - return services; - } - - public static IServiceCollection AddStartupTask(this IServiceCollection services) - where T : class, IStartupTask - => services.AddTransient(); 
} } \ No newline at end of file diff --git a/API/Extensions/ServiceCollectionExtensions.cs b/API/Extensions/ServiceCollectionExtensions.cs index a9d12b471..1b752431c 100644 --- a/API/Extensions/ServiceCollectionExtensions.cs +++ b/API/Extensions/ServiceCollectionExtensions.cs @@ -16,7 +16,7 @@ namespace API.Extensions { services.AddHttpClient(client => { - client.BaseAddress = new Uri("http://stats.kavitareader.com"); + client.BaseAddress = new Uri("https://kavitastats.majora2007.duckdns.org"); client.DefaultRequestHeaders.Add("api-key", "MsnvA2DfQqxSK5jh"); }); diff --git a/API/Interfaces/IUnitOfWork.cs b/API/Interfaces/IUnitOfWork.cs index df326c3e2..63051d2e3 100644 --- a/API/Interfaces/IUnitOfWork.cs +++ b/API/Interfaces/IUnitOfWork.cs @@ -11,6 +11,7 @@ namespace API.Interfaces ISettingsRepository SettingsRepository { get; } IAppUserProgressRepository AppUserProgressRepository { get; } ICollectionTagRepository CollectionTagRepository { get; } + IFileRepository FileRepository { get; } bool Commit(); Task CommitAsync(); bool HasChanges(); diff --git a/API/Interfaces/Services/IArchiveService.cs b/API/Interfaces/Services/IArchiveService.cs index f77784878..18869b7cd 100644 --- a/API/Interfaces/Services/IArchiveService.cs +++ b/API/Interfaces/Services/IArchiveService.cs @@ -3,7 +3,6 @@ using System.Collections.Generic; using System.IO.Compression; using System.Threading.Tasks; using API.Archive; -using API.Entities; namespace API.Interfaces.Services { diff --git a/API/Program.cs b/API/Program.cs index b084c2ef3..fc906cca1 100644 --- a/API/Program.cs +++ b/API/Program.cs @@ -5,6 +5,7 @@ using System.Threading; using System.Threading.Tasks; using API.Data; using API.Entities; +using API.Services.HostedServices; using Kavita.Common; using Kavita.Common.EnvironmentInfo; using Microsoft.AspNetCore.Hosting; @@ -20,7 +21,7 @@ namespace API { public class Program { - private static int HttpPort; + private static int _httpPort; protected Program() { @@ -48,7 +49,7 @@ namespace API } // Get HttpPort from Config - HttpPort = Configuration.GetPort(GetAppSettingFilename()); + _httpPort = Configuration.GetPort(GetAppSettingFilename()); var host = CreateHostBuilder(args).Build(); @@ -64,7 +65,6 @@ namespace API await context.Database.MigrateAsync(); await Seed.SeedRoles(roleManager); await Seed.SeedSettings(context); - } catch (Exception ex) { @@ -81,7 +81,7 @@ namespace API { webBuilder.UseKestrel((opts) => { - opts.ListenAnyIP(HttpPort, options => + opts.ListenAnyIP(_httpPort, options => { options.Protocols = HttpProtocols.Http1AndHttp2; }); diff --git a/API/Services/CacheService.cs b/API/Services/CacheService.cs index 2ce9b375b..73c279657 100644 --- a/API/Services/CacheService.cs +++ b/API/Services/CacheService.cs @@ -63,10 +63,6 @@ namespace API.Services } new DirectoryInfo(extractPath).Flatten(); - // if (fileCount > 1) - // { - // new DirectoryInfo(extractPath).Flatten(); - // } return chapter; } diff --git a/API/Services/Clients/StatsApiClient.cs b/API/Services/Clients/StatsApiClient.cs index 10b7ba543..00dddfad3 100644 --- a/API/Services/Clients/StatsApiClient.cs +++ b/API/Services/Clients/StatsApiClient.cs @@ -2,20 +2,25 @@ using System.Net.Http; using System.Net.Http.Json; using System.Threading.Tasks; + +using API.Configurations.CustomOptions; using API.DTOs; using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; namespace API.Services.Clients { public class StatsApiClient { private readonly HttpClient _client; + private readonly StatsOptions _options; private readonly ILogger 
_logger; - public StatsApiClient(HttpClient client, ILogger logger) + public StatsApiClient(HttpClient client, IOptions options, ILogger logger) { _client = client; _logger = logger; + _options = options.Value ?? throw new ArgumentNullException(nameof(options)); } public async Task SendDataToStatsServer(UsageStatisticsDto data) diff --git a/API/Services/ComicInfo.cs b/API/Services/ComicInfo.cs index 8277cfb35..55e823ee4 100644 --- a/API/Services/ComicInfo.cs +++ b/API/Services/ComicInfo.cs @@ -9,6 +9,7 @@ public string Publisher { get; set; } public string Genre { get; set; } public int PageCount { get; set; } + // ReSharper disable once InconsistentNaming public string LanguageISO { get; set; } public string Web { get; set; } } diff --git a/API/Services/HostedServices/StartupTasksHostedService.cs b/API/Services/HostedServices/StartupTasksHostedService.cs index dcdb22cca..95f87006e 100644 --- a/API/Services/HostedServices/StartupTasksHostedService.cs +++ b/API/Services/HostedServices/StartupTasksHostedService.cs @@ -28,7 +28,7 @@ namespace API.Services.HostedServices { await ManageStartupStatsTasks(scope, taskScheduler); } - catch (Exception e) + catch (Exception) { //If stats startup fail the user can keep using the app } @@ -36,9 +36,9 @@ namespace API.Services.HostedServices private async Task ManageStartupStatsTasks(IServiceScope serviceScope, ITaskScheduler taskScheduler) { - var settingsRepository = serviceScope.ServiceProvider.GetRequiredService(); + var unitOfWork = serviceScope.ServiceProvider.GetRequiredService(); - var settingsDto = await settingsRepository.GetSettingsDtoAsync(); + var settingsDto = await unitOfWork.SettingsRepository.GetSettingsDtoAsync(); if (!settingsDto.AllowStatCollection) return; diff --git a/API/Services/StatsService.cs b/API/Services/StatsService.cs index 4d5e3a315..2c315c99d 100644 --- a/API/Services/StatsService.cs +++ b/API/Services/StatsService.cs @@ -25,15 +25,15 @@ namespace API.Services private readonly StatsApiClient _client; private readonly DataContext _dbContext; private readonly ILogger _logger; - private readonly IFileRepository _fileRepository; + private readonly IUnitOfWork _unitOfWork; public StatsService(StatsApiClient client, DataContext dbContext, ILogger logger, - IFileRepository fileRepository) + IUnitOfWork unitOfWork) { _client = client; _dbContext = dbContext; _logger = logger; - _fileRepository = fileRepository; + _unitOfWork = unitOfWork; } private static string FinalPath => Path.Combine(Directory.GetCurrentDirectory(), TempFilePath, TempFileName); @@ -77,9 +77,9 @@ namespace API.Services _logger.LogInformation("Deleting the file from disk"); if (FileExists) File.Delete(FinalPath); } - catch (Exception e) + catch (Exception ex) { - _logger.LogError("Error Finalizing Stats collection flow", e); + _logger.LogError(ex, "Error Finalizing Stats collection flow"); throw; } } @@ -121,7 +121,7 @@ namespace API.Services .Select(x => new LibInfo {Type = x.Key, Count = x.Count()}) .ToArrayAsync(); - var uniqueFileTypes = await _fileRepository.GetFileExtensions(); + var uniqueFileTypes = await _unitOfWork.FileRepository.GetFileExtensions(); var usageInfo = new UsageInfoDto { diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..ab4a8a30e --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,56 @@ +# How to Contribute # + +We're always looking for people to help make Kavita even better, there are a number of ways to contribute. 
+ +## Documentation ## +Setup guides, FAQ, the more information we have on the [wiki](https://github.com/Kareadita/Kavita/wiki) the better. + +## Development ## + +### Tools required ### +- Visual Studio 2019 or higher (https://www.visualstudio.com/vs/). The community version is free and works fine. [Download it here](https://www.visualstudio.com/downloads/). +- Rider (optional to Visual Studio) (https://www.jetbrains.com/rider/) +- HTML/Javascript editor of choice (VS Code/Sublime Text/Webstorm/Atom/etc) +- [Git](https://git-scm.com/downloads) +- [NodeJS](https://nodejs.org/en/download/) (Node 14.X.X or higher) +- .NET 5.0+ + +### Getting started ### + +1. Fork Kavita +2. Clone the repository into your development machine. [*info*](https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/cloning-a-repository-from-github) + - Kavita as of v0.4.2 requires Kavita-webui to be cloned next to the Kavita. Fork and clone this as well. +3. Install the required Node Packages + - cd kavita-webui + - `npm install` + - `npm install -g @angular/cli` +4. Start webui server `ng serve` +5. Build the project in Visual Studio/Rider, Setting startup project to `API` +6. Debug the project in Visual Studio/Rider +7. Open http://localhost:4200 +8. (Deployment only) Run build.sh and pass the Runtime Identifier for your OS or just build.sh for all supported RIDs. + + +### Contributing Code ### +- If you're adding a new, already requested feature, please comment on [Github Issues](https://github.com/Kareadita/Kavita/issues "Github Issues") so work is not duplicated (If you want to add something not already on there, please talk to us first) +- Rebase from Kavita's develop branch, don't merge +- Make meaningful commits, or squash them +- Feel free to make a pull request before work is complete, this will let us see where its at and make comments/suggest improvements +- Reach out to us on the discord if you have any questions +- Add tests (unit/integration) +- Commit with *nix line endings for consistency (We checkout Windows and commit *nix) +- One feature/bug fix per pull request to keep things clean and easy to understand +- Use 4 spaces instead of tabs, this is the default for VS 2019 and WebStorm (to my knowledge) + - Use 2 spaces for Kavita-webui files + +### Pull Requesting ### +- Only make pull requests to develop, never master, if you make a PR to master we'll comment on it and close it +- You're probably going to get some comments or questions from us, they will be to ensure consistency and maintainability +- We'll try to respond to pull requests as soon as possible, if its been a day or two, please reach out to us, we may have missed it +- Each PR should come from its own [feature branch](http://martinfowler.com/bliki/FeatureBranch.html) not develop in your fork, it should have a meaningful branch name (what is being added/fixed) + - new-feature (Good) + - fix-bug (Good) + - patch (Bad) + - develop (Bad) + +If you have any questions about any of this, please let us know. 
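For illustration, the "Getting started" steps in the CONTRIBUTING.md file above condense to roughly the following terminal session — a minimal sketch, assuming the Kavita and Kavita-webui repositories are cloned side by side and that the API is started from the command line with `dotnet run` instead of being debugged from Visual Studio/Rider:

```bash
# Minimal sketch of the local development loop, assuming Kavita and
# Kavita-webui are sibling directories (see step 2 above).
cd Kavita-webui
npm install                  # install web dependencies
npm install -g @angular/cli  # provides the `ng` command
ng serve                     # serves the UI at http://localhost:4200

# in a second terminal (from the directory containing both clones),
# start the backend as an alternative to debugging the API project in VS/Rider
cd Kavita/API
dotnet run                   # API listens on http://localhost:5000
# then browse to http://localhost:4200
```

Step 8 (running build.sh with a Runtime Identifier) is only needed for deployment builds, not for this day-to-day loop.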
diff --git a/Kavita.Common/Kavita.Common.csproj b/Kavita.Common/Kavita.Common.csproj index 673974db1..3b92d09f8 100644 --- a/Kavita.Common/Kavita.Common.csproj +++ b/Kavita.Common/Kavita.Common.csproj @@ -4,7 +4,7 @@ net5.0 kareadita.github.io Kavita - 0.4.1.1 + 0.4.2.0 en diff --git a/Logo/dottrace.svg b/Logo/dottrace.svg new file mode 100644 index 000000000..b879517cd --- /dev/null +++ b/Logo/dottrace.svg @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Logo/jetbrains.svg b/Logo/jetbrains.svg new file mode 100644 index 000000000..75d4d2177 --- /dev/null +++ b/Logo/jetbrains.svg @@ -0,0 +1,66 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Logo/kavita.svg b/Logo/kavita.svg new file mode 100644 index 000000000..f56f8a7c5 --- /dev/null +++ b/Logo/kavita.svg @@ -0,0 +1,124 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Logo/resharper.svg b/Logo/resharper.svg new file mode 100644 index 000000000..24c987a78 --- /dev/null +++ b/Logo/resharper.svg @@ -0,0 +1,50 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Logo/rider.svg b/Logo/rider.svg new file mode 100644 index 000000000..82da35b0b --- /dev/null +++ b/Logo/rider.svg @@ -0,0 +1,42 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + rider + + + + + + + + + + + + + + diff --git a/Logo/sentry.svg b/Logo/sentry.svg new file mode 100644 index 000000000..40bd18594 --- /dev/null +++ b/Logo/sentry.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/README.md b/README.md index a3fd09193..2dda67528 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Kavita +# []() Kavita
![Cover Image](https://github.com/Kareadita/kareadita.github.io/blob/main/img/features/seriesdetail.PNG?raw=true) @@ -9,44 +9,41 @@ your reading collection with your friends and family! [![Release](https://img.shields.io/github/release/Kareadita/Kavita.svg?style=flat&maxAge=3600)](https://github.com/Kareadita/Kavita/releases) [![License](https://img.shields.io/badge/license-GPLv3-blue.svg?style=flat)](https://github.com/Kareadita/Kavita/blob/master/LICENSE) -[![Discord](https://img.shields.io/badge/discord-chat-7289DA.svg?maxAge=60)](https://discord.gg/eczRp9eeem) [![Downloads](https://img.shields.io/github/downloads/Kareadita/Kavita/total.svg?style=flat)](https://github.com/Kareadita/Kavita/releases) [![Docker Pulls](https://img.shields.io/docker/pulls/kizaing/kavita.svg)](https://hub.docker.com/r/kizaing/kavita/) -[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=Kareadita_Kavita&metric=alert_status)](https://sonarcloud.io/dashboard?id=Kareadita_Kavita) [![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=Kareadita_Kavita&metric=sqale_rating)](https://sonarcloud.io/dashboard?id=Kareadita_Kavita) [![Security Rating](https://sonarcloud.io/api/project_badges/measure?project=Kareadita_Kavita&metric=security_rating)](https://sonarcloud.io/dashboard?id=Kareadita_Kavita) [![Donate via Paypal](https://img.shields.io/badge/donate-paypal-blue.svg?style=popout&logo=paypal)](https://paypal.me/majora2007?locale.x=en_US) +[![Backers on Open Collective](https://opencollective.com/kavita/backers/badge.svg)](#backers) +[![Sponsors on Open Collective](https://opencollective.com/kavita/sponsors/badge.svg)](#sponsors)
-## Goals: +## Goals - [x] Serve up Manga/Webtoons/Comics (cbr, cbz, zip/rar, 7zip, raw images) and Books (epub, mobi, azw, djvu, pdf) -- [x] First class responsive readers that work great on any device +- [x] First class responsive readers that work great on any device (phone, tablet, desktop) - [x] Provide a dark theme for web app - [ ] Provide hooks into metadata providers to fetch metadata for Comics, Manga, and Books - [ ] Metadata should allow for collections, want to read integration from 3rd party services, genres. - [x] Ability to manage users, access, and ratings - [ ] Ability to sync ratings and reviews to external services -- [x] Fully Accessible +- [x] Fully Accessible with active accessibility audits +- [x] Dedicated webtoon reader - [ ] And so much [more...](https://github.com/Kareadita/Kavita/projects) +## Support +[![Reddit](https://img.shields.io/badge/reddit-discussion-FF4500.svg?maxAge=60)](https://www.reddit.com/r/KavitaManga/) +[![Discord](https://img.shields.io/badge/discord-chat-7289DA.svg?maxAge=60)](https://discord.gg/eczRp9eeem) +[![GitHub - Bugs and Feature Requests Only](https://img.shields.io/badge/github-issues-red.svg?maxAge=60)](https://github.com/Kareadita/Kavita/issues) -# How to contribute -- Ensure you've cloned Kavita-webui. You should have Projects/Kavita and Projects/Kavita-webui -- In Kavita-webui, run ng serve. This will start the webserver on localhost:4200 -- Run API project in Kavita, this will start the backend on localhost:5000 - - -## Deploy local build -- Run build.sh and pass the Runtime Identifier for your OS or just build.sh for all supported RIDs. - -## How to install +## Setup +### Non-Docker - Unzip the archive for your target OS - Place in a directory that is writable. If on windows, do not place in Program Files - Linux users must ensure the directory & kavita.db is writable by Kavita (might require starting server once) - Run Kavita - If you are updating, do not copy appsettings.json from the new version over. It will override your TokenKey and you will have to reauthenticate on your devices. -## Docker +### Docker Running your Kavita server in docker is super easy! Barely an inconvenience. You can run it with this command: ``` @@ -72,17 +69,51 @@ services: restart: unless-stopped ``` -**Note: Kavita is under heavy development and is being updated all the time, so the tag for current builds is :nightly. The :latest tag will be the latest stable release. There is also the :alpine tag if you want a smaller image, but it is only available for x64 systems.** +**Note: Kavita is under heavy development and is being updated all the time, so the tag for current builds is `:nightly`. The `:latest` tag will be the latest stable release. There is also the `:alpine` tag if you want a smaller image, but it is only available for x64 systems.** -## Got an Idea? -Got a great idea? Throw it up on the FeatHub or vote on another persons. Please check the [Project Board](https://github.com/Kareadita/Kavita/projects) first for a list of planned features. +## Feature Requests +Got a great idea? Throw it up on the FeatHub or vote on another idea. Please check the [Project Board](https://github.com/Kareadita/Kavita/projects) first for a list of planned features. [![Feature Requests](https://feathub.com/Kareadita/Kavita?format=svg)](https://feathub.com/Kareadita/Kavita) -## Want to help? -I am looking for developers with a passion for building the next Plex for Reading. 
Developers with C#/ASP.NET, Angular 11 please reach out on [Discord](https://discord.gg/eczRp9eeem). + +## Contributors + +This project exists thanks to all the people who contribute. [Contribute](CONTRIBUTING.md). + + ## Donate -If you like Kavita, have gotten good use out of it or feel like you want to say thanks with a few bucks, feel free to donate. Money will -likely go towards beer or hosting. +If you like Kavita, have gotten good use out of it or feel like you want to say thanks with a few bucks, feel free to donate. Money will go towards +expenses related to Kavita. You can back us through OpenCollective. + [![Donate via Paypal](https://img.shields.io/badge/donate-paypal-blue.svg?style=popout&logo=paypal)](https://paypal.me/majora2007?locale.x=en_US) + +## Backers + +Thank you to all our backers! 🙏 [Become a backer](https://opencollective.com/Kavita#backer) + + + +## Sponsors + +Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [Become a sponsor](https://opencollective.com/Kavita#sponsor) + + + +## Mega Sponsors + + +## JetBrains +Thank you to [ JetBrains](http://www.jetbrains.com/) for providing us with free licenses to their great tools. + +* [ Rider](http://www.jetbrains.com/rider/) +* [ dotTrace](http://www.jetbrains.com/dottrace/) + +## Sentry +Thank you to [ Sentry](https://sentry.io/welcome/) for providing us with free license to their software. + +### License + +* [GNU GPL v3](http://www.gnu.org/licenses/gpl.html) +* Copyright 2010-2021 \ No newline at end of file diff --git a/build.sh b/build.sh index d32da32dc..adb753827 100644 --- a/build.sh +++ b/build.sh @@ -48,9 +48,15 @@ Build() BuildUI() { ProgressStart 'Building UI' + echo 'Removing old wwwroot' + rm -rf API/wwwroot/* cd ../Kavita-webui/ || exit + echo 'Installing web dependencies' npm install + echo 'Building UI' npm run prod + echo 'Copying back to Kavita wwwroot' + cp -r dist/* ../Kavita/API/wwwroot cd ../Kavita/ || exit ProgressEnd 'Building UI' } @@ -68,6 +74,9 @@ Package() cd API echo dotnet publish -c Release --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework dotnet publish -c Release --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + + echo "Recopying wwwroot due to bug" + cp -r ./wwwroot/* $lOutputFolder/wwwroot echo "Copying Install information" cp ../INSTALL.txt "$lOutputFolder"/README.txt From af8c6c2cb9ea32fc86bc3e0f27910e980d6f6eed Mon Sep 17 00:00:00 2001 From: Kizaing Date: Thu, 24 Jun 2021 21:58:20 -0400 Subject: [PATCH 34/55] Fixed file pathing for automated docker builds (#323) --- .github/workflows/nightly-docker.yml | 1 - .github/workflows/stable-docker.yml | 13 ++++++++++++- action-build.sh | 4 ---- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index 329006f14..87fe872a5 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -28,7 +28,6 @@ jobs: cd Kavita-webui/ || exit npm install npm run prod - mv dist/ ../API/wwwroot cd ../ || exit - name: Compile dotnet app diff --git a/.github/workflows/stable-docker.yml b/.github/workflows/stable-docker.yml index 7b8a68c6e..eda639b37 100644 --- a/.github/workflows/stable-docker.yml +++ b/.github/workflows/stable-docker.yml @@ -28,7 +28,6 @@ jobs: cd Kavita-webui/ || exit npm install npm run prod - mv dist/ ../API/wwwroot cd ../ || exit - name: Compile dotnet app @@ -61,3 +60,15 @@ jobs: - name: Image digest run: 
echo ${{ steps.docker_build.outputs.digest }} + + - name: Image digest + run: echo ${{ steps.docker_build.outputs.digest }} + + - name: Notify Discord + uses: rjstone/discord-webhook-notify@v1 + with: + severity: info + description: + details: 'https://hub.docker.com/r/kizaing/kavita/tags?page=1&ordering=last_updated' + text: A new stable build has been released for docker. + webhookUrl: ${{ secrets.DISCORD_DOCKER_UPDATE_URL }} \ No newline at end of file diff --git a/action-build.sh b/action-build.sh index 559765710..9fd94217c 100755 --- a/action-build.sh +++ b/action-build.sh @@ -21,7 +21,6 @@ Build() slnFile=Kavita.sln - dotnet clean $slnFile -c Debug dotnet clean $slnFile -c Release dotnet msbuild -restore $slnFile -p:Configuration=Release -p:Platform="Any CPU" -p:RuntimeIdentifiers=$RID @@ -48,9 +47,6 @@ Package() echo "Copying LICENSE" cp ../LICENSE "$lOutputFolder"/LICENSE.txt - - echo "Renaming API -> Kavita" - mv "$lOutputFolder"/API "$lOutputFolder"/Kavita echo "Creating tar" cd ../$outputFolder/"$runtime"/ From f50a21dd2226daae0d0ee1da0c5c74296cf8a7cf Mon Sep 17 00:00:00 2001 From: Kizaing Date: Thu, 24 Jun 2021 22:19:42 -0400 Subject: [PATCH 35/55] Hotfix/docker workflow (#324) * Fixed file pathing for automated docker builds * Added back in renaming the Kavita executable Co-authored-by: Chris Plaatjes --- action-build.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/action-build.sh b/action-build.sh index 9fd94217c..faaaa4f01 100755 --- a/action-build.sh +++ b/action-build.sh @@ -42,6 +42,9 @@ Package() echo dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + echo "Renaming API -> Kavita" + mv "$lOutputFolder"/API "$lOutputFolder"/Kavita + echo "Copying Install information" cp ../INSTALL.txt "$lOutputFolder"/README.txt From dfddb551d258deda70ec171e4e32efa519c64cc4 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Fri, 25 Jun 2021 09:15:19 -0500 Subject: [PATCH 36/55] Fixed build issues (#325) * Fixed build issues * Fixed up workflows --- .github/workflows/nightly-docker.yml | 6 ++++++ .github/workflows/stable-docker.yml | 6 ++++++ action-build.sh | 16 ++++++++++++++++ build.sh | 3 +-- docker-build.sh | 13 +++++++++++++ 5 files changed, 42 insertions(+), 2 deletions(-) diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index 87fe872a5..38d243d8f 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -25,9 +25,15 @@ jobs: with: node-version: '14' - run: | + echo 'Removing old wwwroot' + rm -rf API/wwwroot/* cd Kavita-webui/ || exit + echo 'Installing web dependencies' npm install + echo 'Building UI' npm run prod + echo 'Copying back to Kavita wwwroot' + cp -r dist/* ../Kavita/API/wwwroot cd ../ || exit - name: Compile dotnet app diff --git a/.github/workflows/stable-docker.yml b/.github/workflows/stable-docker.yml index eda639b37..ab57ecb82 100644 --- a/.github/workflows/stable-docker.yml +++ b/.github/workflows/stable-docker.yml @@ -25,9 +25,15 @@ jobs: with: node-version: '14' - run: | + echo 'Removing old wwwroot' + rm -rf API/wwwroot/* cd Kavita-webui/ || exit + echo 'Installing web dependencies' npm install + echo 'Building UI' npm run prod + echo 'Copying back to Kavita wwwroot' + cp -r dist/* ../Kavita/API/wwwroot cd ../ || exit - name: Compile dotnet app diff --git a/action-build.sh b/action-build.sh 
index faaaa4f01..4571c93e9 100755 --- a/action-build.sh +++ b/action-build.sh @@ -59,6 +59,22 @@ Package() } +BuildUI() +{ + ProgressStart 'Building UI' + echo 'Removing old wwwroot' + rm -rf API/wwwroot/* + cd ../Kavita-webui/ || exit + echo 'Installing web dependencies' + npm install + echo 'Building UI' + npm run prod + echo 'Copying back to Kavita wwwroot' + cp -r dist/* ../Kavita/API/wwwroot + cd ../Kavita/ || exit + ProgressEnd 'Building UI' +} + dir=$PWD if [ -d _output ] diff --git a/build.sh b/build.sh index adb753827..7e137a790 100644 --- a/build.sh +++ b/build.sh @@ -32,7 +32,6 @@ Build() slnFile=Kavita.sln - #dotnet clean $slnFile -c Debug dotnet clean $slnFile -c Release if [[ -z "$RID" ]]; @@ -102,8 +101,8 @@ Package() RID="$1" -Build BuildUI +Build dir=$PWD diff --git a/docker-build.sh b/docker-build.sh index c97478910..a0adc4bbf 100644 --- a/docker-build.sh +++ b/docker-build.sh @@ -37,6 +37,19 @@ BuildUI() npm run prod cd ../Kavita/ || exit ProgressEnd 'Building UI' + + ProgressStart 'Building UI' + echo 'Removing old wwwroot' + rm -rf API/wwwroot/* + cd ../Kavita-webui/ || exit + echo 'Installing web dependencies' + npm install + echo 'Building UI' + npm run prod + echo 'Copying back to Kavita wwwroot' + cp -r dist/* ../Kavita/API/wwwroot + cd ../Kavita/ || exit + ProgressEnd 'Building UI' } Package() From 7f141ad7613d2d58ba167ba276e79dbb769037bc Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Fri, 25 Jun 2021 09:24:09 -0500 Subject: [PATCH 37/55] Github build actions --- .github/workflows/nightly-docker.yml | 2 +- .github/workflows/stable-docker.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index 38d243d8f..f948deb15 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -33,7 +33,7 @@ jobs: echo 'Building UI' npm run prod echo 'Copying back to Kavita wwwroot' - cp -r dist/* ../Kavita/API/wwwroot + cp -r dist/* ../API/wwwroot cd ../ || exit - name: Compile dotnet app diff --git a/.github/workflows/stable-docker.yml b/.github/workflows/stable-docker.yml index ab57ecb82..58779e580 100644 --- a/.github/workflows/stable-docker.yml +++ b/.github/workflows/stable-docker.yml @@ -33,7 +33,7 @@ jobs: echo 'Building UI' npm run prod echo 'Copying back to Kavita wwwroot' - cp -r dist/* ../Kavita/API/wwwroot + cp -r dist/* ../API/wwwroot cd ../ || exit - name: Compile dotnet app From a97bd1bd540030beb45b511f49dcfa2c6f463629 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Fri, 25 Jun 2021 09:25:38 -0500 Subject: [PATCH 38/55] Github build actions (#326) --- .github/workflows/nightly-docker.yml | 2 +- .github/workflows/stable-docker.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index 38d243d8f..f948deb15 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -33,7 +33,7 @@ jobs: echo 'Building UI' npm run prod echo 'Copying back to Kavita wwwroot' - cp -r dist/* ../Kavita/API/wwwroot + cp -r dist/* ../API/wwwroot cd ../ || exit - name: Compile dotnet app diff --git a/.github/workflows/stable-docker.yml b/.github/workflows/stable-docker.yml index ab57ecb82..58779e580 100644 --- a/.github/workflows/stable-docker.yml +++ b/.github/workflows/stable-docker.yml @@ -33,7 +33,7 @@ jobs: echo 'Building UI' npm run prod echo 'Copying back to Kavita wwwroot' - cp -r dist/* ../Kavita/API/wwwroot + cp -r 
dist/* ../API/wwwroot cd ../ || exit - name: Compile dotnet app From 61c8ce36e0c861e8e6b21b89e3b13f190d506213 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Fri, 25 Jun 2021 09:41:22 -0500 Subject: [PATCH 39/55] Bugfix/build 2 (#327) * Github build actions * More fixes --- .github/workflows/nightly-docker.yml | 3 +-- .github/workflows/stable-docker.yml | 1 + 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index f948deb15..d7a5fc6ee 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -25,14 +25,13 @@ jobs: with: node-version: '14' - run: | - echo 'Removing old wwwroot' - rm -rf API/wwwroot/* cd Kavita-webui/ || exit echo 'Installing web dependencies' npm install echo 'Building UI' npm run prod echo 'Copying back to Kavita wwwroot' + mkdir ../API/wwwroot cp -r dist/* ../API/wwwroot cd ../ || exit diff --git a/.github/workflows/stable-docker.yml b/.github/workflows/stable-docker.yml index 58779e580..a0cb9c598 100644 --- a/.github/workflows/stable-docker.yml +++ b/.github/workflows/stable-docker.yml @@ -33,6 +33,7 @@ jobs: echo 'Building UI' npm run prod echo 'Copying back to Kavita wwwroot' + mkdir ../API/wwwroot cp -r dist/* ../API/wwwroot cd ../ || exit From dfd4b1ed6f28576d5017672dd2325efa04539d17 Mon Sep 17 00:00:00 2001 From: Robbie Davis Date: Fri, 25 Jun 2021 10:57:26 -0400 Subject: [PATCH 40/55] adding ls -l to workflow for integrity check (#328) --- .github/workflows/nightly-docker.yml | 4 ++++ .github/workflows/stable-docker.yml | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index d7a5fc6ee..153f9f5dd 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -30,9 +30,13 @@ jobs: npm install echo 'Building UI' npm run prod + echo 'Checking dist folder for compiling integrity' + ls -l dist echo 'Copying back to Kavita wwwroot' mkdir ../API/wwwroot cp -r dist/* ../API/wwwroot + echo 'Checking Kavita wwwroot for copying integrity' + ls -l ../API/wwwroot cd ../ || exit - name: Compile dotnet app diff --git a/.github/workflows/stable-docker.yml b/.github/workflows/stable-docker.yml index a0cb9c598..ed28452c5 100644 --- a/.github/workflows/stable-docker.yml +++ b/.github/workflows/stable-docker.yml @@ -32,9 +32,13 @@ jobs: npm install echo 'Building UI' npm run prod + echo 'Checking dist folder for compiling integrity' + ls -l dist echo 'Copying back to Kavita wwwroot' mkdir ../API/wwwroot cp -r dist/* ../API/wwwroot + echo 'Checking Kavita wwwroot for copying integrity' + ls -l ../API/wwwroot cd ../ || exit - name: Compile dotnet app From 9b4712348401be3dd33d87f7739f6e45906691af Mon Sep 17 00:00:00 2001 From: Robbie Davis Date: Fri, 25 Jun 2021 11:14:16 -0400 Subject: [PATCH 41/55] logging for dotnet ui build (#329) --- action-build.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/action-build.sh b/action-build.sh index 4571c93e9..3c396085a 100755 --- a/action-build.sh +++ b/action-build.sh @@ -69,8 +69,10 @@ BuildUI() npm install echo 'Building UI' npm run prod + ls -l dist echo 'Copying back to Kavita wwwroot' cp -r dist/* ../Kavita/API/wwwroot + ls -l ../Kavita/API/wwwroot cd ../Kavita/ || exit ProgressEnd 'Building UI' } From f29f2f67299e6c168c820ab0cd32ba29f639e608 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Fri, 25 Jun 2021 10:21:28 -0500 Subject: [PATCH 42/55] Stat API Url change (#330) * Changed stat url to 
correct one --- API/Extensions/ServiceCollectionExtensions.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/API/Extensions/ServiceCollectionExtensions.cs b/API/Extensions/ServiceCollectionExtensions.cs index 1b752431c..62a0575ef 100644 --- a/API/Extensions/ServiceCollectionExtensions.cs +++ b/API/Extensions/ServiceCollectionExtensions.cs @@ -16,7 +16,7 @@ namespace API.Extensions { services.AddHttpClient(client => { - client.BaseAddress = new Uri("https://kavitastats.majora2007.duckdns.org"); + client.BaseAddress = new Uri("https://stats.kavitareader.com"); client.DefaultRequestHeaders.Add("api-key", "MsnvA2DfQqxSK5jh"); }); From 50bdb715b7bd38c8dea8b62d5f6069b1d9604817 Mon Sep 17 00:00:00 2001 From: Robbie Davis Date: Fri, 25 Jun 2021 12:33:02 -0400 Subject: [PATCH 43/55] adding debugging and path change (#332) --- .github/workflows/nightly-docker.yml | 6 +++--- action-build.sh | 6 ++++++ 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index 153f9f5dd..98556c37f 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -33,10 +33,10 @@ jobs: echo 'Checking dist folder for compiling integrity' ls -l dist echo 'Copying back to Kavita wwwroot' - mkdir ../API/wwwroot - cp -r dist/* ../API/wwwroot + mkdir ../Kavita/API/wwwroot + cp -r dist/* ../Kavita/API/wwwroot echo 'Checking Kavita wwwroot for copying integrity' - ls -l ../API/wwwroot + ls -l ../Kavita/API/wwwroot cd ../ || exit - name: Compile dotnet app diff --git a/action-build.sh b/action-build.sh index 3c396085a..ce7e5ed75 100755 --- a/action-build.sh +++ b/action-build.sh @@ -42,9 +42,15 @@ Package() echo dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework + echo "Integrity check on API wwwroot folder" + ls -l "$lOutputFolder"/API/wwwroot + echo "Renaming API -> Kavita" mv "$lOutputFolder"/API "$lOutputFolder"/Kavita + echo "Integrity check on Kavita wwwroot folder" + ls -l "$lOutputFolder"/Kavita/wwwroot + echo "Copying Install information" cp ../INSTALL.txt "$lOutputFolder"/README.txt From 0533de7420852801c27776797479a8a2349b2b2f Mon Sep 17 00:00:00 2001 From: Robbie Davis Date: Fri, 25 Jun 2021 12:51:25 -0400 Subject: [PATCH 44/55] Bugfix/workflow test (#333) * adding debugging and path change * more logging and change to rsync --- .github/workflows/nightly-docker.yml | 11 +++++++++-- action-build.sh | 3 +++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index 98556c37f..b7f49dcf3 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -25,16 +25,23 @@ jobs: with: node-version: '14' - run: | + echo 'Checking folder structure' + ls -l + cd Kavita-webui/ || exit echo 'Installing web dependencies' npm install + echo 'Building UI' npm run prod + echo 'Checking dist folder for compiling integrity' ls -l dist + echo 'Copying back to Kavita wwwroot' - mkdir ../Kavita/API/wwwroot - cp -r dist/* ../Kavita/API/wwwroot + mkdir ../API/wwwroot + rsync -a dist/ ../API/wwwroot/ + echo 'Checking Kavita wwwroot for copying integrity' ls -l ../Kavita/API/wwwroot cd ../ || exit diff --git a/action-build.sh b/action-build.sh index ce7e5ed75..4306c7aee 100755 --- a/action-build.sh +++ 
b/action-build.sh @@ -34,6 +34,9 @@ Package() local runtime="$2" local lOutputFolder=../_output/"$runtime"/Kavita + echo "Integrity check on root folder" + ls -l + ProgressStart "Creating $runtime Package for $framework" # TODO: Use no-restore? Because Build should have already done it for us From acbd4ce2527fa3c80a1a4373ee6badf50b3eab67 Mon Sep 17 00:00:00 2001 From: Robbie Davis Date: Fri, 25 Jun 2021 13:21:33 -0400 Subject: [PATCH 45/55] testing out rsync (#334) --- .github/workflows/nightly-docker.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index b7f49dcf3..e7c841bb3 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -39,11 +39,10 @@ jobs: ls -l dist echo 'Copying back to Kavita wwwroot' - mkdir ../API/wwwroot rsync -a dist/ ../API/wwwroot/ echo 'Checking Kavita wwwroot for copying integrity' - ls -l ../Kavita/API/wwwroot + ls -l ../API/wwwroot cd ../ || exit - name: Compile dotnet app From 733460e530c05bda128ba4a28a4b8b8bbf8a4031 Mon Sep 17 00:00:00 2001 From: Robbie Davis Date: Fri, 25 Jun 2021 13:32:11 -0400 Subject: [PATCH 46/55] fixing ls (#335) --- action-build.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/action-build.sh b/action-build.sh index 4306c7aee..14fb8a086 100755 --- a/action-build.sh +++ b/action-build.sh @@ -46,13 +46,13 @@ Package() dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework echo "Integrity check on API wwwroot folder" - ls -l "$lOutputFolder"/API/wwwroot + ls -l "$lOutputFolder"/wwwroot echo "Renaming API -> Kavita" mv "$lOutputFolder"/API "$lOutputFolder"/Kavita echo "Integrity check on Kavita wwwroot folder" - ls -l "$lOutputFolder"/Kavita/wwwroot + ls -l "$lOutputFolder"/wwwroot echo "Copying Install information" cp ../INSTALL.txt "$lOutputFolder"/README.txt From be56b33baf5278a7c4c4176d51c2fbff111d547b Mon Sep 17 00:00:00 2001 From: Robbie Davis Date: Fri, 25 Jun 2021 15:27:39 -0400 Subject: [PATCH 47/55] Fixing workflow (#337) * Added extra copy for webui when building images --- action-build.sh | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/action-build.sh b/action-build.sh index 14fb8a086..b4c82b6d0 100755 --- a/action-build.sh +++ b/action-build.sh @@ -34,9 +34,6 @@ Package() local runtime="$2" local lOutputFolder=../_output/"$runtime"/Kavita - echo "Integrity check on root folder" - ls -l - ProgressStart "Creating $runtime Package for $framework" # TODO: Use no-restore? 
Because Build should have already done it for us @@ -45,14 +42,11 @@ Package() echo dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework - echo "Integrity check on API wwwroot folder" - ls -l "$lOutputFolder"/wwwroot - echo "Renaming API -> Kavita" mv "$lOutputFolder"/API "$lOutputFolder"/Kavita - echo "Integrity check on Kavita wwwroot folder" - ls -l "$lOutputFolder"/wwwroot + echo "Copying webui wwwroot to build" + cp -r wwwroot/* "$lOutputFolder"/wwwroot/ echo "Copying Install information" cp ../INSTALL.txt "$lOutputFolder"/README.txt From 6b645737fa086156066a9691a48ee93827aafc9b Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Fri, 25 Jun 2021 15:08:55 -0500 Subject: [PATCH 48/55] Stats API Url Fixed (#338) * Fixed the API call by removing baseAddress and performing concatination in real time --- API/Extensions/ServiceCollectionExtensions.cs | 1 - API/Services/Clients/StatsApiClient.cs | 4 +++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/API/Extensions/ServiceCollectionExtensions.cs b/API/Extensions/ServiceCollectionExtensions.cs index 62a0575ef..3559f3856 100644 --- a/API/Extensions/ServiceCollectionExtensions.cs +++ b/API/Extensions/ServiceCollectionExtensions.cs @@ -16,7 +16,6 @@ namespace API.Extensions { services.AddHttpClient(client => { - client.BaseAddress = new Uri("https://stats.kavitareader.com"); client.DefaultRequestHeaders.Add("api-key", "MsnvA2DfQqxSK5jh"); }); diff --git a/API/Services/Clients/StatsApiClient.cs b/API/Services/Clients/StatsApiClient.cs index 00dddfad3..e07bec222 100644 --- a/API/Services/Clients/StatsApiClient.cs +++ b/API/Services/Clients/StatsApiClient.cs @@ -1,6 +1,7 @@ using System; using System.Net.Http; using System.Net.Http.Json; +using System.Threading; using System.Threading.Tasks; using API.Configurations.CustomOptions; @@ -15,6 +16,7 @@ namespace API.Services.Clients private readonly HttpClient _client; private readonly StatsOptions _options; private readonly ILogger _logger; + private const string ApiUrl = "http://stats.kavitareader.com"; public StatsApiClient(HttpClient client, IOptions options, ILogger logger) { @@ -29,7 +31,7 @@ namespace API.Services.Clients try { - var response = await _client.PostAsJsonAsync("/api/InstallationStats", data); + using var response = await _client.PostAsJsonAsync(ApiUrl + "/api/InstallationStats", data); responseContent = await response.Content.ReadAsStringAsync(); From 471b49e9596bb7d053b4cdbc5294a8fd648003fa Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Sat, 26 Jun 2021 14:03:29 -0500 Subject: [PATCH 49/55] Update README.md Fixed up the wording on readme --- README.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 2dda67528..2ca70597c 100644 --- a/README.md +++ b/README.md @@ -19,15 +19,16 @@ your reading collection with your friends and family! 
## Goals -- [x] Serve up Manga/Webtoons/Comics (cbr, cbz, zip/rar, 7zip, raw images) and Books (epub, mobi, azw, djvu, pdf) +- [x] Serve up Manga/Webtoons/Comics (cbr, cbz, zip/rar (RAR5 not supported), 7zip, raw images) and Books (epub, pdf) - [x] First class responsive readers that work great on any device (phone, tablet, desktop) -- [x] Provide a dark theme for web app +- [x] Dark and Light themes +- [x] Dedicated ebook reader, manga/comic reader, and webtoon reader - [ ] Provide hooks into metadata providers to fetch metadata for Comics, Manga, and Books - [ ] Metadata should allow for collections, want to read integration from 3rd party services, genres. -- [x] Ability to manage users, access, and ratings +- [x] Ability to manage users, access, and ratings/reviews - [ ] Ability to sync ratings and reviews to external services - [x] Fully Accessible with active accessibility audits -- [x] Dedicated webtoon reader +- [x] No requried folder structures - [ ] And so much [more...](https://github.com/Kareadita/Kavita/projects) ## Support @@ -80,12 +81,11 @@ Got a great idea? Throw it up on the FeatHub or vote on another idea. Please che ## Contributors This project exists thanks to all the people who contribute. [Contribute](CONTRIBUTING.md). - - + ## Donate If you like Kavita, have gotten good use out of it or feel like you want to say thanks with a few bucks, feel free to donate. Money will go towards -expenses related to Kavita. You can back us through OpenCollective. +expenses related to Kavita. You can back us through [OpenCollective](https://opencollective.com/Kavita#sponsor) and get your name in the README as thanks. [![Donate via Paypal](https://img.shields.io/badge/donate-paypal-blue.svg?style=popout&logo=paypal)](https://paypal.me/majora2007?locale.x=en_US) @@ -116,4 +116,4 @@ Thank you to [ Sentry](https://sen ### License * [GNU GPL v3](http://www.gnu.org/licenses/gpl.html) -* Copyright 2010-2021 \ No newline at end of file +* Copyright 2010-2021 From b0b64cf63547e19e35d08ac12dcdba3bcc7b4280 Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Sat, 26 Jun 2021 16:33:30 -0500 Subject: [PATCH 50/55] Added demo to readme --- README.md | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 2dda67528..0c662fb68 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,15 @@ your reading collection with your friends and family! [![Sponsors on Open Collective](https://opencollective.com/kavita/sponsors/badge.svg)](#sponsors) -## Goals +# Demo +[https://demo.kavitareader.com/](https://demo.kavitareader.com/) + +``` +Username: demouser +Password: Demouser64 +``` + +## Features - [x] Serve up Manga/Webtoons/Comics (cbr, cbz, zip/rar, 7zip, raw images) and Books (epub, mobi, azw, djvu, pdf) - [x] First class responsive readers that work great on any device (phone, tablet, desktop) - [x] Provide a dark theme for web app @@ -38,8 +46,8 @@ your reading collection with your friends and family! ## Setup ### Non-Docker - Unzip the archive for your target OS -- Place in a directory that is writable. If on windows, do not place in Program Files -- Linux users must ensure the directory & kavita.db is writable by Kavita (might require starting server once) +- Place in a directory that is writable. If on windows, do not place in Program Files, this folder is protected by Windows. +- Linux users must ensure the directory is writable by Kavita (chown +x Kavita) - Run Kavita - If you are updating, do not copy appsettings.json from the new version over. 
It will override your TokenKey and you will have to reauthenticate on your devices. From f18285db930c65322881c078b4ba8e0c537caaf5 Mon Sep 17 00:00:00 2001 From: Robbie Davis Date: Sun, 27 Jun 2021 09:08:17 -0400 Subject: [PATCH 51/55] chapter and issue parsing for comics (#343) * chapter and issue parsing for comics * fixing comment --- API.Tests/Parser/ComicParserTests.cs | 4 ++++ API/Parser/Parser.cs | 20 ++++++++++++++++++-- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/API.Tests/Parser/ComicParserTests.cs b/API.Tests/Parser/ComicParserTests.cs index 9d91a5feb..6e33dd89c 100644 --- a/API.Tests/Parser/ComicParserTests.cs +++ b/API.Tests/Parser/ComicParserTests.cs @@ -20,6 +20,8 @@ namespace API.Tests.Parser [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "Scott Pilgrim")] [InlineData("Wolverine - Origins 003 (2006) (digital) (Minutemen-PhD)", "Wolverine - Origins")] [InlineData("Invincible Vol 01 Family matters (2005) (Digital).cbr", "Invincible")] + [InlineData("Amazing Man Comics chapter 25", "Amazing Man Comics")] + [InlineData("Amazing Man Comics issue #25", "Amazing Man Comics")] public void ParseComicSeriesTest(string filename, string expected) { Assert.Equal(expected, API.Parser.Parser.ParseComicSeries(filename)); @@ -40,6 +42,7 @@ namespace API.Tests.Parser [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "2")] [InlineData("Superman v1 024 (09-10 1943)", "1")] + [InlineData("Amazing Man Comics chapter 25", "0")] public void ParseComicVolumeTest(string filename, string expected) { Assert.Equal(expected, API.Parser.Parser.ParseComicVolume(filename)); @@ -61,6 +64,7 @@ namespace API.Tests.Parser [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "1")] [InlineData("Superman v1 024 (09-10 1943)", "24")] [InlineData("Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr", "70.5")] + [InlineData("Amazing Man Comics chapter 25", "25")] public void ParseComicChapterTest(string filename, string expected) { Assert.Equal(expected, API.Parser.Parser.ParseComicChapter(filename)); diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs index e6ac20a1f..5bfe954e8 100644 --- a/API/Parser/Parser.cs +++ b/API/Parser/Parser.cs @@ -200,6 +200,14 @@ namespace API.Parser new Regex( @"^(?.*)(?: |_)v\d+", RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Amazing Man Comics chapter 25 + new Regex( + @"^(?.*)(?: |_)c(hapter) \d+", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Amazing Man Comics issue #25 + new Regex( + @"^(?.*)(?: |_)i(ssue) #\d+", + RegexOptions.IgnoreCase | RegexOptions.Compiled), // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) new Regex( @"^(?.*)(?: \d+)", @@ -242,11 +250,11 @@ namespace API.Parser RegexOptions.IgnoreCase | RegexOptions.Compiled), // Scott Pilgrim 02 - Scott Pilgrim vs. 
The World (2005) new Regex( - @"^(?.*)(?: |_)(?\d+)", + @"^(?.*)(?\d+)", RegexOptions.IgnoreCase | RegexOptions.Compiled), // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) new Regex( - @"^(?.*)(?\d+))", + @"^(?.*)(?\d+))", RegexOptions.IgnoreCase | RegexOptions.Compiled), // Batman & Robin the Teen Wonder #0 new Regex( @@ -284,6 +292,14 @@ namespace API.Parser new Regex( @"^(?.*)(?: |_)(c? ?)(?(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)-", RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Amazing Man Comics chapter 25 + new Regex( + @"^(?!Vol)(?.*)( |_)c(hapter)( |_)(?\d*)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), + // Amazing Man Comics issue #25 + new Regex( + @"^(?!Vol)(?.*)( |_)i(ssue)( |_) #(?\d*)", + RegexOptions.IgnoreCase | RegexOptions.Compiled), }; private static readonly Regex[] ReleaseGroupRegex = new[] From 9534618f4b9792cda861d7b4e6ed471646441feb Mon Sep 17 00:00:00 2001 From: Robbie Davis Date: Wed, 30 Jun 2021 13:40:04 -0400 Subject: [PATCH 52/55] Set dark mode to true and migration (#348) * Default to dark mode for application --- API/Entities/AppUserPreferences.cs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/API/Entities/AppUserPreferences.cs b/API/Entities/AppUserPreferences.cs index 149512e00..e78c4b015 100644 --- a/API/Entities/AppUserPreferences.cs +++ b/API/Entities/AppUserPreferences.cs @@ -57,10 +57,11 @@ namespace API.Entities /// Book Reader Option: What direction should the next/prev page buttons go /// public ReadingDirection BookReaderReadingDirection { get; set; } = ReadingDirection.LeftToRight; + /// /// UI Site Global Setting: Whether the UI should render in Dark mode or not. /// - public bool SiteDarkMode { get; set; } + public bool SiteDarkMode { get; set; } = true; From 4d41ebfc88e1a5f037f27ac4fc8be9efea54729c Mon Sep 17 00:00:00 2001 From: Joseph Milazzo Date: Wed, 30 Jun 2021 12:40:21 -0500 Subject: [PATCH 53/55] Small readme changes --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 2dda67528..b65d1f2a1 100644 --- a/README.md +++ b/README.md @@ -21,13 +21,13 @@ your reading collection with your friends and family! ## Goals - [x] Serve up Manga/Webtoons/Comics (cbr, cbz, zip/rar, 7zip, raw images) and Books (epub, mobi, azw, djvu, pdf) - [x] First class responsive readers that work great on any device (phone, tablet, desktop) -- [x] Provide a dark theme for web app +- [x] Dark and Light themes - [ ] Provide hooks into metadata providers to fetch metadata for Comics, Manga, and Books - [ ] Metadata should allow for collections, want to read integration from 3rd party services, genres. - [x] Ability to manage users, access, and ratings - [ ] Ability to sync ratings and reviews to external services - [x] Fully Accessible with active accessibility audits -- [x] Dedicated webtoon reader +- [x] Dedicated webtoon reader (in beta testing) - [ ] And so much [more...](https://github.com/Kareadita/Kavita/projects) ## Support From 20ba41c38f1ede64faf774b96d87acd74d0d5850 Mon Sep 17 00:00:00 2001 From: Robbie Davis Date: Wed, 30 Jun 2021 16:31:57 -0400 Subject: [PATCH 54/55] api url change (#349) - Changed stats api url to https - Removed paypal from readme. People can now only donate through opencollective. 
--- API/Services/Clients/StatsApiClient.cs | 2 +- README.md | 5 +---- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/API/Services/Clients/StatsApiClient.cs b/API/Services/Clients/StatsApiClient.cs index e07bec222..d56f53707 100644 --- a/API/Services/Clients/StatsApiClient.cs +++ b/API/Services/Clients/StatsApiClient.cs @@ -16,7 +16,7 @@ namespace API.Services.Clients private readonly HttpClient _client; private readonly StatsOptions _options; private readonly ILogger _logger; - private const string ApiUrl = "http://stats.kavitareader.com"; + private const string ApiUrl = "https://stats.kavitareader.com"; public StatsApiClient(HttpClient client, IOptions options, ILogger logger) { diff --git a/README.md b/README.md index b65d1f2a1..60db687a1 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,6 @@ your reading collection with your friends and family! [![Docker Pulls](https://img.shields.io/docker/pulls/kizaing/kavita.svg)](https://hub.docker.com/r/kizaing/kavita/) [![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=Kareadita_Kavita&metric=sqale_rating)](https://sonarcloud.io/dashboard?id=Kareadita_Kavita) [![Security Rating](https://sonarcloud.io/api/project_badges/measure?project=Kareadita_Kavita&metric=security_rating)](https://sonarcloud.io/dashboard?id=Kareadita_Kavita) -[![Donate via Paypal](https://img.shields.io/badge/donate-paypal-blue.svg?style=popout&logo=paypal)](https://paypal.me/majora2007?locale.x=en_US) [![Backers on Open Collective](https://opencollective.com/kavita/backers/badge.svg)](#backers) [![Sponsors on Open Collective](https://opencollective.com/kavita/sponsors/badge.svg)](#sponsors) @@ -85,9 +84,7 @@ This project exists thanks to all the people who contribute. [Contribute](CONTRI ## Donate If you like Kavita, have gotten good use out of it or feel like you want to say thanks with a few bucks, feel free to donate. Money will go towards -expenses related to Kavita. You can back us through OpenCollective. - -[![Donate via Paypal](https://img.shields.io/badge/donate-paypal-blue.svg?style=popout&logo=paypal)](https://paypal.me/majora2007?locale.x=en_US) +expenses related to Kavita. Back us through [OpenCollective](https://opencollective.com/Kavita#backer). 
## Backers From f7f983045d26e4fa4582f2b6d99d8eba4bdfe58b Mon Sep 17 00:00:00 2001 From: Kizaing Date: Wed, 30 Jun 2021 16:46:05 -0400 Subject: [PATCH 55/55] Hotfix/docker file permissions (#350) * Fixed file pathing for automated docker builds * Added back in renaming the Kavita executable * Testing new method for copying wwwroot folder * Fixed file pathing * Fixed pathing to test * Potential fix for Docker file permissions Co-authored-by: Chris Plaatjes Co-authored-by: Boxbrite User --- .github/workflows/nightly-docker.yml | 11 ++--------- .github/workflows/stable-docker.yml | 18 ++++++------------ Dockerfile | 7 +++++-- copy_runtime.sh | 3 --- entrypoint.sh | 6 +++--- 5 files changed, 16 insertions(+), 29 deletions(-) diff --git a/.github/workflows/nightly-docker.yml b/.github/workflows/nightly-docker.yml index e7c841bb3..c39180b0c 100644 --- a/.github/workflows/nightly-docker.yml +++ b/.github/workflows/nightly-docker.yml @@ -1,4 +1,4 @@ -name: CI to Docker Hub +name: Build Nightly Docker on: push: @@ -25,8 +25,6 @@ jobs: with: node-version: '14' - run: | - echo 'Checking folder structure' - ls -l cd Kavita-webui/ || exit echo 'Installing web dependencies' @@ -35,14 +33,9 @@ jobs: echo 'Building UI' npm run prod - echo 'Checking dist folder for compiling integrity' - ls -l dist - echo 'Copying back to Kavita wwwroot' rsync -a dist/ ../API/wwwroot/ - - echo 'Checking Kavita wwwroot for copying integrity' - ls -l ../API/wwwroot + cd ../ || exit - name: Compile dotnet app diff --git a/.github/workflows/stable-docker.yml b/.github/workflows/stable-docker.yml index ed28452c5..3ac5b818f 100644 --- a/.github/workflows/stable-docker.yml +++ b/.github/workflows/stable-docker.yml @@ -1,4 +1,4 @@ -name: CI to Docker Hub +name: Build Stable Docker on: push: @@ -25,20 +25,17 @@ jobs: with: node-version: '14' - run: | - echo 'Removing old wwwroot' - rm -rf API/wwwroot/* + cd Kavita-webui/ || exit echo 'Installing web dependencies' npm install + echo 'Building UI' npm run prod - echo 'Checking dist folder for compiling integrity' - ls -l dist + echo 'Copying back to Kavita wwwroot' - mkdir ../API/wwwroot - cp -r dist/* ../API/wwwroot - echo 'Checking Kavita wwwroot for copying integrity' - ls -l ../API/wwwroot + rsync -a dist/ ../API/wwwroot/ + cd ../ || exit - name: Compile dotnet app @@ -69,9 +66,6 @@ jobs: push: true tags: kizaing/kavita:latest - - name: Image digest - run: echo ${{ steps.docker_build.outputs.digest }} - - name: Image digest run: echo ${{ steps.docker_build.outputs.digest }} diff --git a/Dockerfile b/Dockerfile index 62dfe4336..4b1fb47b7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ #This Dockerfile creates a build for all architectures -#Production image +#Image that copies in the files and passes them to the main image FROM ubuntu:focal AS copytask ARG TARGETPLATFORM @@ -8,12 +8,15 @@ ARG TARGETPLATFORM #Move the output files to where they need to be RUN mkdir /files COPY _output/*.tar.gz /files/ +COPY Kavita-webui/dist /files/wwwroot COPY copy_runtime.sh /copy_runtime.sh RUN /copy_runtime.sh +#Production image FROM ubuntu:focal -COPY --from=copytask /kavita /kavita +COPY --from=copytask /Kavita /kavita +COPY --from=copytask /files/wwwroot /kavita/wwwroot #Installs program dependencies RUN apt-get update \ diff --git a/copy_runtime.sh b/copy_runtime.sh index f46a720f1..c7f2ccabd 100755 --- a/copy_runtime.sh +++ b/copy_runtime.sh @@ -7,13 +7,10 @@ set -xv if [ "$TARGETPLATFORM" == "linux/amd64" ] then tar xf /files/kavita-linux-x64.tar.gz -C / - mv /Kavita /kavita elif 
[ "$TARGETPLATFORM" == "linux/arm/v7" ] then tar xf /files/kavita-linux-arm.tar.gz -C / - mv /Kavita /kavita elif [ "$TARGETPLATFORM" == "linux/arm64" ] then tar xf /files/kavita-linux-arm64.tar.gz -C / - mv /Kavita /kavita fi diff --git a/entrypoint.sh b/entrypoint.sh index 87d10d6ec..aaa898a7c 100644 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -13,7 +13,7 @@ then rm /kavita/appsettings.json ln -s /kavita/data/appsettings.json /kavita/ else - mv /kavita/appsettings.json /kavita/data/ + mv /kavita/appsettings.json /kavita/data/ || true ln -s /kavita/data/appsettings.json /kavita/ fi @@ -55,11 +55,11 @@ then else if [ -d /kavita/data/logs ] then - touch /kavita/data/logs/kavita.log + echo "" > /kavita/data/logs/kavita.log || true ln -s /kavita/data/logs/kavita.log /kavita/ else mkdir /kavita/data/logs - touch /kavita/data/logs/kavita.log + echo "" > /kavita/data/logs/kavita.log || true ln -s /kavita/data/logs/kavita.log /kavita/ fi