diff --git a/.gitignore b/.gitignore
index bb124fc7f..584f0026e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -520,6 +520,7 @@ UI/Web/dist/
 /API/config/*.db
 /API/config/*.bak
 /API/config/*.backup
+/API/config/*.csv
 /API/config/Hangfire.db
 /API/config/Hangfire-log.db
 API/config/covers/
diff --git a/API/Data/ManualMigrations/ManualMigrateLooseLeafChapters.cs b/API/Data/ManualMigrations/ManualMigrateLooseLeafChapters.cs
new file mode 100644
index 000000000..370e189ba
--- /dev/null
+++ b/API/Data/ManualMigrations/ManualMigrateLooseLeafChapters.cs
@@ -0,0 +1,159 @@
+using System;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+using API.Entities;
+using API.Extensions;
+using API.Helpers.Builders;
+using API.Services;
+using API.Services.Tasks.Scanner.Parser;
+using Kavita.Common.EnvironmentInfo;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.Logging;
+
+namespace API.Data.ManualMigrations;
+
+
+/// <summary>
+/// v0.8.0 migration to move loose leaf chapters into their own volume and retain user progress.
+/// </summary>
+public static class MigrateLooseLeafChapters
+{
+    public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, IDirectoryService directoryService, ILogger<Program> logger)
+    {
+        if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateLooseLeafChapters"))
+        {
+            return;
+        }
+
+        logger.LogCritical(
+            "Running MigrateLooseLeafChapters migration - Please be patient, this may take some time. This is not an error");
+
+        var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync();
+        var extension = settings.EncodeMediaAs.GetExtension();
+
+        var progress = await dataContext.AppUserProgresses
+            .Join(dataContext.Chapter, p => p.ChapterId, c => c.Id, (p, c) => new UserProgressCsvRecord
+            {
+                IsSpecial = c.IsSpecial,
+                AppUserId = p.AppUserId,
+                PagesRead = p.PagesRead,
+                Range = c.Range,
+                Number = c.Number,
+                MinNumber = c.MinNumber,
+                SeriesId = p.SeriesId,
+                VolumeId = p.VolumeId,
+                ProgressId = p.Id
+            })
+            .Where(d => !d.IsSpecial)
+            .Join(dataContext.Volume, d => d.VolumeId, v => v.Id, (d, v) => new
+            {
+                ProgressRecord = d,
+                Volume = v
+            })
+            .Where(d => d.Volume.Name == "0")
+            .ToListAsync();
+
+        // First, group all the progresses into different series
+        logger.LogCritical("Migrating {Count} progress events to new Volume structure for Loose leafs - This may take over 10 minutes depending on size of DB. Please wait", progress.Count);
+        var progressesGroupedBySeries = progress
+            .GroupBy(p => p.ProgressRecord.SeriesId);
+
+        foreach (var seriesGroup in progressesGroupedBySeries)
+        {
+            // Get each series and move the loose leafs from the old volume to the new Volume
+            var seriesId = seriesGroup.Key;
+
+            // Handle All Loose Leafs
+            var looseLeafsInSeries = seriesGroup
+                .Where(p => !p.ProgressRecord.IsSpecial)
+                .ToList();
+
+            // Get distinct Volumes by Id. For each one, create it then create the progress events
+            var distinctVolumes = looseLeafsInSeries.DistinctBy(d => d.Volume.Id);
+            foreach (var distinctVolume in distinctVolumes)
+            {
+                // Create a new volume for each series with the appropriate number (-100000)
+                var chapters = await dataContext.Chapter
+                    .Where(c => c.VolumeId == distinctVolume.Volume.Id && !c.IsSpecial).ToListAsync();
+
+                var newVolume = new VolumeBuilder(Parser.LooseLeafVolume)
+                    .WithSeriesId(seriesId)
+                    .WithCreated(distinctVolume.Volume.Created)
+                    .WithLastModified(distinctVolume.Volume.LastModified)
+                    .Build();
+
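+                // Roll the chapter totals up so the new volume's pages and read-time stats match the chapters it now owns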
+                newVolume.Pages = chapters.Sum(c => c.Pages);
+                newVolume.WordCount = chapters.Sum(c => c.WordCount);
+                newVolume.MinHoursToRead = chapters.Sum(c => c.MinHoursToRead);
+                newVolume.MaxHoursToRead = chapters.Sum(c => c.MaxHoursToRead);
+                newVolume.AvgHoursToRead = chapters.Sum(c => c.AvgHoursToRead);
+
+                dataContext.Volume.Add(newVolume);
+                await dataContext.SaveChangesAsync(); // Save changes to generate the newVolumeId
+
+                // Migrate the progress events to the new volume
+                var oldVolumeProgresses = await dataContext.AppUserProgresses
+                    .Where(p => p.VolumeId == distinctVolume.Volume.Id).ToListAsync();
+                foreach (var oldProgress in oldVolumeProgresses)
+                {
+                    oldProgress.VolumeId = newVolume.Id;
+                }
+
+                logger.LogInformation("Moving {Count} chapters from Volume Id {OldVolumeId} to New Volume {NewVolumeId}",
+                    chapters.Count, distinctVolume.Volume.Id, newVolume.Id);
+
+                // Move the loose leaf chapters from the old volume to the new Volume
+                foreach (var chapter in chapters)
+                {
+                    // Move the chapter to the new volume
+                    chapter.VolumeId = newVolume.Id;
+
+                    // We need to migrate cover images as well
+                    //UpdateCoverImage(directoryService, logger, chapter, extension, newVolume);
+                }
+
+                // Persist the chapter and progress updates for this volume
+                await dataContext.SaveChangesAsync();
+            }
+        }
+
+        // Save changes after processing all series
+        if (dataContext.ChangeTracker.HasChanges())
+        {
+            await dataContext.SaveChangesAsync();
+        }
+
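+        // Record that this migration ran so it is never applied twice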
This is not an error"); + } + + private static void UpdateCoverImage(IDirectoryService directoryService, ILogger logger, Chapter chapter, + string extension, Volume newVolume) + { + var existingCover = ImageService.GetChapterFormat(chapter.Id, chapter.VolumeId) + extension; + var newCover = ImageService.GetChapterFormat(chapter.Id, newVolume.Id) + extension; + try + { + if (!chapter.CoverImageLocked) + { + // First rename existing cover + File.Copy(Path.Join(directoryService.CoverImageDirectory, existingCover), Path.Join(directoryService.CoverImageDirectory, newCover)); + chapter.CoverImage = newCover; + } + } catch (Exception ex) + { + logger.LogError(ex, "Unable to rename {OldCover} to {NewCover}, this cover will need manual refresh", existingCover, newCover); + } + } +} diff --git a/API/Data/ManualMigrations/ManualMigrateMixedSpecials.cs b/API/Data/ManualMigrations/ManualMigrateMixedSpecials.cs index c9c173eea..bba19b9ae 100644 --- a/API/Data/ManualMigrations/ManualMigrateMixedSpecials.cs +++ b/API/Data/ManualMigrations/ManualMigrateMixedSpecials.cs @@ -3,7 +3,9 @@ using System.IO; using System.Linq; using System.Threading.Tasks; using API.Entities; +using API.Extensions; using API.Helpers.Builders; +using API.Services; using API.Services.Tasks.Scanner.Parser; using Kavita.Common.EnvironmentInfo; using Microsoft.EntityFrameworkCore; @@ -21,6 +23,7 @@ public class UserProgressCsvRecord public float MinNumber { get; set; } public int SeriesId { get; set; } public int VolumeId { get; set; } + public int ProgressId { get; set; } } /// @@ -28,7 +31,7 @@ public class UserProgressCsvRecord /// public static class MigrateMixedSpecials { - public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, ILogger logger) + public static async Task Migrate(DataContext dataContext, IUnitOfWork unitOfWork, IDirectoryService directoryService, ILogger logger) { if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "ManualMigrateMixedSpecials")) { @@ -39,13 +42,13 @@ public static class MigrateMixedSpecials "Running ManualMigrateMixedSpecials migration - Please be patient, this may take some time. This is not an error"); // First, group all the progresses into different series - // Get each series and move the specials from old volume to the new Volume() - // Create a new progress event from existing and store the Id of existing progress event to delete it - // Save per series + var settings = await unitOfWork.SettingsRepository.GetSettingsDtoAsync(); + var extension = settings.EncodeMediaAs.GetExtension(); + var progress = await dataContext.AppUserProgresses .Join(dataContext.Chapter, p => p.ChapterId, c => c.Id, (p, c) => new UserProgressCsvRecord { @@ -56,10 +59,12 @@ public static class MigrateMixedSpecials Number = c.Number, MinNumber = c.MinNumber, SeriesId = p.SeriesId, - VolumeId = p.VolumeId + VolumeId = p.VolumeId, + ProgressId = p.Id }) .Where(d => d.IsSpecial || d.Number == "0") - .Join(dataContext.Volume, d => d.VolumeId, v => v.Id, (d, v) => new + .Join(dataContext.Volume, d => d.VolumeId, v => v.Id, + (d, v) => new { ProgressRecord = d, Volume = v @@ -68,18 +73,19 @@ public static class MigrateMixedSpecials .ToListAsync(); // First, group all the progresses into different series - logger.LogCritical("Migrating {Count} progress events to new Volume structure - This may take over 10 minutes depending on size of DB. 
Please wait", progress.Count); + logger.LogCritical("Migrating {Count} progress events to new Volume structure for Specials - This may take over 10 minutes depending on size of DB. Please wait", progress.Count); var progressesGroupedBySeries = progress.GroupBy(p => p.ProgressRecord.SeriesId); foreach (var seriesGroup in progressesGroupedBySeries) { // Get each series and move the specials from the old volume to the new Volume var seriesId = seriesGroup.Key; + + // Handle All Specials var specialsInSeries = seriesGroup .Where(p => p.ProgressRecord.IsSpecial) .ToList(); - // Get distinct Volumes by Id. For each one, create it then create the progress events var distinctVolumes = specialsInSeries.DistinctBy(d => d.Volume.Id); foreach (var distinctVolume in distinctVolumes) @@ -90,29 +96,43 @@ public static class MigrateMixedSpecials var newVolume = new VolumeBuilder(Parser.SpecialVolume) .WithSeriesId(seriesId) - .WithChapters(chapters) + .WithCreated(distinctVolume.Volume.Created) + .WithLastModified(distinctVolume.Volume.LastModified) .Build(); + + newVolume.Pages = chapters.Sum(c => c.Pages); + newVolume.WordCount = chapters.Sum(c => c.WordCount); + newVolume.MinHoursToRead = chapters.Sum(c => c.MinHoursToRead); + newVolume.MaxHoursToRead = chapters.Sum(c => c.MaxHoursToRead); + newVolume.AvgHoursToRead = chapters.Sum(c => c.AvgHoursToRead); + dataContext.Volume.Add(newVolume); await dataContext.SaveChangesAsync(); // Save changes to generate the newVolumeId // Migrate the progress event to the new volume - distinctVolume.ProgressRecord.VolumeId = newVolume.Id; + var oldVolumeProgresses = await dataContext.AppUserProgresses + .Where(p => p.VolumeId == distinctVolume.Volume.Id).ToListAsync(); + foreach (var oldProgress in oldVolumeProgresses) + { + oldProgress.VolumeId = newVolume.Id; + } logger.LogInformation("Moving {Count} chapters from Volume Id {OldVolumeId} to New Volume {NewVolumeId}", chapters.Count, distinctVolume.Volume.Id, newVolume.Id); - // Move the special chapters from the old volume to the new Volume - var specialChapters = await dataContext.Chapter - .Where(c => c.VolumeId == distinctVolume.ProgressRecord.VolumeId && c.IsSpecial) - .ToListAsync(); - foreach (var specialChapter in specialChapters) + // Move the special chapters from the old volume to the new Volume + foreach (var specialChapter in chapters) { // Update the VolumeId on the existing progress event specialChapter.VolumeId = newVolume.Id; + + //UpdateCoverImage(directoryService, logger, specialChapter, extension, newVolume); } await dataContext.SaveChangesAsync(); } + + } // Save changes after processing all series @@ -121,10 +141,6 @@ public static class MigrateMixedSpecials await dataContext.SaveChangesAsync(); } - // Update all Volumes with Name as "0" -> Special - logger.LogCritical("Updating all Volumes with Name 0 to SpecialNumber"); - - dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory() { @@ -137,4 +153,25 @@ public static class MigrateMixedSpecials logger.LogCritical( "Running ManualMigrateMixedSpecials migration - Completed. 
This is not an error"); } + + private static void UpdateCoverImage(IDirectoryService directoryService, ILogger logger, Chapter specialChapter, + string extension, Volume newVolume) + { + // We need to migrate cover images as well + var existingCover = ImageService.GetChapterFormat(specialChapter.Id, specialChapter.VolumeId) + extension; + var newCover = ImageService.GetChapterFormat(specialChapter.Id, newVolume.Id) + extension; + try + { + + if (!specialChapter.CoverImageLocked) + { + // First rename existing cover + File.Copy(Path.Join(directoryService.CoverImageDirectory, existingCover), Path.Join(directoryService.CoverImageDirectory, newCover)); + specialChapter.CoverImage = newCover; + } + } catch (Exception ex) + { + logger.LogError(ex, "Unable to rename {OldCover} to {NewCover}, this cover will need manual refresh", existingCover, newCover); + } + } } diff --git a/API/Data/ManualMigrations/MigrateProgressExport.cs b/API/Data/ManualMigrations/MigrateProgressExport.cs new file mode 100644 index 000000000..2482939c0 --- /dev/null +++ b/API/Data/ManualMigrations/MigrateProgressExport.cs @@ -0,0 +1,123 @@ +using System; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using API.Entities; +using API.Services; +using CsvHelper; +using CsvHelper.Configuration.Attributes; +using Kavita.Common.EnvironmentInfo; +using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Logging; + +namespace API.Data.ManualMigrations; + +public class ProgressExport +{ + [Name("Library Id")] + public int LibraryId { get; set; } + + [Name("Library Name")] + public string LibraryName { get; set; } + + [Name("Series Name")] + public string SeriesName { get; set; } + + [Name("Volume Number")] + public string VolumeRange { get; set; } + + [Name("Volume LookupName")] + public string VolumeLookupName { get; set; } + + [Name("Chapter Number")] + public string ChapterRange { get; set; } + + [Name("FileName")] + public string MangaFileName { get; set; } + + [Name("FilePath")] + public string MangaFilePath { get; set; } + + [Name("AppUser Name")] + public string AppUserName { get; set; } + + [Name("AppUser Id")] + public int AppUserId { get; set; } + + [Name("Pages Read")] + public int PagesRead { get; set; } + + [Name("BookScrollId")] + public string BookScrollId { get; set; } + + [Name("Progress Created")] + public DateTime Created { get; set; } + + [Name("Progress LastModified")] + public DateTime LastModified { get; set; } +} + +/// +/// v0.8.0 - Progress is extracted and saved in a csv +/// +public static class MigrateProgressExport +{ + public static async Task Migrate(DataContext dataContext, IDirectoryService directoryService, ILogger logger) + { + try + { + if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateProgressExport")) + { + return; + } + + logger.LogCritical( + "Running MigrateProgressExport migration - Please be patient, this may take some time. 
This is not an error"); + + var data = await dataContext.AppUserProgresses + .Join(dataContext.Series, progress => progress.SeriesId, series => series.Id, (progress, series) => new { progress, series }) + .Join(dataContext.Volume, ps => ps.progress.VolumeId, volume => volume.Id, (ps, volume) => new { ps.progress, ps.series, volume }) + .Join(dataContext.Chapter, psv => psv.progress.ChapterId, chapter => chapter.Id, (psv, chapter) => new { psv.progress, psv.series, psv.volume, chapter }) + .Join(dataContext.MangaFile, psvc => psvc.chapter.Id, mangaFile => mangaFile.ChapterId, (psvc, mangaFile) => new { psvc.progress, psvc.series, psvc.volume, psvc.chapter, mangaFile }) + .Join(dataContext.AppUser, psvcm => psvcm.progress.AppUserId, appUser => appUser.Id, (psvcm, appUser) => new + { + LibraryId = psvcm.series.LibraryId, + LibraryName = psvcm.series.Library.Name, + SeriesName = psvcm.series.Name, + VolumeRange = psvcm.volume.MinNumber + "-" + psvcm.volume.MaxNumber, + VolumeLookupName = psvcm.volume.Name, + ChapterRange = psvcm.chapter.Range, + MangaFileName = psvcm.mangaFile.FileName, + MangaFilePath = psvcm.mangaFile.FilePath, + AppUserName = appUser.UserName, + AppUserId = appUser.Id, + PagesRead = psvcm.progress.PagesRead, + BookScrollId = psvcm.progress.BookScrollId, + ProgressCreated = psvcm.progress.Created, + ProgressLastModified = psvcm.progress.LastModified + }).ToListAsync(); + + + // Write the mapped data to a CSV file + await using var writer = new StreamWriter(Path.Join(directoryService.ConfigDirectory, "progress_export.csv")); + await using var csv = new CsvWriter(writer, CultureInfo.InvariantCulture); + await csv.WriteRecordsAsync(data); + + logger.LogCritical( + "Running MigrateProgressExport migration - Completed. This is not an error"); + } + catch (Exception ex) + { + // On new installs, the db isn't setup yet, so this has nothing to do + } + + dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory() + { + Name = "MigrateProgressExport", + ProductVersion = BuildInfo.Version.ToString(), + RanAt = DateTime.UtcNow + }); + await dataContext.SaveChangesAsync(); + } +} diff --git a/API/Helpers/Builders/MangaFileBuilder.cs b/API/Helpers/Builders/MangaFileBuilder.cs index 584de4398..5387a3349 100644 --- a/API/Helpers/Builders/MangaFileBuilder.cs +++ b/API/Helpers/Builders/MangaFileBuilder.cs @@ -15,7 +15,7 @@ public class MangaFileBuilder : IEntityBuilder { _mangaFile = new MangaFile() { - FilePath = filePath, + FilePath = Parser.NormalizePath(filePath), Format = format, Pages = pages, LastModified = File.GetLastWriteTime(filePath), diff --git a/API/Helpers/Builders/VolumeBuilder.cs b/API/Helpers/Builders/VolumeBuilder.cs index 724bd894a..8d98844aa 100644 --- a/API/Helpers/Builders/VolumeBuilder.cs +++ b/API/Helpers/Builders/VolumeBuilder.cs @@ -1,4 +1,5 @@ -using System.Collections.Generic; +using System; +using System.Collections.Generic; using System.Linq; using API.Data; using API.Entities; @@ -75,4 +76,18 @@ public class VolumeBuilder : IEntityBuilder _volume.CoverImage = cover; return this; } + + public VolumeBuilder WithCreated(DateTime created) + { + _volume.Created = created; + _volume.CreatedUtc = created.ToUniversalTime(); + return this; + } + + public VolumeBuilder WithLastModified(DateTime lastModified) + { + _volume.LastModified = lastModified; + _volume.LastModifiedUtc = lastModified.ToUniversalTime(); + return this; + } } diff --git a/API/Services/Tasks/Scanner/ParseScannedFiles.cs b/API/Services/Tasks/Scanner/ParseScannedFiles.cs index 
+            var libraryRoot =
+                library.Folders.FirstOrDefault(f =>
+                    Parser.Parser.NormalizePath(folderPath).Contains(Parser.Parser.NormalizePath(f.Path)))?.Path ??
+                folderPath;
             if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedPath, forceCheck))
             {
                 result.Add(new ScanResult()
                 {
                     Files = ArraySegment<string>.Empty,
                     Folder = folderPath,
-                    LibraryRoot = folderPath,
+                    LibraryRoot = libraryRoot,
                     HasChanged = false
                 });
             }
@@ -181,7 +185,7 @@
             {
                 Files = _directoryService.ScanFiles(folderPath, fileExtensions),
                 Folder = folderPath,
-                LibraryRoot = folderPath,
+                LibraryRoot = libraryRoot,
                 HasChanged = true
             });
diff --git a/API/Services/Tasks/Scanner/ProcessSeries.cs b/API/Services/Tasks/Scanner/ProcessSeries.cs
index a6caa37de..dcddcee85 100644
--- a/API/Services/Tasks/Scanner/ProcessSeries.cs
+++ b/API/Services/Tasks/Scanner/ProcessSeries.cs
@@ -724,6 +724,7 @@ public class ProcessSeries : IProcessSeries
         existingFile.Pages = _readingItemService.GetNumberOfPages(info.FullFilePath, info.Format);
         existingFile.Extension = fileInfo.Extension.ToLowerInvariant();
         existingFile.FileName = Parser.Parser.RemoveExtensionIfSupported(existingFile.FilePath);
+        existingFile.FilePath = Parser.Parser.NormalizePath(existingFile.FilePath);
         existingFile.Bytes = fileInfo.Length;
         // We skip updating DB here with last modified time so that metadata refresh can do it
     }
diff --git a/API/Startup.cs b/API/Startup.cs
index 740e59af5..a1aec4ab6 100644
--- a/API/Startup.cs
+++ b/API/Startup.cs
@@ -255,7 +255,9 @@ public class Startup
             // v0.8.0
             await MigrateVolumeLookupName.Migrate(dataContext, unitOfWork, logger);
             await MigrateChapterNumber.Migrate(dataContext, logger);
-            await MigrateMixedSpecials.Migrate(dataContext, unitOfWork, logger);
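+            // Export a CSV snapshot of progress before the volume restructure migrations below modify it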
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, "funding": [ { "type": "individual", @@ -6571,7 +6575,8 @@ "node_modules/convert-source-map": { "version": "1.9.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", - "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==" + "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", + "dev": true }, "node_modules/cookie": { "version": "0.5.0", @@ -7473,6 +7478,7 @@ "version": "0.1.13", "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "dev": true, "optional": true, "dependencies": { "iconv-lite": "^0.6.2" @@ -7482,6 +7488,7 @@ "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, "optional": true, "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" @@ -8558,6 +8565,7 @@ "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, "hasInstallScript": true, "optional": true, "os": [ @@ -9332,6 +9340,7 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, "dependencies": { "binary-extensions": "^2.0.0" }, @@ -11115,6 +11124,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, "engines": { "node": ">=0.10.0" } @@ -12527,6 +12537,7 @@ "version": "3.6.0", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, "dependencies": { "picomatch": "^2.2.1" }, @@ -12537,7 +12548,8 @@ "node_modules/reflect-metadata": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/reflect-metadata/-/reflect-metadata-0.2.1.tgz", - "integrity": "sha512-i5lLI6iw9AU3Uu4szRNPPEkomnkjRTaVt9hy/bn5g/oSzekBSMeLZblcjP74AW0vBabqERLLIrz+gR8QYR54Tw==" + "integrity": "sha512-i5lLI6iw9AU3Uu4szRNPPEkomnkjRTaVt9hy/bn5g/oSzekBSMeLZblcjP74AW0vBabqERLLIrz+gR8QYR54Tw==", + "dev": true }, "node_modules/regenerate": { "version": "1.4.2", @@ -12989,7 +13001,7 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "devOptional": true + "dev": true }, "node_modules/sass": { "version": "1.71.1", @@ -13108,6 +13120,7 @@ "version": "7.5.4", "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, "dependencies": { "lru-cache": "^6.0.0" }, @@ 
       "version": "6.0.0",
       "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
       "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+      "dev": true,
       "dependencies": {
         "yallist": "^4.0.0"
       },
@@ -13132,7 +13146,8 @@
     "node_modules/semver/node_modules/yallist": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
-      "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
+      "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
+      "dev": true
     },
     "node_modules/send": {
       "version": "0.18.0",
@@ -14244,6 +14259,7 @@
       "version": "5.2.2",
       "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz",
       "integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==",
+      "dev": true,
       "bin": {
         "tsc": "bin/tsc",
         "tsserver": "bin/tsserver"
diff --git a/openapi.json b/openapi.json
index 58431b2cd..51afe8888 100644
--- a/openapi.json
+++ b/openapi.json
@@ -7,7 +7,7 @@
     "name": "GPL-3.0",
     "url": "https://github.com/Kareadita/Kavita/blob/develop/LICENSE"
   },
-  "version": "0.7.14.10"
+  "version": "0.7.14.5"
   },
   "servers": [
     {