Scanner Fixes (#2800)
commit 8167fc5a4f
parent 123917fbec
@@ -123,7 +123,7 @@ public class DefaultParserTests
     FullFilePath = filepath
 });

-filepath = @"E:\Manga\Beelzebub\Beelzebub_01_[Noodles].zip";
+filepath = @"E:/Manga/Beelzebub/Beelzebub_01_[Noodles].zip";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Beelzebub", Volumes = Parser.LooseLeafVolume,
@@ -132,7 +132,7 @@ public class DefaultParserTests
 });

 // Note: Lots of duplicates here. I think I can move them to the ParserTests itself
-filepath = @"E:\Manga\Ichinensei ni Nacchattara\Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
+filepath = @"E:/Manga/Ichinensei ni Nacchattara/Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Ichinensei ni Nacchattara", Volumes = "1",
@@ -140,7 +140,7 @@ public class DefaultParserTests
     FullFilePath = filepath
 });

-filepath = @"E:\Manga\Tenjo Tenge (Color)\Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
+filepath = @"E:/Manga/Tenjo Tenge (Color)/Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Tenjo Tenge {Full Contact Edition}", Volumes = "1", Edition = "",
@@ -148,7 +148,7 @@ public class DefaultParserTests
     FullFilePath = filepath
 });

-filepath = @"E:\Manga\Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)\Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
+filepath = @"E:/Manga/Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)/Akame ga KILL! ZERO v01 (2016) (Digital) (LuCaZ).cbz";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Akame ga KILL! ZERO", Volumes = "1", Edition = "",
@@ -156,7 +156,7 @@ public class DefaultParserTests
     FullFilePath = filepath
 });

-filepath = @"E:\Manga\Dorohedoro\Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
+filepath = @"E:/Manga/Dorohedoro/Dorohedoro v01 (2010) (Digital) (LostNerevarine-Empire).cbz";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Dorohedoro", Volumes = "1", Edition = "",
@@ -164,7 +164,7 @@ public class DefaultParserTests
     FullFilePath = filepath
 });

-filepath = @"E:\Manga\APOSIMZ\APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
+filepath = @"E:/Manga/APOSIMZ/APOSIMZ 040 (2020) (Digital) (danke-Empire).cbz";
 expected.Add(filepath, new ParserInfo
 {
     Series = "APOSIMZ", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
@@ -172,7 +172,7 @@ public class DefaultParserTests
     FullFilePath = filepath
 });

-filepath = @"E:\Manga\Corpse Party Musume\Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
+filepath = @"E:/Manga/Corpse Party Musume/Kedouin Makoto - Corpse Party Musume, Chapter 09.cbz";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Kedouin Makoto - Corpse Party Musume", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
@@ -180,7 +180,7 @@ public class DefaultParserTests
     FullFilePath = filepath
 });

-filepath = @"E:\Manga\Goblin Slayer\Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
+filepath = @"E:/Manga/Goblin Slayer/Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire).cbz";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Goblin Slayer - Brand New Day", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
@@ -188,7 +188,7 @@ public class DefaultParserTests
     FullFilePath = filepath
 });

-filepath = @"E:\Manga\Summer Time Rendering\Specials\Record 014 (between chapter 083 and ch084) SP11.cbr";
+filepath = @"E:/Manga/Summer Time Rendering/Specials/Record 014 (between chapter 083 and ch084) SP11.cbr";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Summer Time Rendering", Volumes = API.Services.Tasks.Scanner.Parser.Parser.SpecialVolume, Edition = "",
@@ -196,7 +196,7 @@ public class DefaultParserTests
     FullFilePath = filepath, IsSpecial = true
 });

-filepath = @"E:\Manga\Seraph of the End\Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
+filepath = @"E:/Manga/Seraph of the End/Seraph of the End - Vampire Reign 093 (2020) (Digital) (LuCaZ).cbz";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Seraph of the End - Vampire Reign", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
@@ -204,7 +204,7 @@ public class DefaultParserTests
     FullFilePath = filepath, IsSpecial = false
 });

-filepath = @"E:\Manga\Kono Subarashii Sekai ni Bakuen wo!\Vol. 00 Ch. 000.cbz";
+filepath = @"E:/Manga/Kono Subarashii Sekai ni Bakuen wo!/Vol. 00 Ch. 000.cbz";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Kono Subarashii Sekai ni Bakuen wo!", Volumes = "0", Edition = "",
@@ -212,7 +212,7 @@ public class DefaultParserTests
     FullFilePath = filepath, IsSpecial = false
 });

-filepath = @"E:\Manga\Toukyou Akazukin\Vol. 01 Ch. 001.cbz";
+filepath = @"E:/Manga/Toukyou Akazukin/Vol. 01 Ch. 001.cbz";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Toukyou Akazukin", Volumes = "1", Edition = "",
@@ -221,10 +221,10 @@ public class DefaultParserTests
 });

 // If an image is cover exclusively, ignore it
-filepath = @"E:\Manga\Seraph of the End\cover.png";
+filepath = @"E:/Manga/Seraph of the End/cover.png";
 expected.Add(filepath, null);

-filepath = @"E:\Manga\The Beginning After the End\Chapter 001.cbz";
+filepath = @"E:/Manga/The Beginning After the End/Chapter 001.cbz";
 expected.Add(filepath, new ParserInfo
 {
     Series = "The Beginning After the End", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
@@ -232,7 +232,7 @@ public class DefaultParserTests
     FullFilePath = filepath, IsSpecial = false
 });

-filepath = @"E:\Manga\Air Gear\Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz";
+filepath = @"E:/Manga/Air Gear/Air Gear Omnibus v01 (2016) (Digital) (Shadowcat-Empire).cbz";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Air Gear", Volumes = "1", Edition = "Omnibus",
@@ -240,7 +240,7 @@ public class DefaultParserTests
     FullFilePath = filepath, IsSpecial = false
 });

-filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
+filepath = @"E:/Manga/Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "",
@@ -279,17 +279,17 @@ public class DefaultParserTests
 //[Fact]
 public void Parse_ParseInfo_Manga_ImageOnly()
 {
-    // Images don't have root path as E:\Manga, but rather as the path of the folder
+    // Images don't have root path as E:/Manga, but rather as the path of the folder

     // Note: Fallback to folder will parse Monster #8 and get Monster
-    var filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg";
+    var filepath = @"E:/Manga/Monster #8/Ch. 001-016 [MangaPlus] [Digital] [amit34521]/Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]/13.jpg";
     var expectedInfo2 = new ParserInfo
     {
         Series = "Monster #8", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
         Chapters = "8", Filename = "13.jpg", Format = MangaFormat.Image,
         FullFilePath = filepath, IsSpecial = false
     };
-    var actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Monster #8", "E:/Manga", LibraryType.Manga, null);
+    var actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Monster #8", "E:/Manga", LibraryType.Manga, null);
     Assert.NotNull(actual2);
     _testOutputHelper.WriteLine($"Validating {filepath}");
     Assert.Equal(expectedInfo2.Format, actual2.Format);
@@ -307,7 +307,7 @@ public class DefaultParserTests
     Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
     _testOutputHelper.WriteLine("FullFilePath ✓");

-    filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Vol19\ch. 186\Vol. 19 p106.gif";
+    filepath = @"E:/Manga/Extra layer for no reason/Just Images the second/Vol19/ch. 186/Vol. 19 p106.gif";
     expectedInfo2 = new ParserInfo
     {
         Series = "Just Images the second", Volumes = "19", Edition = "",
@@ -315,7 +315,7 @@ public class DefaultParserTests
         FullFilePath = filepath, IsSpecial = false
     };

-    actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Manga, null);
+    actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga", LibraryType.Manga, null);
     Assert.NotNull(actual2);
     _testOutputHelper.WriteLine($"Validating {filepath}");
     Assert.Equal(expectedInfo2.Format, actual2.Format);
@@ -333,7 +333,7 @@ public class DefaultParserTests
     Assert.Equal(expectedInfo2.FullFilePath, actual2.FullFilePath);
     _testOutputHelper.WriteLine("FullFilePath ✓");

-    filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Blank Folder\Vol19\ch. 186\Vol. 19 p106.gif";
+    filepath = @"E:/Manga/Extra layer for no reason/Just Images the second/Blank Folder/Vol19/ch. 186/Vol. 19 p106.gif";
     expectedInfo2 = new ParserInfo
     {
         Series = "Just Images the second", Volumes = "19", Edition = "",
@@ -341,7 +341,7 @@ public class DefaultParserTests
         FullFilePath = filepath, IsSpecial = false
     };

-    actual2 = _defaultParser.Parse(filepath, @"E:\Manga\Extra layer for no reason\", "E:/Manga", LibraryType.Manga, null);
+    actual2 = _defaultParser.Parse(filepath, @"E:/Manga/Extra layer for no reason/", "E:/Manga", LibraryType.Manga, null);
     Assert.NotNull(actual2);
     _testOutputHelper.WriteLine($"Validating {filepath}");
     Assert.Equal(expectedInfo2.Format, actual2.Format);
@@ -448,7 +448,7 @@ public class DefaultParserTests
 });

 // Fallback test with bad naming
-filepath = @"E:\Comics\Comics\Babe\Babe Vol.1 #1-4\Babe 01.cbr";
+filepath = @"E:/Comics/Comics/Babe/Babe Vol.1 #1-4/Babe 01.cbr";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Babe", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
@@ -456,7 +456,7 @@ public class DefaultParserTests
     FullFilePath = filepath, IsSpecial = false
 });

-filepath = @"E:\Comics\Comics\Publisher\Batman the Detective (2021)\Batman the Detective - v6 - 11 - (2021).cbr";
+filepath = @"E:/Comics/Comics/Publisher/Batman the Detective (2021)/Batman the Detective - v6 - 11 - (2021).cbr";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Batman the Detective", Volumes = "6", Edition = "",
@@ -464,7 +464,7 @@ public class DefaultParserTests
     FullFilePath = filepath, IsSpecial = false
 });

-filepath = @"E:\Comics\Comics\Batman - The Man Who Laughs #1 (2005)\Batman - The Man Who Laughs #1 (2005).cbr";
+filepath = @"E:/Comics/Comics/Batman - The Man Who Laughs #1 (2005)/Batman - The Man Who Laughs #1 (2005).cbr";
 expected.Add(filepath, new ParserInfo
 {
     Series = "Batman - The Man Who Laughs", Volumes = API.Services.Tasks.Scanner.Parser.Parser.LooseLeafVolume, Edition = "",
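Every hunk in this test file makes the same change: the expected FullFilePath values move from Windows-style backslashes to forward slashes, because the parsers below now normalize paths before storing them. A minimal sketch of what that normalization amounts to (illustrative only, not Kavita's actual Parser.NormalizePath implementation):

using System;

public static class PathNormalizationSketch
{
    // Assumption: normalization collapses separators to forward slashes so the
    // same file scanned on Windows and Linux produces one canonical key.
    public static string Normalize(string path) =>
        string.IsNullOrEmpty(path) ? path : path.Replace('\\', '/');

    public static void Main()
    {
        Console.WriteLine(Normalize(@"E:\Manga\Beelzebub\Beelzebub_01_[Noodles].zip"));
        // -> E:/Manga/Beelzebub/Beelzebub_01_[Noodles].zip
    }
}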
@@ -166,11 +166,36 @@ public class LibraryController : BaseApiController
     return Ok(_directoryService.ListDirectory(path));
 }

+/// <summary>
+/// Return a specific library
+/// </summary>
+/// <returns></returns>
+[Authorize(Policy = "RequireAdminRole")]
+[HttpGet]
+public async Task<ActionResult<LibraryDto?>> GetLibrary(int libraryId)
+{
+    var username = User.GetUsername();
+    if (string.IsNullOrEmpty(username)) return Unauthorized();
+
+    var cacheKey = CacheKey + username;
+    var result = await _libraryCacheProvider.GetAsync<IEnumerable<LibraryDto>>(cacheKey);
+    if (result.HasValue)
+    {
+        return Ok(result.Value.FirstOrDefault(l => l.Id == libraryId));
+    }
+
+    var ret = _unitOfWork.LibraryRepository.GetLibraryDtosForUsernameAsync(username).ToList();
+    await _libraryCacheProvider.SetAsync(CacheKey, ret, TimeSpan.FromHours(24));
+    _logger.LogDebug("Caching libraries for {Key}", cacheKey);
+
+    return Ok(ret.Find(l => l.Id == libraryId));
+}
+
 /// <summary>
 /// Return all libraries in the Server
 /// </summary>
 /// <returns></returns>
-[HttpGet]
+[HttpGet("libraries")]
 public async Task<ActionResult<IEnumerable<LibraryDto>>> GetLibraries()
 {
     var username = User.GetUsername();
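The new GetLibrary endpoint above is a cache-aside read: check the per-user cache, fall back to the repository, then cache the list for 24 hours. One detail worth noting: the hunk reads from the per-user cacheKey but writes back under the base CacheKey, which looks unintentional. A stripped-down sketch of the pattern (ICache, LibraryDto, and loadForUser are stand-ins here, not Kavita's types):

using System;
using System.Collections.Generic;
using System.Threading.Tasks;

public record LibraryDto(int Id, string Name);

public interface ICache
{
    Task<List<LibraryDto>?> GetAsync(string key);
    Task SetAsync(string key, List<LibraryDto> value, TimeSpan ttl);
}

public class CachedLibraryReader(ICache cache, Func<string, Task<List<LibraryDto>>> loadForUser)
{
    private const string CacheKeyPrefix = "library_";

    public async Task<LibraryDto?> GetLibrary(string username, int libraryId)
    {
        var cacheKey = CacheKeyPrefix + username;                 // cache is per user, not global
        var cached = await cache.GetAsync(cacheKey);
        if (cached != null) return cached.Find(l => l.Id == libraryId);

        var libraries = await loadForUser(username);              // repository fallback
        await cache.SetAsync(cacheKey, libraries, TimeSpan.FromHours(24)); // write back the key we read
        return libraries.Find(l => l.Id == libraryId);
    }
}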
@@ -221,18 +221,18 @@ public class ServerController : BaseApiController
 /// </summary>
 /// <returns></returns>
 [HttpGet("jobs")]
-public ActionResult<IEnumerable<JobDto>> GetJobs()
+public async Task<ActionResult<IEnumerable<JobDto>>> GetJobs()
 {
-    var recurringJobs = JobStorage.Current.GetConnection().GetRecurringJobs().Select(
-        dto =>
-        new JobDto() {
-            Id = dto.Id,
-            Title = dto.Id.Replace('-', ' '),
-            Cron = dto.Cron,
-            LastExecutionUtc = dto.LastExecution.HasValue ? new DateTime(dto.LastExecution.Value.Ticks, DateTimeKind.Utc) : null
-        });
+    var jobDtoTasks = JobStorage.Current.GetConnection().GetRecurringJobs().Select(async dto =>
+        new JobDto()
+        {
+            Id = dto.Id,
+            Title = await _localizationService.Translate(User.GetUserId(), dto.Id),
+            Cron = dto.Cron,
+            LastExecutionUtc = dto.LastExecution.HasValue ? new DateTime(dto.LastExecution.Value.Ticks, DateTimeKind.Utc) : null
+        });

-    return Ok(recurringJobs);
+    return Ok(await Task.WhenAll(jobDtoTasks));
 }

 /// <summary>
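The GetJobs rewrite swaps a synchronous projection (Title derived from dto.Id.Replace) for an async one that asks the localization service for each title. Because an async lambda inside Select yields IEnumerable<Task<JobDto>> rather than finished DTOs, the results have to be materialized with Task.WhenAll before being returned. The same mechanism in isolation (TranslateAsync is a stand-in for _localizationService.Translate):

using System;
using System.Linq;
using System.Threading.Tasks;

public static class AsyncProjection
{
    // Stand-in for the localization lookup; any Task<string> works here.
    private static Task<string> TranslateAsync(string key) => Task.FromResult(key.Replace('-', ' '));

    public static async Task Main()
    {
        var ids = new[] { "scan-libraries", "check-updates", "backup" };

        // An async lambda in Select produces IEnumerable<Task<string>>, not strings...
        var titleTasks = ids.Select(async id => await TranslateAsync(id));

        // ...so the results must be awaited as a batch before they can be returned.
        var titles = await Task.WhenAll(titleTasks);
        Console.WriteLine(string.Join(", ", titles));
    }
}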
API/Data/ManualMigrations/MigrateMangaFilePath.cs (new file, 45 lines)
@@ -0,0 +1,45 @@
+using System;
+using System.Threading.Tasks;
+using API.Entities;
+using API.Services.Tasks.Scanner.Parser;
+using Kavita.Common.EnvironmentInfo;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.Logging;
+
+namespace API.Data.ManualMigrations;
+
+/// <summary>
+/// v0.8.0 ensured that MangaFile Path is normalized. This will normalize existing data to avoid churn.
+/// </summary>
+public static class MigrateMangaFilePath
+{
+    public static async Task Migrate(DataContext dataContext, ILogger<Program> logger)
+    {
+        if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateMangaFilePath"))
+        {
+            return;
+        }
+
+        logger.LogCritical(
+            "Running MigrateMangaFilePath migration - Please be patient, this may take some time. This is not an error");
+
+
+        foreach(var file in dataContext.MangaFile)
+        {
+            file.FilePath = Parser.NormalizePath(file.FilePath);
+        }
+
+        await dataContext.SaveChangesAsync();
+
+        dataContext.ManualMigrationHistory.Add(new ManualMigrationHistory()
+        {
+            Name = "MigrateMangaFilePath",
+            ProductVersion = BuildInfo.Version.ToString(),
+            RanAt = DateTime.UtcNow
+        });
+        await dataContext.SaveChangesAsync();
+
+        logger.LogCritical(
+            "Running MigrateMangaFilePath migration - Completed. This is not an error");
+    }
+}
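MigrateMangaFilePath follows the repository's run-once manual-migration pattern: check ManualMigrationHistory for the migration's name, do the work, then record a history row so later startups skip it. The skeleton of that pattern, reduced to stand-in types (IMigrationLog is an illustration, not Kavita's API):

using System;
using System.Threading.Tasks;

public interface IMigrationLog
{
    Task<bool> HasRun(string name);
    Task MarkRan(string name);
}

public static class RunOnceMigration
{
    public static async Task Migrate(IMigrationLog log, Func<Task> body, string name)
    {
        if (await log.HasRun(name)) return; // already applied: no-op on every later startup

        await body();                       // the actual data fix, e.g. normalizing FilePath
        await log.MarkRan(name);            // record completion so the body never re-runs
    }
}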
@@ -20,6 +20,7 @@ public static class MigrateWantToReadExport
 {
     try
     {
+
         if (await dataContext.ManualMigrationHistory.AnyAsync(m => m.Name == "MigrateWantToReadExport"))
         {
             return;
@@ -1,4 +1,5 @@
+using System;
 using System.Collections.Generic;
 using API.Entities;

 namespace API.Helpers;
@@ -46,6 +47,7 @@ public static class OrderableHelper

 public static void ReorderItems(List<ReadingListItem> items, int readingListItemId, int toPosition)
 {
+    if (toPosition < 0) throw new ArgumentException("toPosition cannot be less than 0");
     var item = items.Find(r => r.Id == readingListItemId);
     if (item != null)
     {
@@ -200,8 +200,19 @@
     "volume-num": "Volume {0}",
     "book-num": "Book {0}",
     "issue-num": "Issue {0}{1}",
-    "chapter-num": "Chapter {0}"
+    "chapter-num": "Chapter {0}",
+
+    "check-updates": "Check Updates",
+    "license-check": "License Check",
+    "process-scrobbling-events": "Process Scrobbling Events",
+    "report-stats": "Report Stats",
+    "check-scrobbling-tokens": "Check Scrobbling Tokens",
+    "cleanup": "Cleanup",
+    "process-processed-scrobbling-events": "Process Processed Scrobbling Events",
+    "remove-from-want-to-read": "Want to Read Cleanup",
+    "scan-libraries": "Scan Libraries",
+    "kavita+-data-refresh": "Kavita+ Data Refresh",
+    "backup": "Backup",
+    "update-yearly-stats": "Update Yearly Stats"

 }
@@ -88,7 +88,7 @@ public class Program
 }

 // Apply Before manual migrations that need to run before actual migrations
-try
+if (isDbCreated)
 {
     Task.Run(async () =>
     {
@@ -96,17 +96,22 @@ public class Program
         logger.LogInformation("Running Migrations");

         // v0.7.14
-        await MigrateWantToReadExport.Migrate(context, directoryService, logger);
+        try
+        {
+            await MigrateWantToReadExport.Migrate(context, directoryService, logger);
+        }
+        catch (Exception ex)
+        {
+            /* Swallow */
+        }

         await unitOfWork.CommitAsync();
         logger.LogInformation("Running Migrations - complete");
     }).GetAwaiter()
     .GetResult();
 }
-catch (Exception ex)
-{
-    logger.LogCritical(ex, "An error occurred during migration");
-}

 await context.Database.MigrateAsync();
@@ -353,7 +353,15 @@ public class ArchiveService : IArchiveService
 {
     var tempPath = Path.Join(tempLocation, _directoryService.FileSystem.Path.GetFileNameWithoutExtension(_directoryService.FileSystem.FileInfo.New(path).Name));
     progressCallback(Tuple.Create(_directoryService.FileSystem.FileInfo.New(path).Name, (1.0f * totalFiles) / count));
-    ExtractArchive(path, tempPath);
+    if (Tasks.Scanner.Parser.Parser.IsArchive(path))
+    {
+        ExtractArchive(path, tempPath);
+    }
+    else
+    {
+        _directoryService.CopyFileToDirectory(path, tempPath);
+    }

     count++;
 }
@@ -392,7 +400,7 @@ public class ArchiveService : IArchiveService
     return false;
 }

-if (Tasks.Scanner.Parser.Parser.IsArchive(archivePath) || Tasks.Scanner.Parser.Parser.IsEpub(archivePath)) return true;
+if (Tasks.Scanner.Parser.Parser.IsArchive(archivePath)) return true;

 _logger.LogWarning("Archive {ArchivePath} is not a valid archive", archivePath);
 return false;
@@ -781,7 +781,7 @@ public class BookService : IBookService
 /// <returns></returns>
 public ParserInfo? ParseInfo(string filePath)
 {
-    if (!Parser.IsEpub(filePath)) return null;
+    if (!Parser.IsEpub(filePath) || !_directoryService.FileSystem.File.Exists(filePath)) return null;

     try
     {
@@ -848,7 +848,7 @@ public class BookService : IBookService
     Format = MangaFormat.Epub,
     Filename = Path.GetFileName(filePath),
     Title = specialName?.Trim() ?? string.Empty,
-    FullFilePath = filePath,
+    FullFilePath = Parser.NormalizePath(filePath),
     IsSpecial = false,
     Series = series.Trim(),
     SeriesSort = series.Trim(),
@@ -870,7 +870,7 @@ public class BookService : IBookService
     Format = MangaFormat.Epub,
     Filename = Path.GetFileName(filePath),
     Title = epubBook.Title.Trim(),
-    FullFilePath = filePath,
+    FullFilePath = Parser.NormalizePath(filePath),
     IsSpecial = false,
     Series = epubBook.Title.Trim(),
     Volumes = Parser.LooseLeafVolume,
@@ -440,22 +440,25 @@ public class ScrobblingService : IScrobblingService
 // Might want to log this under ScrobbleError
 if (response.ErrorMessage != null && response.ErrorMessage.Contains("Too Many Requests"))
 {
-    _logger.LogInformation("Hit Too many requests, sleeping to regain requests");
+    _logger.LogInformation("Hit Too many requests, sleeping to regain requests and retrying");
     await Task.Delay(TimeSpan.FromMinutes(10));
-} else if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unauthorized"))
+    return await PostScrobbleUpdate(data, license, evt);
+}
+if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unauthorized"))
 {
     _logger.LogCritical("Kavita+ responded with Unauthorized. Please check your subscription");
     await _licenseService.HasActiveLicense(true);
     evt.IsErrored = true;
     evt.ErrorDetails = "Kavita+ subscription no longer active";
     throw new KavitaException("Kavita+ responded with Unauthorized. Please check your subscription");
-} else if (response.ErrorMessage != null && response.ErrorMessage.Contains("Access token is invalid"))
+}
+if (response.ErrorMessage != null && response.ErrorMessage.Contains("Access token is invalid"))
 {
     evt.IsErrored = true;
     evt.ErrorDetails = AccessTokenErrorMessage;
     throw new KavitaException("Access token is invalid");
 }
-else if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unknown Series"))
+if (response.ErrorMessage != null && response.ErrorMessage.Contains("Unknown Series"))
 {
     // Log the Series name and Id in ScrobbleErrors
     _logger.LogInformation("Kavita+ was unable to match the series");
@@ -490,10 +493,6 @@ public class ScrobblingService : IScrobblingService
     evt.IsErrored = true;
     evt.ErrorDetails = "Review was unable to be saved due to upstream requirements";
 }
-
-evt.IsErrored = true;
-_logger.LogError("Scrobbling failed due to {ErrorMessage}: {SeriesName}", response.ErrorMessage, data.SeriesName);
-throw new KavitaException($"Scrobbling failed due to {response.ErrorMessage}: {data.SeriesName}");
 }

 return response.RateLeft;
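The Too Many Requests branch now sleeps ten minutes and retries by calling PostScrobbleUpdate recursively, with no cap on how many times it will do so. A bounded variant is a common hardening for this shape of retry; the sketch below is an alternative, not what the commit ships (sendAsync is a stand-in for the real request):

using System;
using System.Threading.Tasks;

public static class BoundedRetrySketch
{
    public static async Task<int> PostWithRetry(
        Func<Task<(string? Error, int RateLeft)>> sendAsync, int maxAttempts = 3)
    {
        for (var attempt = 1; attempt <= maxAttempts; attempt++)
        {
            var response = await sendAsync();
            if (response.Error == null || !response.Error.Contains("Too Many Requests"))
                return response.RateLeft;

            if (attempt == maxAttempts) break;          // give up instead of looping forever
            await Task.Delay(TimeSpan.FromMinutes(10)); // same back-off the commit uses
        }
        throw new InvalidOperationException("Still rate limited after retries");
    }
}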
@@ -38,11 +38,12 @@ public class ReadingItemService : IReadingItemService
     _directoryService = directoryService;
     _logger = logger;

-    _comicVineParser = new ComicVineParser(directoryService);
     _imageParser = new ImageParser(directoryService);
-    _bookParser = new BookParser(directoryService, bookService, _basicParser);
-    _pdfParser = new PdfParser(directoryService);
     _basicParser = new BasicParser(directoryService, _imageParser);
+    _bookParser = new BookParser(directoryService, bookService, _basicParser);
+    _comicVineParser = new ComicVineParser(directoryService);
+    _pdfParser = new PdfParser(directoryService);
+
 }

 /// <summary>
@@ -73,14 +74,22 @@ public class ReadingItemService : IReadingItemService
 /// <param name="type">Library type to determine parsing to perform</param>
 public ParserInfo? ParseFile(string path, string rootPath, string libraryRoot, LibraryType type)
 {
-    var info = Parse(path, rootPath, libraryRoot, type);
-    if (info == null)
+    try
     {
-        _logger.LogError("Unable to parse any meaningful information out of file {FilePath}", path);
+        var info = Parse(path, rootPath, libraryRoot, type);
+        if (info == null)
+        {
+            _logger.LogError("Unable to parse any meaningful information out of file {FilePath}", path);
+            return null;
+        }
+
+        return info;
+    }
+    catch (Exception ex)
+    {
+        _logger.LogError(ex, "There was an exception when parsing file {FilePath}", path);
         return null;
     }
-
-    return info;
 }

 /// <summary>
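The constructor reorder in the first hunk is a real fix, not cosmetics: BookParser receives the BasicParser instance as a constructor argument, and in the old order _bookParser was built before _basicParser had been assigned, so it captured null. Reduced to the essentials (illustrative types, not Kavita's):

public class Basic { }

public class Book(Basic fallback)
{
    public Basic Fallback { get; } = fallback;
}

public class BrokenOrder
{
    private readonly Book _book;
    private readonly Basic _basic;

    public BrokenOrder()
    {
        _book = new Book(_basic); // _basic is still null here!
        _basic = new Basic();
    }
}

The BookParser hunk further down tightens the parameter type from IDefaultParser to BasicParser, which makes this dependency explicit.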
@@ -578,6 +578,13 @@ public class SeriesService : ISeriesService
     return !chapter.IsSpecial && chapter.MinNumber.IsNot(Parser.DefaultChapterNumber);
 }

+/// <summary>
+/// Should the volume be included and if so, this renames
+/// </summary>
+/// <param name="volume"></param>
+/// <param name="libraryType"></param>
+/// <param name="volumeLabel"></param>
+/// <returns></returns>
 public static bool RenameVolumeName(VolumeDto volume, LibraryType libraryType, string volumeLabel = "Volume")
 {
     if (libraryType is LibraryType.Book or LibraryType.LightNovel)
@@ -336,7 +336,7 @@ public class TaskScheduler : ITaskScheduler
     _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
     BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, force, true));
     // When we do a scan, force cache to re-unpack in case page numbers change
-    BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheAndTempDirectories());
+    BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheDirectory());
 }

 public void TurnOnScrobbling(int userId = 0)
@@ -104,8 +104,13 @@ public class BackupService : IBackupService
     _directoryService.ExistOrCreate(tempDirectory);
     _directoryService.ClearDirectory(tempDirectory);

     await SendProgress(0.1F, "Copying config files");
     _directoryService.CopyFilesToDirectory(
-        _backupFiles.Select(file => _directoryService.FileSystem.Path.Join(_directoryService.ConfigDirectory, file)).ToList(), tempDirectory);
+        _backupFiles.Select(file => _directoryService.FileSystem.Path.Join(_directoryService.ConfigDirectory, file)), tempDirectory);
+
+    // Copy any csv's as those are used for manual migrations
+    _directoryService.CopyFilesToDirectory(
+        _directoryService.GetFilesWithCertainExtensions(_directoryService.ConfigDirectory, @"\.csv"), tempDirectory);

     await SendProgress(0.2F, "Copying logs");
     CopyLogsToBackupDirectory(tempDirectory);
@@ -20,6 +20,7 @@ public interface ICleanupService
     Task Cleanup();
     Task CleanupDbEntries();
     void CleanupCacheAndTempDirectories();
+    void CleanupCacheDirectory();
     Task DeleteSeriesCoverImages();
     Task DeleteChapterCoverImages();
     Task DeleteTagCoverImages();
@@ -178,6 +179,23 @@ public class CleanupService : ICleanupService
     _logger.LogInformation("Cache and temp directory purged");
 }

+public void CleanupCacheDirectory()
+{
+    _logger.LogInformation("Performing cleanup of Cache directories");
+    _directoryService.ExistOrCreate(_directoryService.CacheDirectory);
+
+    try
+    {
+        _directoryService.ClearDirectory(_directoryService.CacheDirectory);
+    }
+    catch (Exception ex)
+    {
+        _logger.LogError(ex, "There was an issue deleting one or more folders/files during cleanup");
+    }
+
+    _logger.LogInformation("Cache directory purged");
+}
+
 /// <summary>
 /// Removes Database backups older than configured total backups. If all backups are older than total backups days, only the latest is kept.
 /// </summary>
@@ -170,6 +170,7 @@ public class ParseScannedFiles
     library.Folders.FirstOrDefault(f =>
         Parser.Parser.NormalizePath(folderPath).Contains(Parser.Parser.NormalizePath(f.Path)))?.Path ??
     folderPath;
+
 if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedPath, forceCheck))
 {
     result.Add(new ScanResult()
@@ -313,6 +314,7 @@ public class ParseScannedFiles
 await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Starting", library.Name, ProgressEventType.Started));

 var processedScannedSeries = new List<ScannedSeriesResult>();
+//var processedScannedSeries = new ConcurrentBag<ScannedSeriesResult>();
 foreach (var folderPath in folders)
 {
     try
@@ -321,45 +323,15 @@ public class ParseScannedFiles

     foreach (var scanResult in scanResults)
     {
-        // scanResult is updated with the parsed infos
-        await ProcessScanResult(scanResult, seriesPaths, library);
-
-        // We now have all the parsed infos from the scan result, perform any merging that is necessary and post processing steps
-        var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
-
-        // Merge any series together (like Nagatoro/nagator.cbz, japanesename.cbz) -> Nagator series
-        MergeLocalizedSeriesWithSeries(scanResult.ParserInfos);
-
-        // Combine everything into scannedSeries
-        foreach (var info in scanResult.ParserInfos)
-        {
-            try
-            {
-                TrackSeries(scannedSeries, info);
-            }
-            catch (Exception ex)
-            {
-                _logger.LogError(ex,
-                    "[ScannerService] There was an exception that occurred during tracking {FilePath}. Skipping this file",
-                    info?.FullFilePath);
-            }
-        }
-
-        foreach (var series in scannedSeries.Keys)
-        {
-            if (scannedSeries[series].Count <= 0) continue;
-
-            UpdateSortOrder(scannedSeries, series);
-
-            processedScannedSeries.Add(new ScannedSeriesResult()
-            {
-                HasChanged = scanResult.HasChanged,
-                ParsedSeries = series,
-                ParsedInfos = scannedSeries[series]
-            });
-        }
+        await ParseAndTrackSeries(library, seriesPaths, scanResult, processedScannedSeries);
     }

+    // This reduced a 1.1k series networked scan by a little more than 1 hour, but the order series were added to Kavita was not alphabetical
+    // await Task.WhenAll(scanResults.Select(async scanResult =>
+    // {
+    //     await ParseAndTrackSeries(library, seriesPaths, scanResult, processedScannedSeries);
+    // }));
+
 }
 catch (ArgumentException ex)
 {
@@ -369,10 +341,52 @@ public class ParseScannedFiles

 await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Done", library.Name, ProgressEventType.Ended));

-return processedScannedSeries;
+return processedScannedSeries.ToList();

 }

+private async Task ParseAndTrackSeries(Library library, IDictionary<string, IList<SeriesModified>> seriesPaths, ScanResult scanResult,
+    List<ScannedSeriesResult> processedScannedSeries)
+{
+    // scanResult is updated with the parsed infos
+    await ProcessScanResult(scanResult, seriesPaths, library); // NOTE: This may be able to be parallelized
+
+    // We now have all the parsed infos from the scan result, perform any merging that is necessary and post processing steps
+    var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
+
+    // Merge any series together (like Nagatoro/nagator.cbz, japanesename.cbz) -> Nagator series
+    MergeLocalizedSeriesWithSeries(scanResult.ParserInfos);
+
+    // Combine everything into scannedSeries
+    foreach (var info in scanResult.ParserInfos)
+    {
+        try
+        {
+            TrackSeries(scannedSeries, info);
+        }
+        catch (Exception ex)
+        {
+            _logger.LogError(ex,
+                "[ScannerService] There was an exception that occurred during tracking {FilePath}. Skipping this file",
+                info?.FullFilePath);
+        }
+    }
+
+    foreach (var series in scannedSeries.Keys)
+    {
+        if (scannedSeries[series].Count <= 0) continue;
+
+        UpdateSortOrder(scannedSeries, series);
+
+        processedScannedSeries.Add(new ScannedSeriesResult()
+        {
+            HasChanged = scanResult.HasChanged,
+            ParsedSeries = series,
+            ParsedInfos = scannedSeries[series]
+        });
+    }
+}
+
 /// <summary>
 /// For a given ScanResult, sets the ParserInfos on the result
 /// </summary>
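The commented-out Task.WhenAll block above records why the loop stayed sequential: parallel tasks append to the shared results as they complete, so series stop arriving in alphabetical order (and a plain List<T> is not safe to Add from concurrent tasks, hence the commented ConcurrentBag). A self-contained demonstration of the ordering difference:

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

public static class OrderingDemo
{
    public static async Task Main()
    {
        var names = new[] { "Akame", "Beelzebub", "Dorohedoro" };

        // Sequential awaiting (what the code above does): insertion order == input order.
        var ordered = new List<string>();
        foreach (var name in names)
        {
            await Task.Delay(Random.Shared.Next(50));
            ordered.Add(name);
        }
        Console.WriteLine(string.Join(", ", ordered)); // always Akame, Beelzebub, Dorohedoro

        // Parallel variant (the commented-out path): needs a thread-safe collection,
        // and items land in completion order, not input order.
        var bag = new ConcurrentBag<string>();
        await Task.WhenAll(names.Select(async name =>
        {
            await Task.Delay(Random.Shared.Next(50));
            bag.Add(name);
        }));
        Console.WriteLine(string.Join(", ", bag)); // order varies run to run
    }
}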
@@ -27,7 +27,7 @@ public class BasicParser(IDirectoryService directoryService, IDefaultParser imag
     Filename = Path.GetFileName(filePath),
     Format = Parser.ParseFormat(filePath),
     Title = Parser.RemoveExtensionIfSupported(fileName),
-    FullFilePath = filePath,
+    FullFilePath = Parser.NormalizePath(filePath),
     Series = string.Empty,
     ComicInfo = comicInfo
 };
@@ -3,13 +3,15 @@ using API.Entities.Enums;

 namespace API.Services.Tasks.Scanner.Parser;

-public class BookParser(IDirectoryService directoryService, IBookService bookService, IDefaultParser basicParser) : DefaultParser(directoryService)
+public class BookParser(IDirectoryService directoryService, IBookService bookService, BasicParser basicParser) : DefaultParser(directoryService)
 {
     public override ParserInfo Parse(string filePath, string rootPath, string libraryRoot, LibraryType type, ComicInfo comicInfo = null)
     {
         var info = bookService.ParseInfo(filePath);
         if (info == null) return null;

+        info.ComicInfo = comicInfo;
+
         // This catches when original library type is Manga/Comic and when parsing with non
         if (Parser.ParseVolume(info.Series) != Parser.LooseLeafVolume) // Shouldn't this be info.Volume != DefaultVolume?
         {
@@ -34,7 +34,7 @@ public class ComicVineParser(IDirectoryService directoryService) : DefaultParser
     Filename = Path.GetFileName(filePath),
     Format = Parser.ParseFormat(filePath),
     Title = Parser.RemoveExtensionIfSupported(fileName)!,
-    FullFilePath = filePath,
+    FullFilePath = Parser.NormalizePath(filePath),
     Series = string.Empty,
     ComicInfo = comicInfo,
     Chapters = Parser.ParseComicChapter(fileName),
@@ -102,4 +102,33 @@ public class ComicVineParser(IDirectoryService directoryService) : DefaultParser
 {
     return type == LibraryType.ComicVine;
 }
+
+private new static void UpdateFromComicInfo(ParserInfo info)
+{
+    if (info.ComicInfo == null) return;
+
+    if (!string.IsNullOrEmpty(info.ComicInfo.Volume))
+    {
+        info.Volumes = info.ComicInfo.Volume;
+    }
+    if (string.IsNullOrEmpty(info.LocalizedSeries) && !string.IsNullOrEmpty(info.ComicInfo.LocalizedSeries))
+    {
+        info.LocalizedSeries = info.ComicInfo.LocalizedSeries.Trim();
+    }
+    if (!string.IsNullOrEmpty(info.ComicInfo.Number))
+    {
+        info.Chapters = info.ComicInfo.Number;
+        if (info.IsSpecial && Parser.DefaultChapter != info.Chapters)
+        {
+            info.IsSpecial = false;
+            info.Volumes = $"{Parser.SpecialVolumeNumber}";
+        }
+    }
+
+    // Patch is SeriesSort from ComicInfo
+    if (!string.IsNullOrEmpty(info.ComicInfo.TitleSort))
+    {
+        info.SeriesSort = info.ComicInfo.TitleSort.Trim();
+    }
+}
 }
@@ -21,7 +21,7 @@ public class ImageParser(IDirectoryService directoryService) : DefaultParser(dir
     ComicInfo = comicInfo,
     Format = MangaFormat.Image,
     Filename = Path.GetFileName(filePath),
-    FullFilePath = filePath,
+    FullFilePath = Parser.NormalizePath(filePath),
     Title = fileName,
 };
 ParseFromFallbackFolders(filePath, libraryRoot, LibraryType.Image, ref ret);
@@ -14,7 +14,7 @@ public class PdfParser(IDirectoryService directoryService) : DefaultParser(direc
     Filename = Path.GetFileName(filePath),
     Format = Parser.ParseFormat(filePath),
     Title = Parser.RemoveExtensionIfSupported(fileName)!,
-    FullFilePath = filePath,
+    FullFilePath = Parser.NormalizePath(filePath),
     Series = string.Empty,
     ComicInfo = comicInfo,
     Chapters = type == LibraryType.Comic
@@ -203,15 +203,16 @@ public class ProcessSeries : IProcessSeries

 // Process reading list after commit as we need to commit per list
-BackgroundJob.Enqueue(() => _readingListService.CreateReadingListsFromSeries(library.Id, series.Id));
+await _readingListService.CreateReadingListsFromSeries(library.Id, series.Id);

 if (seriesAdded)
 {
     // See if any recommendations can link up to the series and pre-fetch external metadata for the series
     _logger.LogInformation("Linking up External Recommendations new series (if applicable)");
-    BackgroundJob.Enqueue(() =>
-        _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type));
+    // BackgroundJob.Enqueue(() =>
+    //     _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type));
+    await _externalMetadataService.GetNewSeriesData(series.Id, series.Library.Type);

     await _eventHub.SendMessageAsync(MessageFactory.SeriesAdded,
         MessageFactory.SeriesAddedEvent(series.Id, series.Name, series.LibraryId), false);
@@ -232,9 +233,11 @@ public class ProcessSeries : IProcessSeries

     var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
     await _metadataService.GenerateCoversForSeries(series, settings.EncodeMediaAs, settings.CoverImageSize);
-    BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate));
+    // BackgroundJob.Enqueue(() => _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate));
+    await _wordCountAnalyzerService.ScanSeries(series.LibraryId, series.Id, forceUpdate);
 }


 private async Task ReportDuplicateSeriesLookup(Library library, ParserInfo firstInfo, Exception ex)
 {
     var seriesCollisions = await _unitOfWork.SeriesRepository.GetAllSeriesByAnyName(firstInfo.LocalizedSeries, string.Empty, library.Id, firstInfo.Format);
@@ -581,7 +584,7 @@ public class ProcessSeries : IProcessSeries
 {
     // TODO: Push this to UI in some way
     if (!ex.Message.Equals("Sequence contains more than one matching element")) throw;
-    _logger.LogCritical("[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
+    _logger.LogCritical(ex, "[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
     throw new KavitaException(
         $"Kavita found corrupted volume entries on {series.Name}. Please delete the series from Kavita via UI and rescan");
 }
@@ -705,7 +708,7 @@ public class ProcessSeries : IProcessSeries
 {
     // Ensure we remove any files that no longer exist AND order
     existingChapter.Files = existingChapter.Files
-        .Where(f => parsedInfos.Any(p => p.FullFilePath == f.FilePath))
+        .Where(f => parsedInfos.Any(p => Parser.Parser.NormalizePath(p.FullFilePath) == Parser.Parser.NormalizePath(f.FilePath)))
         .OrderByNatural(f => f.FilePath).ToList();
     existingChapter.Pages = existingChapter.Files.Sum(f => f.Pages);
 }
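The final hunk above replaces raw string equality on file paths with comparison of normalized forms. This matters when the database still holds backslash paths from an older scan while the parsers now emit forward slashes, which is exactly the churn MigrateMangaFilePath exists to remove. In isolation:

using System;

// Raw equality misses the match when only the separators differ.
var stored = @"E:\Manga\Dorohedoro\Dorohedoro v01.cbz"; // e.g. written by an older scan
var parsed = "E:/Manga/Dorohedoro/Dorohedoro v01.cbz";  // produced by the updated parsers

Console.WriteLine(stored == parsed);                                       // False
Console.WriteLine(stored.Replace('\\', '/') == parsed.Replace('\\', '/')); // True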
@@ -325,7 +325,7 @@ public class ScannerService : IScannerService
     await _metadataService.RemoveAbandonedMetadataKeys();

     BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(existingChapterIdsToClean));
-    BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.TempDirectory));
+    BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.CacheDirectory));
 }

 private void TrackFoundSeriesAndFiles(Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, IList<ScannedSeriesResult> seenSeries)
@@ -485,7 +485,8 @@ public class ScannerService : IScannerService
 public async Task ScanLibrary(int libraryId, bool forceUpdate = false, bool isSingleScan = true)
 {
     var sw = Stopwatch.StartNew();
-    var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders | LibraryIncludes.FileTypes | LibraryIncludes.ExcludePatterns);
+    var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId,
+        LibraryIncludes.Folders | LibraryIncludes.FileTypes | LibraryIncludes.ExcludePatterns);
     var libraryFolderPaths = library!.Folders.Select(fp => fp.Path).ToList();
     if (!await CheckMounts(library.Name, libraryFolderPaths)) return;

@@ -501,48 +502,16 @@ public class ScannerService : IScannerService
 }

-var totalFiles = 0;
-var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
-
 var (scanElapsedTime, processedSeries) = await ScanFiles(library, libraryFolderPaths,
     shouldUseLibraryScan, forceUpdate);

+var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
 TrackFoundSeriesAndFiles(parsedSeries, processedSeries);

-// We need to remove any keys where there is no actual parser info
-var toProcess = parsedSeries.Keys
-    .Where(k => parsedSeries[k].Any() && !string.IsNullOrEmpty(parsedSeries[k][0].Filename))
-    .ToList();
+var totalFiles = await ProcessParsedSeries(forceUpdate, parsedSeries, library, scanElapsedTime);

-if (toProcess.Count > 0)
-{
-    // This grabs all the shared entities, like tags, genre, people. To be solved later in this refactor on how to not have blocking access.
-    await _processSeries.Prime();
-}
-
-var tasks = new List<Task>();
-foreach (var pSeries in toProcess)
-{
-    totalFiles += parsedSeries[pSeries].Count;
-    //tasks.Add();
-    await _processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, forceUpdate);
-}
-
-//await Task.WhenAll(tasks);
-
-await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
-    MessageFactory.FileScanProgressEvent(string.Empty, library.Name, ProgressEventType.Ended));
-
-_logger.LogInformation("[ScannerService] Finished file scan in {ScanAndUpdateTime} milliseconds. Updating database", scanElapsedTime);
-
-var time = DateTime.Now;
-foreach (var folderPath in library.Folders)
-{
-    folderPath.UpdateLastScanned(time);
-}
-
-library.UpdateLastScanned(time);
+UpdateLastScanned(library);

 _unitOfWork.LibraryRepository.Update(library);
@@ -566,28 +535,7 @@ public class ScannerService : IScannerService
     totalFiles, parsedSeries.Count, sw.ElapsedMilliseconds, library.Name);
 }

-try
-{
-    // Could I delete anything in a Library's Series where the LastScan date is before scanStart?
-    // NOTE: This implementation is expensive
-    _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan");
-    var removedSeries = await _unitOfWork.SeriesRepository.RemoveSeriesNotInList(parsedSeries.Keys.ToList(), library.Id);
-    _logger.LogDebug("[ScannerService] Found {Count} series that needs to be removed: {SeriesList}",
-        removedSeries.Count, removedSeries.Select(s => s.Name));
-    _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan - complete");
-
-    await _unitOfWork.CommitAsync();
-
-    foreach (var s in removedSeries)
-    {
-        await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved,
-            MessageFactory.SeriesRemovedEvent(s.Id, s.Name, s.LibraryId), false);
-    }
-}
-catch (Exception ex)
-{
-    _logger.LogCritical(ex, "[ScannerService] There was an issue deleting series for cleanup. Please check logs and rescan");
-}
+await RemoveSeriesNotFound(parsedSeries, library);
 }
 else
 {
@@ -598,7 +546,77 @@ public class ScannerService : IScannerService
     await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, string.Empty));
     await _metadataService.RemoveAbandonedMetadataKeys();

-    BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.TempDirectory));
+    BackgroundJob.Enqueue(() => _directoryService.ClearDirectory(_directoryService.CacheDirectory));
 }

+private async Task RemoveSeriesNotFound(Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, Library library)
+{
+    try
+    {
+        // Could I delete anything in a Library's Series where the LastScan date is before scanStart?
+        // NOTE: This implementation is expensive
+        _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan");
+        var removedSeries = await _unitOfWork.SeriesRepository.RemoveSeriesNotInList(parsedSeries.Keys.ToList(), library.Id);
+        _logger.LogDebug("[ScannerService] Found {Count} series that needs to be removed: {SeriesList}",
+            removedSeries.Count, removedSeries.Select(s => s.Name));
+        _logger.LogDebug("[ScannerService] Removing Series that were not found during the scan - complete");
+
+        await _unitOfWork.CommitAsync();
+
+        foreach (var s in removedSeries)
+        {
+            await _eventHub.SendMessageAsync(MessageFactory.SeriesRemoved,
+                MessageFactory.SeriesRemovedEvent(s.Id, s.Name, s.LibraryId), false);
+        }
+    }
+    catch (Exception ex)
+    {
+        _logger.LogCritical(ex, "[ScannerService] There was an issue deleting series for cleanup. Please check logs and rescan");
+    }
+}
+
+private async Task<int> ProcessParsedSeries(bool forceUpdate, Dictionary<ParsedSeries, IList<ParserInfo>> parsedSeries, Library library, long scanElapsedTime)
+{
+    var toProcess = parsedSeries.Keys
+        .Where(k => parsedSeries[k].Any() && !string.IsNullOrEmpty(parsedSeries[k][0].Filename))
+        .ToList();
+
+    if (toProcess.Count > 0)
+    {
+        // This grabs all the shared entities, like tags, genre, people. To be solved later in this refactor on how to not have blocking access.
+        await _processSeries.Prime();
+    }
+
+    var totalFiles = 0;
+    //var tasks = new List<Task>();
+    foreach (var pSeries in toProcess)
+    {
+        totalFiles += parsedSeries[pSeries].Count;
+        //tasks.Add(_processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, forceUpdate));
+        // We can't do Task.WhenAll because of concurrency issues.
+        await _processSeries.ProcessSeriesAsync(parsedSeries[pSeries], library, forceUpdate);
+    }
+
+    //await Task.WhenAll(tasks);
+
+    await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+        MessageFactory.FileScanProgressEvent(string.Empty, library.Name, ProgressEventType.Ended));
+
+    _logger.LogInformation("[ScannerService] Finished file scan in {ScanAndUpdateTime} milliseconds. Updating database", scanElapsedTime);
+
+    return totalFiles;
+}
+
+private static void UpdateLastScanned(Library library)
+{
+    var time = DateTime.Now;
+    foreach (var folderPath in library.Folders)
+    {
+        folderPath.UpdateLastScanned(time);
+    }
+
+    library.UpdateLastScanned(time);
+}
+
 private async Task<Tuple<long, IList<ScannedSeriesResult>>> ScanFiles(Library library, IEnumerable<string> dirs,
@@ -260,6 +260,7 @@ public class Startup
     await MigrateLooseLeafChapters.Migrate(dataContext, unitOfWork, directoryService, logger);
     await MigrateChapterFields.Migrate(dataContext, unitOfWork, logger);
     await MigrateChapterRange.Migrate(dataContext, unitOfWork, logger);
+    await MigrateMangaFilePath.Migrate(dataContext, logger);

     // Update the version in the DB after all migrations are run
     var installVersion = await unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.InstallVersion);
@@ -32,7 +32,7 @@ export class LibraryService {
     return of(this.libraryNames);
 }

-return this.httpClient.get<Library[]>(this.baseUrl + 'library').pipe(map(libraries => {
+return this.httpClient.get<Library[]>(this.baseUrl + 'library/libraries').pipe(map(libraries => {
     this.libraryNames = {};
     libraries.forEach(lib => {
         if (this.libraryNames !== undefined) {
@@ -47,7 +47,7 @@ export class LibraryService {
 if (this.libraryNames != undefined && this.libraryNames.hasOwnProperty(libraryId)) {
     return of(this.libraryNames[libraryId]);
 }
-return this.httpClient.get<Library[]>(this.baseUrl + 'library').pipe(map(l => {
+return this.httpClient.get<Library[]>(this.baseUrl + 'library/libraries').pipe(map(l => {
     this.libraryNames = {};
     l.forEach(lib => {
         if (this.libraryNames !== undefined) {
@@ -75,8 +75,12 @@ export class LibraryService {
     return this.httpClient.get<JumpKey[]>(this.baseUrl + 'library/jump-bar?libraryId=' + libraryId);
 }

+getLibrary(libraryId: number) {
+    return this.httpClient.get<Library>(this.baseUrl + 'library?libraryId=' + libraryId);
+}
+
 getLibraries() {
-    return this.httpClient.get<Library[]>(this.baseUrl + 'library');
+    return this.httpClient.get<Library[]>(this.baseUrl + 'library/libraries');
 }

 updateLibrariesForMember(username: string, selectedLibraries: Library[]) {
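These client changes track the controller rename earlier in the diff: GET /api/Library now returns a single library by id, and the old list endpoint moved to GET /api/Library/libraries. The route split in miniature, as a bare ASP.NET Core controller (simplified types, not Kavita's actual controller):

using System.Collections.Generic;
using Microsoft.AspNetCore.Mvc;

public record LibraryDto(int Id, string Name);

[ApiController]
[Route("api/[controller]")]
public class LibraryController : ControllerBase
{
    private static readonly List<LibraryDto> Libraries = new() { new(1, "Manga"), new(2, "Comics") };

    // GET /api/Library?libraryId=2 -> the new single-library endpoint
    [HttpGet]
    public ActionResult<LibraryDto?> GetLibrary(int libraryId) =>
        Ok(Libraries.Find(l => l.Id == libraryId));

    // GET /api/Library/libraries -> the full list the client methods above now call
    [HttpGet("libraries")]
    public ActionResult<IEnumerable<LibraryDto>> GetLibraries() => Ok(Libraries);
}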
@@ -55,13 +55,6 @@ export class ManageTasksSettingsComponent implements OnInit {
     api: this.serverService.convertMedia(),
     successMessage: 'convert-media-task-success'
 },
-// I removed this as it's not really needed, given that External Recs are the only thing that fill this cache now
-// {
-//   name: 'bust-cache-task',
-//   description: 'bust-cache-task-desc',
-//   api: this.serverService.bustCache(),
-//   successMessage: 'bust-cache-task-success'
-// },
 {
     name: 'bust-locale-task',
     description: 'bust-locale-task-desc',
@@ -245,20 +238,20 @@ export class ManageTasksSettingsComponent implements OnInit {
 modelSettings.taskScan = this.settingsForm.get('taskScan')?.value;
 modelSettings.taskCleanup = this.settingsForm.get('taskCleanup')?.value;

-if (this.serverSettings.taskBackup === this.customOption) {
+if (modelSettings.taskBackup === this.customOption) {
     modelSettings.taskBackup = this.settingsForm.get('taskBackupCustom')?.value;
 }

-if (this.serverSettings.taskScan === this.customOption) {
+if (modelSettings.taskScan === this.customOption) {
     modelSettings.taskScan = this.settingsForm.get('taskScanCustom')?.value;
 }

-if (this.serverSettings.taskScan === this.customOption) {
+if (modelSettings.taskCleanup === this.customOption) {
     modelSettings.taskCleanup = this.settingsForm.get('taskCleanupCustom')?.value;
 }


-this.settingsService.updateServerSettings(modelSettings).pipe(take(1)).subscribe(async (settings: ServerSettings) => {
+this.settingsService.updateServerSettings(modelSettings).pipe(take(1)).subscribe((settings: ServerSettings) => {
     this.serverSettings = settings;
     this.resetForm();
     this.recurringTasks$ = this.serverService.getRecurringJobs().pipe(shareReplay());
@@ -7,7 +7,7 @@
 <div class="mt-3">
     @if (isAdmin) {
         <div class="d-flex justify-content-center">
-            <p>{{t('no-libraries')}} <a routerLink="/admin/dashboard" fragment="libraries">{{t('server-settings-link')}}</a></p>
+            <p>{{t('no-libraries')}} <a routerLink="/admin/dashboard" fragment="libraries">{{t('server-settings-link')}}</a>.</p>
         </div>
     } @else {
         <div class="d-flex justify-content-center">
@@ -231,7 +231,23 @@ export class LibraryDetailComponent implements OnInit {
 async handleAction(action: ActionItem<Library>, library: Library) {
     let lib: Partial<Library> = library;
     if (library === undefined) {
-        lib = {id: this.libraryId, name: this.libraryName};
+        //lib = {id: this.libraryId, name: this.libraryName}; // BUG: We need the whole library for editLibrary
+        this.libraryService.getLibrary(this.libraryId).subscribe(async library => {
+            switch (action.action) {
+                case(Action.Scan):
+                    await this.actionService.scanLibrary(library);
+                    break;
+                case(Action.RefreshMetadata):
+                    await this.actionService.refreshMetadata(library);
+                    break;
+                case(Action.Edit):
+                    this.actionService.editLibrary(library);
+                    break;
+                default:
+                    break;
+            }
+        });
+        return
     }
     switch (action.action) {
         case(Action.Scan):
@@ -33,11 +33,11 @@

 <div class="mb-3">
     <label for="library-type" class="form-label">{{t('type-label')}}</label>
-    <i class="fa fa-info-circle ms-1" placement="top" [ngbTooltip]="typeTooltip" role="button" tabindex="0"></i>
+    <i class="fa fa-info-circle ms-1" placement="end" [ngbTooltip]="typeTooltip" role="button" tabindex="0"></i>
     @if(IsKavitaPlusEligible) {
         <span class="float-end">
             {{t('kavitaplus-eligible-label')}}
-            <i class="fa fa-info-circle ms-1" placement="top" [ngbTooltip]="kavitaplusEligibleTooltip" role="button" tabindex="0"></i>
+            <i class="fa fa-info-circle ms-1" placement="start" [ngbTooltip]="kavitaplusEligibleTooltip" role="button" tabindex="0"></i>
         </span>
     }
     <ng-template #typeTooltip>{{t('type-tooltip')}}</ng-template>
@@ -11,7 +11,7 @@
 <label for="time-select-read-by-day" class="form-check-label"></label>
 <select id="time-select-read-by-day" class="form-select" formControlName="users"
         [class.is-invalid]="formGroup.get('users')?.invalid && formGroup.get('users')?.touched">
-    <option [value]="0">All Users</option>
+    <option [value]="0">{{t('all-users')}}</option>
     <option *ngFor="let item of users$ | async" [value]="item.id">{{item.username}}</option>
 </select>
 </div>
@@ -4,7 +4,7 @@
 <ng-container>
     <div class="col-auto mb-2">
         <app-icon-and-title [label]="t('total-series-label')" [clickable]="false" fontClasses="fa-solid fa-book-open" [title]="t('total-series-tooltip', {count: stats.seriesCount | number})">
-            {{stats.seriesCount | compactNumber}} Series
+            {{t('series-count', {num: stats.seriesCount | number})}}
         </app-icon-and-title>
     </div>
     <div class="vr d-none d-lg-block m-2"></div>
@@ -13,7 +13,7 @@
 <ng-container >
     <div class="col-auto mb-2">
         <app-icon-and-title [label]="t('total-volumes-label')" [clickable]="false" fontClasses="fas fa-book" [title]="t('total-volumes-tooltip', {count: stats.volumeCount | number})">
-            {{stats.volumeCount | compactNumber}} Volumes
+            {{t('volume-count', {num: stats.volumeCount | number})}}
         </app-icon-and-title>
     </div>
     <div class="vr d-none d-lg-block m-2"></div>
@@ -22,7 +22,7 @@
 <ng-container>
     <div class="col-auto mb-2">
         <app-icon-and-title [label]="t('total-files-label')" [clickable]="false" fontClasses="fa-regular fa-file" [title]="t('total-files-tooltip', {count: stats.totalFiles | number})">
-            {{stats.totalFiles | compactNumber}} Files
+            {{t('file-count', {num: stats.totalFiles | number})}}
         </app-icon-and-title>
     </div>
     <div class="vr d-none d-lg-block m-2"></div>
@@ -39,7 +39,7 @@

 <ng-container>
     <div class="col-auto mb-2">
-        <app-icon-and-title label="Total Genres" [clickable]="true" fontClasses="fa-solid fa-tags" [title]="t('total-genres-tooltip', {count: stats.totalGenres | number})" (click)="openGenreList();$event.stopPropagation();">
+        <app-icon-and-title [label]="t('total-genres-label')" [clickable]="true" fontClasses="fa-solid fa-tags" [title]="t('total-genres-tooltip', {count: stats.totalGenres | number})" (click)="openGenreList();$event.stopPropagation();">
             {{t('genre-count', {num: stats.totalGenres | compactNumber})}}
         </app-icon-and-title>
     </div>
@@ -88,7 +88,7 @@
     </app-stat-list>
 </div>
 <div class="col-auto">
-    <app-stat-list [data$]="recentlyRead$" title="Recently Read" [image]="seriesImage" [handleClick]="openSeries"></app-stat-list>
+    <app-stat-list [data$]="recentlyRead$" [title]="t('recently-read-title')" [image]="seriesImage" [handleClick]="openSeries"></app-stat-list>
 </div>
 </div>
@@ -706,7 +706,7 @@
 "description": "Complete the form to register an admin account",
 "username-label": "{{common.username}}",
 "email-label": "{{common.email}}",
-"email-tooltip": "Email does not need to be a real address, but provides access to forgot password. It is not sent outside the server unless forgot password is used without a custom email service host.",
+"email-tooltip": "Email does not need to be a real address, but provides access to forgot password. It is not sent outside the server unless Kavita emails you.",
 "password-label": "{{common.password}}",
 "required-field": "{{validation.required-field}}",
 "valid-email": "{{validation.valid-email}}",
@@ -1779,6 +1779,7 @@
 "y-axis-label": "Hours Read",
 "no-data": "No Reading Progress",
 "time-frame-label": "Time Frame",
+"all-users": "All Users",
 "this-week": "{{time-periods.this-week}}",
 "last-7-days": "{{time-periods.last-7-days}}",
 "last-30-days": "{{time-periods.last-30-days}}",
@@ -1841,6 +1842,9 @@
 "popular-libraries-title": "Popular Libraries",
 "popular-series-title": "Popular Series",
 "recently-read-title": "Recently Read",
+"series-count": "{{num}} Series",
+"volume-count": "{{num}} Volumes",
+"file-count": "{{num}} Files",
 "genre-count": "{{num}} Genres",
 "tag-count": "{{num}} Tags",
 "people-count": "{{num}} People",
openapi.json (40 lines changed)
@@ -2502,6 +2502,46 @@
     }
 },
+"/api/Library": {
+    "get": {
+        "tags": [
+            "Library"
+        ],
+        "summary": "Return a specific library",
+        "parameters": [
+            {
+                "name": "libraryId",
+                "in": "query",
+                "schema": {
+                    "type": "integer",
+                    "format": "int32"
+                }
+            }
+        ],
+        "responses": {
+            "200": {
+                "description": "Success",
+                "content": {
+                    "text/plain": {
+                        "schema": {
+                            "$ref": "#/components/schemas/LibraryDto"
+                        }
+                    },
+                    "application/json": {
+                        "schema": {
+                            "$ref": "#/components/schemas/LibraryDto"
+                        }
+                    },
+                    "text/json": {
+                        "schema": {
+                            "$ref": "#/components/schemas/LibraryDto"
+                        }
+                    }
+                }
+            }
+        }
+    }
+},
 "/api/Library/libraries": {
     "get": {
         "tags": [
             "Library"