Mirror of https://github.com/Kareadita/Kavita.git
Scan Loop Fixes (#1572)
* Cleaned up some messaging in the scan loop to be more context-bearing.
* Added response caching (1 minute) to Series Detail, due to the heavy nature of the call.
* Refactored code so that processing of series runs synchronously, as intended. Added a log to inform the user of corrupted volumes caused by buggy code in v0.5.6.
* Moved folder watching out of experimental.
* Fixed an issue where empty folders could break the scan loop.
* Fixed another case where invalid dates meant the scanner couldn't compute a proper minimum release year and would throw an exception (develop).
* Implemented the ability to edit the release year for a series from the UI.
* Added a unit test for some new logic.
* Code smells.
Parent: 78b043af74
Commit: 13226fecc1
@@ -1001,6 +1001,41 @@ public class SeriesServiceTests
         Assert.True(series.Metadata.GenresLocked);
     }
 
+    [Fact]
+    public async Task UpdateSeriesMetadata_ShouldNotUpdateReleaseYear_IfLessThan1000()
+    {
+        await ResetDb();
+        var s = new Series()
+        {
+            Name = "Test",
+            Library = new Library()
+            {
+                Name = "Test LIb",
+                Type = LibraryType.Book,
+            },
+            Metadata = DbFactory.SeriesMetadata(new List<CollectionTag>())
+        };
+        _context.Series.Add(s);
+        await _context.SaveChangesAsync();
+
+        var success = await _seriesService.UpdateSeriesMetadata(new UpdateSeriesMetadataDto()
+        {
+            SeriesMetadata = new SeriesMetadataDto()
+            {
+                SeriesId = 1,
+                ReleaseYear = 100,
+            },
+            CollectionTags = new List<CollectionTagDto>()
+        });
+
+        Assert.True(success);
+
+        var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(1);
+        Assert.NotNull(series.Metadata);
+        Assert.Equal(0, series.Metadata.ReleaseYear);
+        Assert.False(series.Metadata.ReleaseYearLocked);
+    }
+
     #endregion
 
     #region GetFirstChapterForMetadata
@@ -377,6 +377,7 @@ public class SeriesController : BaseApiController
     /// <param name="seriesId"></param>
     /// <returns></returns>
    /// <remarks>Do not rely on this API externally. May change without hesitation. </remarks>
+    [ResponseCache(Duration = 60, Location = ResponseCacheLocation.Any, VaryByQueryKeys = new [] {"seriesId"})]
     [HttpGet("series-detail")]
     public async Task<ActionResult<SeriesDetailDto>> GetSeriesDetailBreakdown(int seriesId)
     {
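A note on the new [ResponseCache] attribute above: VaryByQueryKeys only takes effect when ASP.NET Core's response caching middleware is in the pipeline; otherwise the framework throws at runtime. The wiring below is a minimal sketch of that prerequisite (a typical Startup-style registration, assumed rather than taken from this diff):

using Microsoft.AspNetCore.Builder;
using Microsoft.Extensions.DependencyInjection;

public class ResponseCachingSetupSketch
{
    // Registers the services backing [ResponseCache(..., VaryByQueryKeys = ...)].
    public void ConfigureServices(IServiceCollection services)
    {
        services.AddResponseCaching();
        services.AddControllers();
    }

    // The middleware must run before the endpoint so the 1-minute cached response
    // can be stored and served per distinct seriesId query value.
    public void Configure(IApplicationBuilder app)
    {
        app.UseRouting();
        app.UseResponseCaching();
        app.UseEndpoints(endpoints => endpoints.MapControllers());
    }
}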
@@ -8,7 +8,6 @@ public record UpdateUserDto
     public string Username { get; set; }
     /// List of Roles to assign to user. If admin not present, Pleb will be applied.
    /// If admin present, all libraries will be granted access and will ignore those from DTO.
-    /// </summary>
     public IList<string> Roles { get; init; }
     /// <summary>
     /// A list of libraries to grant access to
@@ -79,6 +79,7 @@ public class SeriesMetadataDto
     public bool PublishersLocked { get; set; }
     public bool TranslatorsLocked { get; set; }
     public bool CoverArtistsLocked { get; set; }
+    public bool ReleaseYearLocked { get; set; }
 
 
     public int SeriesId { get; set; }
API/Data/Migrations/20221006013956_ReleaseYearOnSeriesEdit.Designer.cs (generated, new file, 1664 lines)
File diff suppressed because it is too large.
@@ -0,0 +1,26 @@
+using Microsoft.EntityFrameworkCore.Migrations;
+
+#nullable disable
+
+namespace API.Data.Migrations
+{
+    public partial class ReleaseYearOnSeriesEdit : Migration
+    {
+        protected override void Up(MigrationBuilder migrationBuilder)
+        {
+            migrationBuilder.AddColumn<bool>(
+                name: "ReleaseYearLocked",
+                table: "SeriesMetadata",
+                type: "INTEGER",
+                nullable: false,
+                defaultValue: false);
+        }
+
+        protected override void Down(MigrationBuilder migrationBuilder)
+        {
+            migrationBuilder.DropColumn(
+                name: "ReleaseYearLocked",
+                table: "SeriesMetadata");
+        }
+    }
+}
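For reference, a migration like this is applied when the application runs EF Core's pending migrations at startup. A minimal, generic sketch of that pattern follows (the class and method names here are illustrative, not taken from this commit):

using System;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;

public static class MigrationStartupSketch
{
    // Applies any pending migrations (such as ReleaseYearOnSeriesEdit above) against the configured database.
    // TContext stands in for whatever DbContext the application registers in DI; assumption, not from this diff.
    public static async Task ApplyPendingMigrationsAsync<TContext>(IServiceProvider services)
        where TContext : DbContext
    {
        using var scope = services.CreateScope();
        var context = scope.ServiceProvider.GetRequiredService<TContext>();
        await context.Database.MigrateAsync();
    }
}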
@@ -647,6 +647,9 @@ namespace API.Data.Migrations
                     b.Property<int>("ReleaseYear")
                         .HasColumnType("INTEGER");
 
+                    b.Property<bool>("ReleaseYearLocked")
+                        .HasColumnType("INTEGER");
+
                     b.Property<uint>("RowVersion")
                         .IsConcurrencyToken()
                         .HasColumnType("INTEGER");
@@ -67,6 +67,7 @@ public class SeriesMetadata : IHasConcurrencyToken
     public bool PublisherLocked { get; set; }
     public bool TranslatorLocked { get; set; }
     public bool CoverArtistLocked { get; set; }
+    public bool ReleaseYearLocked { get; set; }
 
     // Relationship
@@ -645,12 +645,15 @@ public class DirectoryService : IDirectoryService
     /// <summary>
     /// Recursively scans a folder and returns the max last write time on any folders and files
     /// </summary>
+    /// <remarks>If the folder is empty, this will return MaxValue for a DateTime</remarks>
     /// <param name="folderPath"></param>
     /// <returns>Max Last Write Time</returns>
     public DateTime GetLastWriteTime(string folderPath)
     {
         if (!FileSystem.Directory.Exists(folderPath)) throw new IOException($"{folderPath} does not exist");
-        return Directory.GetFileSystemEntries(folderPath, "*.*", SearchOption.AllDirectories).Max(path => FileSystem.File.GetLastWriteTime(path));
+        var fileEntries = Directory.GetFileSystemEntries(folderPath, "*.*", SearchOption.AllDirectories);
+        if (fileEntries.Length == 0) return DateTime.MaxValue;
+        return fileEntries.Max(path => FileSystem.File.GetLastWriteTime(path));
     }
 
     /// <summary>
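This is the "empty folders could break the scan loop" fix: Enumerable.Max over an empty sequence throws "Sequence contains no elements", so an empty folder now short-circuits to DateTime.MaxValue. A standalone sketch of the same guard, using plain System.IO rather than the service's injected file system abstraction (names are illustrative, not from the commit):

using System;
using System.IO;
using System.Linq;

public static class LastWriteTimeSketch
{
    // Mirrors the patched logic: an empty folder yields DateTime.MaxValue instead of
    // letting Enumerable.Max throw on an empty sequence.
    public static DateTime GetLastWriteTime(string folderPath)
    {
        if (!Directory.Exists(folderPath)) throw new IOException($"{folderPath} does not exist");
        var entries = Directory.GetFileSystemEntries(folderPath, "*.*", SearchOption.AllDirectories);
        if (entries.Length == 0) return DateTime.MaxValue;
        return entries.Max(path => File.GetLastWriteTime(path));
    }
}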
@@ -76,6 +76,12 @@ public class SeriesService : ISeriesService
             series.Metadata.AgeRatingLocked = true;
         }
 
+        if (updateSeriesMetadataDto.SeriesMetadata.ReleaseYear > 1000 && series.Metadata.ReleaseYear != updateSeriesMetadataDto.SeriesMetadata.ReleaseYear)
+        {
+            series.Metadata.ReleaseYear = updateSeriesMetadataDto.SeriesMetadata.ReleaseYear;
+            series.Metadata.ReleaseYearLocked = true;
+        }
+
         if (series.Metadata.PublicationStatus != updateSeriesMetadataDto.SeriesMetadata.PublicationStatus)
         {
             series.Metadata.PublicationStatus = updateSeriesMetadataDto.SeriesMetadata.PublicationStatus;
@@ -167,6 +173,7 @@ public class SeriesService : ISeriesService
         series.Metadata.CoverArtistLocked = updateSeriesMetadataDto.SeriesMetadata.CoverArtistsLocked;
         series.Metadata.WriterLocked = updateSeriesMetadataDto.SeriesMetadata.WritersLocked;
         series.Metadata.SummaryLocked = updateSeriesMetadataDto.SeriesMetadata.SummaryLocked;
+        series.Metadata.ReleaseYearLocked = updateSeriesMetadataDto.SeriesMetadata.ReleaseYearLocked;
 
         if (!_unitOfWork.HasChanges())
         {
@@ -81,6 +81,7 @@ public class ParseScannedFiles
         if (scanDirectoryByDirectory)
         {
             // This is used in library scan, so we should check first for a ignore file and use that here as well
+            // TODO: We need to calculate all folders till library root and see if any kavitaignores
             var potentialIgnoreFile = _directoryService.FileSystem.Path.Join(folderPath, DirectoryService.KavitaIgnoreFile);
             var matcher = _directoryService.CreateMatcherFromFile(potentialIgnoreFile);
             var directories = _directoryService.GetDirectories(folderPath, matcher).ToList();
@@ -228,62 +229,68 @@ public class ParseScannedFiles
 
         await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Starting", libraryName, ProgressEventType.Started));
 
+        async Task ProcessFolder(IList<string> files, string folder)
+        {
+            var normalizedFolder = Parser.Parser.NormalizePath(folder);
+            if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedFolder, forceCheck))
+            {
+                var parsedInfos = seriesPaths[normalizedFolder].Select(fp => new ParserInfo()
+                {
+                    Series = fp.SeriesName,
+                    Format = fp.Format,
+                }).ToList();
+                await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(true, parsedInfos));
+                _logger.LogDebug("Skipped File Scan for {Folder} as it hasn't changed since last scan", folder);
+                return;
+            }
+
+            _logger.LogDebug("Found {Count} files for {Folder}", files.Count, folder);
+            await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress,
+                MessageFactory.FileScanProgressEvent(folder, libraryName, ProgressEventType.Updated));
+            if (files.Count == 0)
+            {
+                _logger.LogInformation("[ScannerService] {Folder} is empty", folder);
+                return;
+            }
+
+            var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
+            var infos = files
+                .Select(file => _readingItemService.ParseFile(file, folder, libraryType))
+                .Where(info => info != null)
+                .ToList();
+
+            MergeLocalizedSeriesWithSeries(infos);
+
+            foreach (var info in infos)
+            {
+                try
+                {
+                    TrackSeries(scannedSeries, info);
+                }
+                catch (Exception ex)
+                {
+                    _logger.LogError(ex,
+                        "There was an exception that occurred during tracking {FilePath}. Skipping this file",
+                        info.FullFilePath);
+                }
+            }
+
+            foreach (var series in scannedSeries.Keys)
+            {
+                if (scannedSeries[series].Count > 0 && processSeriesInfos != null)
+                {
+                    await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(false, scannedSeries[series]));
+                }
+            }
+        }
+
+
         foreach (var folderPath in folders)
         {
             try
             {
-                await ProcessFiles(folderPath, isLibraryScan, seriesPaths, async (files, folder) =>
-                {
-                    var normalizedFolder = Parser.Parser.NormalizePath(folder);
-                    if (HasSeriesFolderNotChangedSinceLastScan(seriesPaths, normalizedFolder, forceCheck))
-                    {
-                        var parsedInfos = seriesPaths[normalizedFolder].Select(fp => new ParserInfo()
-                        {
-                            Series = fp.SeriesName,
-                            Format = fp.Format,
-                        }).ToList();
-                        await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(true, parsedInfos));
-                        _logger.LogDebug("Skipped File Scan for {Folder} as it hasn't changed since last scan", folder);
-                        return;
-                    }
-                    _logger.LogDebug("Found {Count} files for {Folder}", files.Count, folder);
-                    await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent(folderPath, libraryName, ProgressEventType.Updated));
-                    if (files.Count == 0)
-                    {
-                        _logger.LogInformation("[ScannerService] {Folder} is empty", folder);
-                        return;
-                    }
-                    var scannedSeries = new ConcurrentDictionary<ParsedSeries, List<ParserInfo>>();
-                    var infos = files
-                        .Select(file => _readingItemService.ParseFile(file, folderPath, libraryType))
-                        .Where(info => info != null)
-                        .ToList();
-
-
-                    MergeLocalizedSeriesWithSeries(infos);
-
-                    foreach (var info in infos)
-                    {
-                        try
-                        {
-                            TrackSeries(scannedSeries, info);
-                        }
-                        catch (Exception ex)
-                        {
-                            _logger.LogError(ex, "There was an exception that occurred during tracking {FilePath}. Skipping this file", info.FullFilePath);
-                        }
-                    }
-
-                    // It would be really cool if we can emit an event when a folder hasn't been changed so we don't parse everything, but the first item to ensure we don't delete it
-                    // Otherwise, we can do a last step in the DB where we validate all files on disk exist and if not, delete them. (easy but slow)
-                    foreach (var series in scannedSeries.Keys)
-                    {
-                        if (scannedSeries[series].Count > 0 && processSeriesInfos != null)
-                        {
-                            await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(false, scannedSeries[series]));
-                        }
-                    }
-                }, forceCheck);
+                await ProcessFiles(folderPath, isLibraryScan, seriesPaths, ProcessFolder, forceCheck);
             }
             catch (ArgumentException ex)
             {
@@ -15,6 +15,7 @@ using API.Parser;
 using API.Services.Tasks.Metadata;
 using API.SignalR;
 using Hangfire;
+using Kavita.Common;
 using Microsoft.Extensions.Logging;
 
 namespace API.Services.Tasks.Scanner;
@@ -45,9 +46,9 @@ public class ProcessSeries : IProcessSeries
     private readonly IMetadataService _metadataService;
     private readonly IWordCountAnalyzerService _wordCountAnalyzerService;
 
-    private volatile IList<Genre> _genres;
-    private volatile IList<Person> _people;
-    private volatile IList<Tag> _tags;
+    private IList<Genre> _genres;
+    private IList<Person> _people;
+    private IList<Tag> _tags;
 
     public ProcessSeries(IUnitOfWork unitOfWork, ILogger<ProcessSeries> logger, IEventHub eventHub,
         IDirectoryService directoryService, ICacheHelper cacheHelper, IReadingItemService readingItemService,
@@ -117,7 +118,7 @@ public class ProcessSeries : IProcessSeries
         _logger.LogInformation("[ScannerService] Processing series {SeriesName}", series.OriginalName);
 
         // parsedInfos[0] is not the first volume or chapter. We need to find it using a ComicInfo check (as it uses firstParsedInfo for series sort)
-        var firstParsedInfo = parsedInfos.FirstOrDefault(p => p.ComicInfo != null, parsedInfos[0]);
+        var firstParsedInfo = parsedInfos.FirstOrDefault(p => p.ComicInfo != null, firstInfo);
 
         UpdateVolumes(series, parsedInfos);
         series.Pages = series.Volumes.Sum(v => v.Pages);
@@ -235,12 +236,15 @@ public class ProcessSeries : IProcessSeries
         var chapters = series.Volumes.SelectMany(volume => volume.Chapters).ToList();
 
         // Update Metadata based on Chapter metadata
-        series.Metadata.ReleaseYear = chapters.Select(v => v.ReleaseDate.Year).Where(y => y >= 1000).Min();
-
-        if (series.Metadata.ReleaseYear < 1000)
+        if (!series.Metadata.ReleaseYearLocked)
         {
-            // Not a valid year, default to 0
-            series.Metadata.ReleaseYear = 0;
+            series.Metadata.ReleaseYear = chapters.Select(v => v.ReleaseDate.Year).Where(y => y >= 1000).DefaultIfEmpty().Min();
+
+            if (series.Metadata.ReleaseYear < 1000)
+            {
+                // Not a valid year, default to 0
+                series.Metadata.ReleaseYear = 0;
+            }
         }
 
         // Set the AgeRating as highest in all the comicInfos
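The added DefaultIfEmpty() is the actual fix for the "dates aren't valid" crash: Min() over an empty sequence throws, while DefaultIfEmpty() supplies a 0 that the < 1000 check then normalizes. A small standalone sketch with illustrative values (not from the commit):

using System;
using System.Linq;

public static class ReleaseYearSketch
{
    public static int MinValidYear(int[] chapterYears)
    {
        // Years below 1000 are treated as "no date"; when no valid years remain,
        // DefaultIfEmpty() yields 0 so Min() cannot throw on an empty sequence.
        var year = chapterYears.Where(y => y >= 1000).DefaultIfEmpty().Min();
        return year < 1000 ? 0 : year;
    }

    public static void Main()
    {
        Console.WriteLine(MinValidYear(new[] { 1, 2015, 2018 })); // 2015
        Console.WriteLine(MinValidYear(new[] { 1, 1 }));          // 0 (previously threw)
    }
}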
@@ -440,8 +444,22 @@ public class ProcessSeries : IProcessSeries
         _logger.LogDebug("[ScannerService] Updating {DistinctVolumes} volumes on {SeriesName}", distinctVolumes.Count, series.Name);
         foreach (var volumeNumber in distinctVolumes)
         {
-            _logger.LogDebug("[ScannerService] Looking up volume for {volumeNumber}", volumeNumber);
-            var volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber);
+            _logger.LogDebug("[ScannerService] Looking up volume for {VolumeNumber}", volumeNumber);
+            Volume volume;
+            try
+            {
+                volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber);
+            }
+            catch (Exception ex)
+            {
+                if (ex.Message.Equals("Sequence contains more than one matching element"))
+                {
+                    _logger.LogCritical("[ScannerService] Kavita found corrupted volume entries on {SeriesName}. Please delete the series from Kavita via UI and rescan", series.Name);
+                    throw new KavitaException(
+                        $"Kavita found corrupted volume entries on {series.Name}. Please delete the series from Kavita via UI and rescan");
+                }
+                throw;
+            }
             if (volume == null)
             {
                 volume = DbFactory.Volume(volumeNumber);
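Context for the new guard: SingleOrDefault throws InvalidOperationException ("Sequence contains more than one matching element") when duplicate volume rows exist, which is what the corrupted data from v0.5.6 produces. A standalone illustration with hypothetical data (not from the commit):

using System;
using System.Linq;

public static class SingleOrDefaultSketch
{
    public static void Main()
    {
        var volumeNames = new[] { "1", "1", "2" }; // duplicate "1" simulates the corrupted rows

        try
        {
            var match = volumeNames.SingleOrDefault(name => name == "1");
            Console.WriteLine(match);
        }
        catch (InvalidOperationException ex)
        {
            // "Sequence contains more than one matching element" -- the case the scanner
            // now surfaces as a KavitaException with a user-facing message.
            Console.WriteLine(ex.Message);
        }
    }
}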
@@ -496,7 +514,7 @@ public class ProcessSeries : IProcessSeries
             series.Volumes = nonDeletedVolumes;
         }
 
-        _logger.LogDebug("[ScannerService] Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}",
+        _logger.LogDebug("[ScannerService] Updated {SeriesName} volumes from count of {StartingVolumeCount} to {VolumeCount}",
             series.Name, startingVolumeCount, series.Volumes.Count);
     }
 
@@ -433,12 +433,13 @@ public class ScannerService : IScannerService
 
 
         await _processSeries.Prime();
-        var processTasks = new List<Task>();
-        async Task TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
+        var processTasks = new List<Func<Task>>();
+
+        Task TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
         {
             var skippedScan = parsedInfo.Item1;
             var parsedFiles = parsedInfo.Item2;
-            if (parsedFiles.Count == 0) return;
+            if (parsedFiles.Count == 0) return Task.CompletedTask;
 
             var foundParsedSeries = new ParsedSeries()
             {
@@ -455,21 +456,23 @@ public class ScannerService : IScannerService
                     NormalizedName = Scanner.Parser.Parser.Normalize(pf.Series),
                     Format = pf.Format
                 }));
-                return;
+                return Task.CompletedTask;
             }
 
             totalFiles += parsedFiles.Count;
 
 
             seenSeries.Add(foundParsedSeries);
-            await _processSeries.ProcessSeriesAsync(parsedFiles, library);
+            processTasks.Add(async () => await _processSeries.ProcessSeriesAsync(parsedFiles, library));
+            return Task.CompletedTask;
         }
 
 
         var scanElapsedTime = await ScanFiles(library, libraryFolderPaths, shouldUseLibraryScan, TrackFiles, forceUpdate);
 
-        await Task.WhenAll(processTasks);
+        foreach (var task in processTasks)
+        {
+            await task();
+        }
 
         await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent(string.Empty, library.Name, ProgressEventType.Ended));
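This hunk is the core of "processing of series runs synchronously": previously series were processed as they were discovered and the collected tasks were awaited together with Task.WhenAll; now each unit of work is captured as a Func<Task> and awaited one at a time after the file scan completes. A generic sketch of the difference, with an illustrative worker rather than Kavita code:

using System;
using System.Collections.Generic;
using System.Threading.Tasks;

public static class DeferredWorkSketch
{
    private static async Task ProcessAsync(int id)
    {
        await Task.Delay(10);
        Console.WriteLine($"Processed {id}");
    }

    public static async Task Main()
    {
        // Capturing Func<Task> defers the work; nothing starts until the delegate is invoked.
        var work = new List<Func<Task>>();
        for (var i = 1; i <= 3; i++)
        {
            var id = i; // copy the loop variable so each delegate captures its own value
            work.Add(() => ProcessAsync(id));
        }

        // Awaiting each delegate in turn runs the items strictly one after another,
        // unlike Task.WhenAll over tasks that are already running concurrently.
        foreach (var item in work)
        {
            await item();
        }
    }
}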
@@ -108,7 +108,7 @@
 </div>
 
 <div class="mb-3">
-    <label for="folder-watching" class="form-label" aria-describedby="folder-watching-info">Folder Watching</label><app-tag-badge [selectionMode]="TagBadgeCursor.Clickable" class="ms-1" ngbTooltip="This feature may not always work reliably">Experimental</app-tag-badge>
+    <label for="folder-watching" class="form-label" aria-describedby="folder-watching-info">Folder Watching</label>
     <p class="accent" id="folder-watching-info">Allows Kavita to monitor Library Folders to detect changes and invoke scanning on those changes. This allows content to be updated without manually invoking scans or waiting for nightly scans.</p>
     <div class="form-check form-switch">
         <input id="folder-watching" type="checkbox" class="form-check-input" formControlName="enableFolderWatching" role="switch">
@@ -70,7 +70,7 @@
 <ng-template ngbNavContent>
 
     <div class="row g-0">
-        <div class="col-md-12">
+        <div class="col-lg-8 col-md-12 pe-2">
             <div class="mb-3">
                 <label for="collections" class="form-label">Collections </label>
                 <app-typeahead (selectedData)="updateCollections($event)" [settings]="collectionTagSettings" [locked]="true">
@@ -83,6 +83,20 @@
                 </app-typeahead>
             </div>
         </div>
+        <div class="col-lg-4 col-md-12">
+            <div class="mb-3" style="width: 100%">
+                <label for="release-year" class="form-label">Release Year</label>
+                <div class="input-group {{metadata.releaseYearLocked ? 'lock-active' : ''}}">
+                    <ng-container [ngTemplateOutlet]="lock" [ngTemplateOutletContext]="{ item: metadata, field: 'releaseYearLocked' }"></ng-container>
+                    <input type="number" class="form-control" id="release-year" formControlName="releaseYear" maxlength="4" minlength="4" [class.is-invalid]="editSeriesForm.get('releaseYear')?.invalid && editSeriesForm.get('releaseYear')?.touched">
+                    <ng-container *ngIf="editSeriesForm.get('releaseYear')?.errors as errors">
+                        <p class="invalid-feedback" *ngIf="errors.pattern">
+                            This must be a valid year greater than 1000 and 4 characters long
+                        </p>
+                    </ng-container>
+                </div>
+            </div>
+        </div>
     </div>
 
     <div class="row g-0">
@@ -137,7 +151,7 @@
                 </app-typeahead>
             </div>
         </div>
         <div class="col-lg-4 col-md-12 pe-2">
             <div class="mb-3">
                 <label for="age-rating" class="form-label">Age Rating</label>
                 <div class="input-group {{metadata.ageRatingLocked ? 'lock-active' : ''}}">
@@ -137,6 +137,7 @@ export class EditSeriesModalComponent implements OnInit, OnDestroy {
       ageRating: new FormControl('', []),
       publicationStatus: new FormControl('', []),
       language: new FormControl('', []),
+      releaseYear: new FormControl('', [Validators.minLength(4), Validators.maxLength(4), Validators.pattern(/[1-9]\d{3}/)]),
     });
     this.cdRef.markForCheck();
 
@@ -165,6 +166,7 @@ export class EditSeriesModalComponent implements OnInit, OnDestroy {
     this.editSeriesForm.get('ageRating')?.patchValue(this.metadata.ageRating);
     this.editSeriesForm.get('publicationStatus')?.patchValue(this.metadata.publicationStatus);
     this.editSeriesForm.get('language')?.patchValue(this.metadata.language);
+    this.editSeriesForm.get('releaseYear')?.patchValue(this.metadata.releaseYear);
     this.cdRef.markForCheck();
 
     this.editSeriesForm.get('name')?.valueChanges.pipe(takeUntil(this.onDestroy)).subscribe(val => {
@@ -200,6 +202,12 @@ export class EditSeriesModalComponent implements OnInit, OnDestroy {
       this.metadata.publicationStatusLocked = true;
       this.cdRef.markForCheck();
     });
+
+    this.editSeriesForm.get('releaseYear')?.valueChanges.pipe(takeUntil(this.onDestroy)).subscribe(val => {
+      this.metadata.releaseYear = parseInt(val + '', 10);
+      this.metadata.releaseYearLocked = true;
+      this.cdRef.markForCheck();
+    });
     }
     });
 