mirror of
https://github.com/Kareadita/Kavita.git
synced 2025-07-09 03:04:19 -04:00
Scan Loop Fortification (#1573)
* Cleanup some messaging in the scan loop to be more context bearing * Added Response Caching to Series Detail for 1 min, due to the heavy nature of the call. * Refactored code to make it so that processing of series runs sync correctly. Added a log to inform the user of corrupted volume from buggy code in v0.5.6. * Moved folder watching out of experimental * Fixed an issue where empty folders could break the scan loop * Another fix for when dates aren't valid, the scanner wouldn't get the proper min and would throw an exception (develop) * Implemented the ability to edit release year from the UI for a series. * Added a unit test for some new logic * Code smells * Rewrote the handler for suspending watching to be more resilient and ensure no two threads have a race condition. * More error handling for when a ScanFolder is invoked but multiple series belong to that folder, log it to the user and default to a library scan. * ScanSeries now will check for kavitaignores higher than its own folder and respect library level. * Fixed an issue where image series with a folder name containing the word "folder" could get ignored as it thought the image was a cover image. When a series folder is moved or deleted, skip parent ignore finding. * Removed some old files, added in scanFolder a check if the series found for a folder is in a book library and if so to always do a library scan (as books are often nested into one folder with multiple series). Added some unit tests * Refactored some scan loop logic into ComicInfo, wrote tests and updated some documentation to make the fields more clear. * Added a test for GetLastWriteTime based on recent bug * Cleaned up some redundant code * Fixed a bad merge * Code smells * Removed a package that's no longer used. * Ensure we check against ScanQueue on ScanFolder enqueuing * Documentation and more bullet proofing to ensure Hangfire checks work more as expected
This commit is contained in:
parent
5a75a204db
commit
6ea9f2c73e
1
.gitignore
vendored
1
.gitignore
vendored
@ -530,3 +530,4 @@ API.Tests/TestResults/
|
||||
UI/Web/.vscode/settings.json
|
||||
/API.Tests/Services/Test Data/ArchiveService/CoverImages/output/*
|
||||
UI/Web/.angular/
|
||||
BenchmarkDotNet.Artifacts
|
@ -35,4 +35,62 @@ public class ComicInfoTests
|
||||
Assert.Equal(AgeRating.RatingPending, ComicInfo.ConvertAgeRatingToEnum("rating pending"));
|
||||
}
|
||||
#endregion
|
||||
|
||||
|
||||
#region CalculatedCount
|
||||
|
||||
[Fact]
|
||||
public void CalculatedCount_ReturnsVolumeCount()
|
||||
{
|
||||
var ci = new ComicInfo()
|
||||
{
|
||||
Number = "5",
|
||||
Volume = "10",
|
||||
Count = 10
|
||||
};
|
||||
|
||||
Assert.Equal(5, ci.CalculatedCount());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculatedCount_ReturnsNoCountWhenCountNotSet()
|
||||
{
|
||||
var ci = new ComicInfo()
|
||||
{
|
||||
Number = "5",
|
||||
Volume = "10",
|
||||
Count = 0
|
||||
};
|
||||
|
||||
Assert.Equal(5, ci.CalculatedCount());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculatedCount_ReturnsNumberCount()
|
||||
{
|
||||
var ci = new ComicInfo()
|
||||
{
|
||||
Number = "5",
|
||||
Volume = "",
|
||||
Count = 10
|
||||
};
|
||||
|
||||
Assert.Equal(5, ci.CalculatedCount());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculatedCount_ReturnsNumberCount_OnlyWholeNumber()
|
||||
{
|
||||
var ci = new ComicInfo()
|
||||
{
|
||||
Number = "5.7",
|
||||
Volume = "",
|
||||
Count = 10
|
||||
};
|
||||
|
||||
Assert.Equal(5, ci.CalculatedCount());
|
||||
}
|
||||
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
@ -1,3 +1,4 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using API.Entities;
|
||||
@ -108,7 +109,6 @@ public class ChapterListExtensionsTests
|
||||
var actualChapter = chapterList.GetChapterByRange(info);
|
||||
|
||||
Assert.Equal(chapterList[0], actualChapter);
|
||||
|
||||
}
|
||||
|
||||
#region GetFirstChapterWithFiles
|
||||
@ -140,5 +140,47 @@ public class ChapterListExtensionsTests
|
||||
}
|
||||
|
||||
|
||||
#endregion
|
||||
|
||||
#region MinimumReleaseYear
|
||||
|
||||
[Fact]
|
||||
public void MinimumReleaseYear_ZeroIfNoChapters()
|
||||
{
|
||||
var chapterList = new List<Chapter>();
|
||||
|
||||
Assert.Equal(0, chapterList.MinimumReleaseYear());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MinimumReleaseYear_ZeroIfNoValidDates()
|
||||
{
|
||||
var chapterList = new List<Chapter>()
|
||||
{
|
||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
||||
};
|
||||
|
||||
chapterList[0].ReleaseDate = new DateTime(10, 1, 1);
|
||||
chapterList[1].ReleaseDate = DateTime.MinValue;
|
||||
|
||||
Assert.Equal(0, chapterList.MinimumReleaseYear());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MinimumReleaseYear_MinValidReleaseYear()
|
||||
{
|
||||
var chapterList = new List<Chapter>()
|
||||
{
|
||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true),
|
||||
CreateChapter("detective comics", "0", CreateFile("/manga/detective comics #001.cbz", MangaFormat.Archive), true)
|
||||
};
|
||||
|
||||
chapterList[0].ReleaseDate = new DateTime(2002, 1, 1);
|
||||
chapterList[1].ReleaseDate = new DateTime(2012, 2, 1);
|
||||
|
||||
Assert.Equal(2002, chapterList.MinimumReleaseYear());
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
@ -103,6 +103,7 @@ public class DefaultParserTests
|
||||
{
|
||||
const string rootPath = @"E:/Manga/";
|
||||
var expected = new Dictionary<string, ParserInfo>();
|
||||
|
||||
var filepath = @"E:/Manga/Mujaki no Rakuen/Mujaki no Rakuen Vol12 ch76.cbz";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
@ -215,14 +216,6 @@ public class DefaultParserTests
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "",
|
||||
Chapters = "0", Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
// If an image is cover exclusively, ignore it
|
||||
filepath = @"E:\Manga\Seraph of the End\cover.png";
|
||||
expected.Add(filepath, null);
|
||||
@ -235,11 +228,12 @@ public class DefaultParserTests
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
// Note: Fallback to folder will parse Monster #8 and get Monster
|
||||
filepath = @"E:\Manga\Monster #8\Ch. 001-016 [MangaPlus] [Digital] [amit34521]\Monster #8 Ch. 001 [MangaPlus] [Digital] [amit34521]\13.jpg";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Monster #8", Volumes = "0", Edition = "",
|
||||
Chapters = "1", Filename = "13.jpg", Format = MangaFormat.Archive,
|
||||
Series = "Monster", Volumes = "0", Edition = "",
|
||||
Chapters = "1", Filename = "13.jpg", Format = MangaFormat.Image,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
@ -251,6 +245,29 @@ public class DefaultParserTests
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Vol19\ch186\Vol. 19 p106.gif";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Just Images the second", Volumes = "19", Edition = "",
|
||||
Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Extra layer for no reason\Just Images the second\Blank Folder\Vol19\ch186\Vol. 19 p106.gif";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Just Images the second", Volumes = "19", Edition = "",
|
||||
Chapters = "186", Filename = "Vol. 19 p106.gif", Format = MangaFormat.Image,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
|
||||
expected.Add(filepath, new ParserInfo
|
||||
{
|
||||
Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "",
|
||||
Chapters = "0", Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
|
||||
FullFilePath = filepath, IsSpecial = false
|
||||
});
|
||||
|
||||
foreach (var file in expected.Keys)
|
||||
{
|
||||
@ -259,7 +276,7 @@ public class DefaultParserTests
|
||||
if (expectedInfo == null)
|
||||
{
|
||||
Assert.Null(actual);
|
||||
return;
|
||||
continue;
|
||||
}
|
||||
Assert.NotNull(actual);
|
||||
_testOutputHelper.WriteLine($"Validating {file}");
|
||||
@ -399,7 +416,7 @@ public class DefaultParserTests
|
||||
if (expectedInfo == null)
|
||||
{
|
||||
Assert.Null(actual);
|
||||
return;
|
||||
continue;
|
||||
}
|
||||
Assert.NotNull(actual);
|
||||
_testOutputHelper.WriteLine($"Validating {file}");
|
||||
|
@ -995,4 +995,20 @@ public class DirectoryServiceTests
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetLastWriteTime
|
||||
|
||||
[Fact]
|
||||
public void GetLastWriteTime_ShouldReturnMaxTime_IfNoFiles()
|
||||
{
|
||||
const string dir = "C:/manga/";
|
||||
var filesystem = new MockFileSystem();
|
||||
filesystem.AddDirectory("C:/");
|
||||
filesystem.AddDirectory(dir);
|
||||
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
|
||||
|
||||
Assert.Equal(DateTime.MaxValue, ds.GetLastWriteTime(dir));
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
@ -124,5 +124,23 @@ public class ComicInfo
|
||||
info.CoverArtist = Services.Tasks.Scanner.Parser.Parser.CleanAuthor(info.CoverArtist);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Uses both Volume and Number to make an educated guess as to what count refers to and it's highest number.
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
public int CalculatedCount()
|
||||
{
|
||||
if (!string.IsNullOrEmpty(Number) && float.Parse(Number) > 0)
|
||||
{
|
||||
return (int) Math.Floor(float.Parse(Number));
|
||||
}
|
||||
if (!string.IsNullOrEmpty(Volume) && float.Parse(Volume) > 0)
|
||||
{
|
||||
return Math.Max(Count, (int) Math.Floor(float.Parse(Volume)));
|
||||
}
|
||||
|
||||
return Count;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
@ -33,8 +33,7 @@ public enum SeriesIncludes
|
||||
Volumes = 2,
|
||||
Metadata = 4,
|
||||
Related = 8,
|
||||
//Related = 16,
|
||||
//UserPreferences = 32
|
||||
Library = 16,
|
||||
}
|
||||
|
||||
internal class RecentlyAddedSeries
|
||||
@ -120,8 +119,7 @@ public interface ISeriesRepository
|
||||
Task<SeriesDto> GetSeriesForChapter(int chapterId, int userId);
|
||||
Task<PagedList<SeriesDto>> GetWantToReadForUserAsync(int userId, UserParams userParams, FilterDto filter);
|
||||
Task<int> GetSeriesIdByFolder(string folder);
|
||||
Task<Series> GetSeriesByFolderPath(string folder);
|
||||
Task<Series> GetFullSeriesByName(string series, int libraryId);
|
||||
Task<Series> GetSeriesByFolderPath(string folder, SeriesIncludes includes = SeriesIncludes.None);
|
||||
Task<Series> GetFullSeriesByAnyName(string seriesName, string localizedName, int libraryId, MangaFormat format, bool withFullIncludes = true);
|
||||
Task<List<Series>> RemoveSeriesNotInList(IList<ParsedSeries> seenSeries, int libraryId);
|
||||
Task<IDictionary<string, IList<SeriesModified>>> GetFolderPathMap(int libraryId);
|
||||
@ -1173,52 +1171,16 @@ public class SeriesRepository : ISeriesRepository
|
||||
/// Return a Series by Folder path. Null if not found.
|
||||
/// </summary>
|
||||
/// <param name="folder">This will be normalized in the query</param>
|
||||
/// <param name="includes">Additional relationships to include with the base query</param>
|
||||
/// <returns></returns>
|
||||
public async Task<Series> GetSeriesByFolderPath(string folder)
|
||||
public async Task<Series> GetSeriesByFolderPath(string folder, SeriesIncludes includes = SeriesIncludes.None)
|
||||
{
|
||||
var normalized = Services.Tasks.Scanner.Parser.Parser.NormalizePath(folder);
|
||||
return await _context.Series.SingleOrDefaultAsync(s => s.FolderPath.Equals(normalized));
|
||||
}
|
||||
var query = _context.Series.Where(s => s.FolderPath.Equals(normalized));
|
||||
|
||||
/// <summary>
|
||||
/// Finds a series by series name for a given library.
|
||||
/// </summary>
|
||||
/// <remarks>This pulls everything with the Series, so should be used only when needing tracking on all related tables</remarks>
|
||||
/// <param name="series"></param>
|
||||
/// <param name="libraryId"></param>
|
||||
/// <returns></returns>
|
||||
public Task<Series> GetFullSeriesByName(string series, int libraryId)
|
||||
{
|
||||
var localizedSeries = Services.Tasks.Scanner.Parser.Parser.Normalize(series);
|
||||
return _context.Series
|
||||
.Where(s => (s.NormalizedName.Equals(localizedSeries)
|
||||
|| s.LocalizedName.Equals(series)) && s.LibraryId == libraryId)
|
||||
.Include(s => s.Metadata)
|
||||
.ThenInclude(m => m.People)
|
||||
.Include(s => s.Metadata)
|
||||
.ThenInclude(m => m.Genres)
|
||||
.Include(s => s.Library)
|
||||
.Include(s => s.Volumes)
|
||||
.ThenInclude(v => v.Chapters)
|
||||
.ThenInclude(cm => cm.People)
|
||||
query = AddIncludesToQuery(query, includes);
|
||||
|
||||
.Include(s => s.Volumes)
|
||||
.ThenInclude(v => v.Chapters)
|
||||
.ThenInclude(c => c.Tags)
|
||||
|
||||
.Include(s => s.Volumes)
|
||||
.ThenInclude(v => v.Chapters)
|
||||
.ThenInclude(c => c.Genres)
|
||||
|
||||
|
||||
.Include(s => s.Metadata)
|
||||
.ThenInclude(m => m.Tags)
|
||||
|
||||
.Include(s => s.Volumes)
|
||||
.ThenInclude(v => v.Chapters)
|
||||
.ThenInclude(c => c.Files)
|
||||
.AsSplitQuery()
|
||||
.SingleOrDefaultAsync();
|
||||
return await query.SingleOrDefaultAsync();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@ -1240,6 +1202,7 @@ public class SeriesRepository : ISeriesRepository
|
||||
.Where(s => s.Format == format && format != MangaFormat.Unknown)
|
||||
.Where(s => s.NormalizedName.Equals(normalizedSeries)
|
||||
|| (s.NormalizedLocalizedName.Equals(normalizedSeries) && s.NormalizedLocalizedName != string.Empty));
|
||||
|
||||
if (!string.IsNullOrEmpty(normalizedLocalized))
|
||||
{
|
||||
query = query.Where(s =>
|
||||
@ -1516,7 +1479,8 @@ public class SeriesRepository : ISeriesRepository
|
||||
LastScanned = s.LastFolderScanned,
|
||||
SeriesName = s.Name,
|
||||
FolderPath = s.FolderPath,
|
||||
Format = s.Format
|
||||
Format = s.Format,
|
||||
LibraryRoots = s.Library.Folders.Select(f => f.Path)
|
||||
}).ToListAsync();
|
||||
|
||||
var map = new Dictionary<string, IList<SeriesModified>>();
|
||||
@ -1538,4 +1502,30 @@ public class SeriesRepository : ISeriesRepository
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
private static IQueryable<Series> AddIncludesToQuery(IQueryable<Series> query, SeriesIncludes includeFlags)
|
||||
{
|
||||
if (includeFlags.HasFlag(SeriesIncludes.Library))
|
||||
{
|
||||
query = query.Include(u => u.Library);
|
||||
}
|
||||
|
||||
if (includeFlags.HasFlag(SeriesIncludes.Related))
|
||||
{
|
||||
query = query.Include(u => u.Relations);
|
||||
}
|
||||
|
||||
if (includeFlags.HasFlag(SeriesIncludes.Metadata))
|
||||
{
|
||||
query = query.Include(u => u.Metadata);
|
||||
}
|
||||
|
||||
if (includeFlags.HasFlag(SeriesIncludes.Volumes))
|
||||
{
|
||||
query = query.Include(u => u.Volumes);
|
||||
}
|
||||
|
||||
|
||||
return query;
|
||||
}
|
||||
}
|
||||
|
@ -65,11 +65,12 @@ public class Chapter : IEntityDate, IHasReadTimeEstimate
|
||||
/// </summary>
|
||||
public string Language { get; set; }
|
||||
/// <summary>
|
||||
/// Total number of issues in the series
|
||||
/// Total number of issues or volumes in the series
|
||||
/// </summary>
|
||||
/// <remarks>Users may use Volume count or issue count. Kavita performs some light logic to help Count match up with TotalCount</remarks>
|
||||
public int TotalCount { get; set; } = 0;
|
||||
/// <summary>
|
||||
/// Number in the Total Count
|
||||
/// Number of the Total Count (progress the Series is complete)
|
||||
/// </summary>
|
||||
public int Count { get; set; } = 0;
|
||||
|
||||
|
@ -31,4 +31,14 @@ public static class ChapterListExtensions
|
||||
? chapters.FirstOrDefault(c => c.Range == info.Filename || (c.Files.Select(f => f.FilePath).Contains(info.FullFilePath)))
|
||||
: chapters.FirstOrDefault(c => c.Range == info.Chapters);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns the minimum Release Year from all Chapters that meets the year requirement (>= 1000)
|
||||
/// </summary>
|
||||
/// <param name="chapters"></param>
|
||||
/// <returns></returns>
|
||||
public static int MinimumReleaseYear(this IList<Chapter> chapters)
|
||||
{
|
||||
return chapters.Select(v => v.ReleaseDate.Year).Where(y => y >= 1000).DefaultIfEmpty().Min();
|
||||
}
|
||||
}
|
||||
|
@ -651,7 +651,7 @@ public class DirectoryService : IDirectoryService
|
||||
public DateTime GetLastWriteTime(string folderPath)
|
||||
{
|
||||
if (!FileSystem.Directory.Exists(folderPath)) throw new IOException($"{folderPath} does not exist");
|
||||
var fileEntries = Directory.GetFileSystemEntries(folderPath, "*.*", SearchOption.AllDirectories);
|
||||
var fileEntries = FileSystem.Directory.GetFileSystemEntries(folderPath, "*.*", SearchOption.AllDirectories);
|
||||
if (fileEntries.Length == 0) return DateTime.MaxValue;
|
||||
return fileEntries.Max(path => FileSystem.File.GetLastWriteTime(path));
|
||||
}
|
||||
|
@ -2,6 +2,7 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using API.Data;
|
||||
using API.Entities.Enums;
|
||||
@ -18,6 +19,7 @@ public interface ITaskScheduler
|
||||
Task ScheduleTasks();
|
||||
Task ScheduleStatsTasks();
|
||||
void ScheduleUpdaterTasks();
|
||||
void ScanFolder(string folderPath, TimeSpan delay);
|
||||
void ScanFolder(string folderPath);
|
||||
void ScanLibrary(int libraryId, bool force = false);
|
||||
void CleanupChapters(int[] chapterIds);
|
||||
@ -179,9 +181,32 @@ public class TaskScheduler : ITaskScheduler
|
||||
RecurringJob.AddOrUpdate("check-updates", () => CheckForUpdate(), Cron.Daily(Rnd.Next(12, 18)), TimeZoneInfo.Local);
|
||||
}
|
||||
|
||||
public void ScanFolder(string folderPath, TimeSpan delay)
|
||||
{
|
||||
var normalizedFolder = Tasks.Scanner.Parser.Parser.NormalizePath(folderPath);
|
||||
if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", new object[] { normalizedFolder }))
|
||||
{
|
||||
_logger.LogInformation("Skipped scheduling ScanFolder for {Folder} as a job already queued",
|
||||
normalizedFolder);
|
||||
return;
|
||||
}
|
||||
|
||||
_logger.LogInformation("Scheduling ScanFolder for {Folder}", normalizedFolder);
|
||||
BackgroundJob.Schedule(() => _scannerService.ScanFolder(normalizedFolder), delay);
|
||||
}
|
||||
|
||||
public void ScanFolder(string folderPath)
|
||||
{
|
||||
_scannerService.ScanFolder(Tasks.Scanner.Parser.Parser.NormalizePath(folderPath));
|
||||
var normalizedFolder = Tasks.Scanner.Parser.Parser.NormalizePath(folderPath);
|
||||
if (HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", new object[] {normalizedFolder}))
|
||||
{
|
||||
_logger.LogInformation("Skipped scheduling ScanFolder for {Folder} as a job already queued",
|
||||
normalizedFolder);
|
||||
return;
|
||||
}
|
||||
|
||||
_logger.LogInformation("Scheduling ScanFolder for {Folder}", normalizedFolder);
|
||||
_scannerService.ScanFolder(normalizedFolder);
|
||||
}
|
||||
|
||||
#endregion
|
||||
@ -298,15 +323,32 @@ public class TaskScheduler : ITaskScheduler
|
||||
await _versionUpdaterService.PushUpdate(update);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// If there is an enqueued or scheduled tak for <see cref="ScannerService.ScanLibrary"/> method
|
||||
/// </summary>
|
||||
/// <param name="libraryId"></param>
|
||||
/// <returns></returns>
|
||||
public static bool HasScanTaskRunningForLibrary(int libraryId)
|
||||
{
|
||||
return
|
||||
HasAlreadyEnqueuedTask("ScannerService", "ScanLibrary", new object[] {libraryId, true}, ScanQueue) ||
|
||||
HasAlreadyEnqueuedTask("ScannerService", "ScanLibrary", new object[] {libraryId, false}, ScanQueue);
|
||||
HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, true}, ScanQueue) ||
|
||||
HasAlreadyEnqueuedTask(ScannerService.Name, "ScanLibrary", new object[] {libraryId, false}, ScanQueue);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Checks if this same invocation is already enqueued
|
||||
/// If there is an enqueued or scheduled tak for <see cref="ScannerService.ScanSeries"/> method
|
||||
/// </summary>
|
||||
/// <param name="seriesId"></param>
|
||||
/// <returns></returns>
|
||||
public static bool HasScanTaskRunningForSeries(int seriesId)
|
||||
{
|
||||
return
|
||||
HasAlreadyEnqueuedTask(ScannerService.Name, "ScanSeries", new object[] {seriesId, true}, ScanQueue) ||
|
||||
HasAlreadyEnqueuedTask(ScannerService.Name, "ScanSeries", new object[] {seriesId, false}, ScanQueue);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Checks if this same invocation is already enqueued or scheduled
|
||||
/// </summary>
|
||||
/// <param name="methodName">Method name that was enqueued</param>
|
||||
/// <param name="className">Class name the method resides on</param>
|
||||
@ -316,16 +358,33 @@ public class TaskScheduler : ITaskScheduler
|
||||
public static bool HasAlreadyEnqueuedTask(string className, string methodName, object[] args, string queue = DefaultQueue)
|
||||
{
|
||||
var enqueuedJobs = JobStorage.Current.GetMonitoringApi().EnqueuedJobs(queue, 0, int.MaxValue);
|
||||
return enqueuedJobs.Any(j => j.Value.InEnqueuedState &&
|
||||
var ret = enqueuedJobs.Any(j => j.Value.InEnqueuedState &&
|
||||
j.Value.Job.Method.DeclaringType != null && j.Value.Job.Args.SequenceEqual(args) &&
|
||||
j.Value.Job.Method.Name.Equals(methodName) &&
|
||||
j.Value.Job.Method.DeclaringType.Name.Equals(className));
|
||||
if (ret) return true;
|
||||
|
||||
var scheduledJobs = JobStorage.Current.GetMonitoringApi().ScheduledJobs(0, int.MaxValue);
|
||||
return scheduledJobs.Any(j =>
|
||||
j.Value.Job.Method.DeclaringType != null && j.Value.Job.Args.SequenceEqual(args) &&
|
||||
j.Value.Job.Method.Name.Equals(methodName) &&
|
||||
j.Value.Job.Method.DeclaringType.Name.Equals(className));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Checks against any jobs that are running or about to run
|
||||
/// </summary>
|
||||
/// <param name="classNames"></param>
|
||||
/// <param name="queue"></param>
|
||||
/// <returns></returns>
|
||||
public static bool RunningAnyTasksByMethod(IEnumerable<string> classNames, string queue = DefaultQueue)
|
||||
{
|
||||
var enqueuedJobs = JobStorage.Current.GetMonitoringApi().EnqueuedJobs(queue, 0, int.MaxValue);
|
||||
return enqueuedJobs.Any(j => !j.Value.InEnqueuedState &&
|
||||
var ret = enqueuedJobs.Any(j => !j.Value.InEnqueuedState &&
|
||||
classNames.Contains(j.Value.Job.Method.DeclaringType?.Name));
|
||||
if (ret) return true;
|
||||
|
||||
var runningJobs = JobStorage.Current.GetMonitoringApi().ProcessingJobs(0, int.MaxValue);
|
||||
return runningJobs.Any(j => classNames.Contains(j.Value.Job.Method.DeclaringType?.Name));
|
||||
}
|
||||
}
|
||||
|
@ -38,7 +38,7 @@ public class LibraryWatcher : ILibraryWatcher
|
||||
private readonly IDirectoryService _directoryService;
|
||||
private readonly IUnitOfWork _unitOfWork;
|
||||
private readonly ILogger<LibraryWatcher> _logger;
|
||||
private readonly IScannerService _scannerService;
|
||||
private readonly ITaskScheduler _taskScheduler;
|
||||
|
||||
private static readonly Dictionary<string, IList<FileSystemWatcher>> WatcherDictionary = new ();
|
||||
/// <summary>
|
||||
@ -54,18 +54,19 @@ public class LibraryWatcher : ILibraryWatcher
|
||||
/// <summary>
|
||||
/// Counts within a time frame how many times the buffer became full. Is used to reschedule LibraryWatcher to start monitoring much later rather than instantly
|
||||
/// </summary>
|
||||
private int _bufferFullCounter = 0;
|
||||
private int _bufferFullCounter;
|
||||
/// <summary>
|
||||
/// Used to lock buffer Full Counter
|
||||
/// </summary>
|
||||
private static readonly object Lock = new ();
|
||||
|
||||
private DateTime _lastBufferOverflow = DateTime.MinValue;
|
||||
|
||||
|
||||
|
||||
public LibraryWatcher(IDirectoryService directoryService, IUnitOfWork unitOfWork, ILogger<LibraryWatcher> logger, IScannerService scannerService, IHostEnvironment environment)
|
||||
public LibraryWatcher(IDirectoryService directoryService, IUnitOfWork unitOfWork,
|
||||
ILogger<LibraryWatcher> logger, IHostEnvironment environment, ITaskScheduler taskScheduler)
|
||||
{
|
||||
_directoryService = directoryService;
|
||||
_unitOfWork = unitOfWork;
|
||||
_logger = logger;
|
||||
_scannerService = scannerService;
|
||||
_taskScheduler = taskScheduler;
|
||||
|
||||
_queueWaitTime = environment.IsDevelopment() ? TimeSpan.FromSeconds(30) : TimeSpan.FromMinutes(5);
|
||||
|
||||
@ -91,8 +92,8 @@ public class LibraryWatcher : ILibraryWatcher
|
||||
watcher.Created += OnCreated;
|
||||
watcher.Deleted += OnDeleted;
|
||||
watcher.Error += OnError;
|
||||
watcher.Disposed += (sender, args) =>
|
||||
_logger.LogError("[LibraryWatcher] watcher was disposed when it shouldn't have been");
|
||||
watcher.Disposed += (_, _) =>
|
||||
_logger.LogError("[LibraryWatcher] watcher was disposed when it shouldn't have been. Please report this to Kavita dev");
|
||||
|
||||
watcher.Filter = "*.*";
|
||||
watcher.IncludeSubdirectories = true;
|
||||
@ -127,16 +128,14 @@ public class LibraryWatcher : ILibraryWatcher
|
||||
{
|
||||
_logger.LogDebug("[LibraryWatcher] Restarting watcher");
|
||||
|
||||
UpdateBufferOverflow();
|
||||
|
||||
StopWatching();
|
||||
await StartWatching();
|
||||
}
|
||||
|
||||
private void OnChanged(object sender, FileSystemEventArgs e)
|
||||
{
|
||||
_logger.LogDebug("[LibraryWatcher] Changed: {FullPath}, {Name}, {ChangeType}", e.FullPath, e.Name, e.ChangeType);
|
||||
if (e.ChangeType != WatcherChangeTypes.Changed) return;
|
||||
_logger.LogDebug("[LibraryWatcher] Changed: {FullPath}, {Name}", e.FullPath, e.Name);
|
||||
BackgroundJob.Enqueue(() => ProcessChange(e.FullPath, string.IsNullOrEmpty(_directoryService.FileSystem.Path.GetExtension(e.Name))));
|
||||
}
|
||||
|
||||
@ -158,20 +157,31 @@ public class LibraryWatcher : ILibraryWatcher
|
||||
BackgroundJob.Enqueue(() => ProcessChange(e.FullPath, true));
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// On error, we count the number of errors that have occured. If the number of errors has been more than 2 in last 10 minutes, then we suspend listening for an hour
|
||||
/// </summary>
|
||||
/// <remarks>This will schedule jobs to decrement the buffer full counter</remarks>
|
||||
/// <param name="sender"></param>
|
||||
/// <param name="e"></param>
|
||||
private void OnError(object sender, ErrorEventArgs e)
|
||||
{
|
||||
_logger.LogError(e.GetException(), "[LibraryWatcher] An error occured, likely too many changes occured at once or the folder being watched was deleted. Restarting Watchers");
|
||||
_bufferFullCounter += 1;
|
||||
_lastBufferOverflow = DateTime.Now;
|
||||
bool condition;
|
||||
lock (Lock)
|
||||
{
|
||||
_bufferFullCounter += 1;
|
||||
condition = _bufferFullCounter >= 3;
|
||||
}
|
||||
|
||||
if (_bufferFullCounter >= 3)
|
||||
if (condition)
|
||||
{
|
||||
_logger.LogInformation("[LibraryWatcher] Internal buffer has been overflown multiple times in past 10 minutes. Suspending file watching for an hour");
|
||||
StopWatching();
|
||||
BackgroundJob.Schedule(() => RestartWatching(), TimeSpan.FromHours(1));
|
||||
return;
|
||||
}
|
||||
Task.Run(RestartWatching);
|
||||
BackgroundJob.Schedule(() => UpdateLastBufferOverflow(), TimeSpan.FromMinutes(10));
|
||||
}
|
||||
|
||||
|
||||
@ -185,8 +195,6 @@ public class LibraryWatcher : ILibraryWatcher
|
||||
// ReSharper disable once MemberCanBePrivate.Global
|
||||
public async Task ProcessChange(string filePath, bool isDirectoryChange = false)
|
||||
{
|
||||
UpdateBufferOverflow();
|
||||
|
||||
var sw = Stopwatch.StartNew();
|
||||
_logger.LogDebug("[LibraryWatcher] Processing change of {FilePath}", filePath);
|
||||
try
|
||||
@ -214,29 +222,16 @@ public class LibraryWatcher : ILibraryWatcher
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if this task has already enqueued or is being processed, before enqueing
|
||||
|
||||
var alreadyScheduled =
|
||||
TaskScheduler.HasAlreadyEnqueuedTask(ScannerService.Name, "ScanFolder", new object[] {fullPath});
|
||||
if (!alreadyScheduled)
|
||||
{
|
||||
_logger.LogInformation("[LibraryWatcher] Scheduling ScanFolder for {Folder}", fullPath);
|
||||
BackgroundJob.Schedule(() => _scannerService.ScanFolder(fullPath), _queueWaitTime);
|
||||
}
|
||||
else
|
||||
{
|
||||
_logger.LogInformation("[LibraryWatcher] Skipped scheduling ScanFolder for {Folder} as a job already queued",
|
||||
fullPath);
|
||||
}
|
||||
_taskScheduler.ScanFolder(fullPath, _queueWaitTime);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "[LibraryWatcher] An error occured when processing a watch event");
|
||||
}
|
||||
_logger.LogDebug("[LibraryWatcher] ProcessChange ran in {ElapsedMilliseconds}ms", sw.ElapsedMilliseconds);
|
||||
_logger.LogDebug("[LibraryWatcher] ProcessChange completed in {ElapsedMilliseconds}ms", sw.ElapsedMilliseconds);
|
||||
}
|
||||
|
||||
private string GetFolder(string filePath, IList<string> libraryFolders)
|
||||
private string GetFolder(string filePath, IEnumerable<string> libraryFolders)
|
||||
{
|
||||
var parentDirectory = _directoryService.GetParentDirectoryName(filePath);
|
||||
_logger.LogDebug("[LibraryWatcher] Parent Directory: {ParentDirectory}", parentDirectory);
|
||||
@ -256,14 +251,17 @@ public class LibraryWatcher : ILibraryWatcher
|
||||
return Parser.Parser.NormalizePath(_directoryService.FileSystem.Path.Join(libraryFolder, rootFolder.First()));
|
||||
}
|
||||
|
||||
private void UpdateBufferOverflow()
|
||||
|
||||
/// <summary>
|
||||
/// This is called via Hangfire to decrement the counter. Must work around a lock
|
||||
/// </summary>
|
||||
// ReSharper disable once MemberCanBePrivate.Global
|
||||
public void UpdateLastBufferOverflow()
|
||||
{
|
||||
if (_bufferFullCounter == 0) return;
|
||||
// If the last buffer overflow is over 5 mins back, we can remove a buffer count
|
||||
if (_lastBufferOverflow < DateTime.Now.Subtract(TimeSpan.FromMinutes(5)))
|
||||
lock (Lock)
|
||||
{
|
||||
_bufferFullCounter = Math.Min(0, _bufferFullCounter - 1);
|
||||
_lastBufferOverflow = DateTime.Now;
|
||||
if (_bufferFullCounter == 0) return;
|
||||
_bufferFullCounter -= 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,12 +1,14 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
using API.Parser;
|
||||
using API.SignalR;
|
||||
using Kavita.Common.Helpers;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace API.Services.Tasks.Scanner;
|
||||
@ -39,6 +41,7 @@ public class SeriesModified
|
||||
public string SeriesName { get; set; }
|
||||
public DateTime LastScanned { get; set; }
|
||||
public MangaFormat Format { get; set; }
|
||||
public IEnumerable<string> LibraryRoots { get; set; }
|
||||
}
|
||||
|
||||
|
||||
@ -109,7 +112,41 @@ public class ParseScannedFiles
|
||||
await folderAction(new List<string>(), folderPath);
|
||||
return;
|
||||
}
|
||||
await folderAction(_directoryService.ScanFiles(folderPath), folderPath);
|
||||
// We need to calculate all folders till library root and see if any kavitaignores
|
||||
var seriesMatcher = new GlobMatcher();
|
||||
try
|
||||
{
|
||||
var roots = seriesPaths[folderPath][0].LibraryRoots.Select(Scanner.Parser.Parser.NormalizePath).ToList();
|
||||
var libraryFolder = roots.SingleOrDefault(folderPath.Contains);
|
||||
|
||||
if (string.IsNullOrEmpty(libraryFolder) || !Directory.Exists(folderPath))
|
||||
{
|
||||
await folderAction(_directoryService.ScanFiles(folderPath, seriesMatcher), folderPath);
|
||||
return;
|
||||
}
|
||||
|
||||
var allParents = _directoryService.GetFoldersTillRoot(libraryFolder, folderPath);
|
||||
var path = libraryFolder;
|
||||
|
||||
// Apply the library root level kavitaignore
|
||||
var potentialIgnoreFile = _directoryService.FileSystem.Path.Join(path, DirectoryService.KavitaIgnoreFile);
|
||||
seriesMatcher.Merge(_directoryService.CreateMatcherFromFile(potentialIgnoreFile));
|
||||
|
||||
// Then apply kavitaignores for each folder down to where the series folder is
|
||||
foreach (var folderPart in allParents.Reverse())
|
||||
{
|
||||
path = Parser.Parser.NormalizePath(Path.Join(libraryFolder, folderPart));
|
||||
potentialIgnoreFile = _directoryService.FileSystem.Path.Join(path, DirectoryService.KavitaIgnoreFile);
|
||||
seriesMatcher.Merge(_directoryService.CreateMatcherFromFile(potentialIgnoreFile));
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "There was an error trying to find and apply .kavitaignores above the Series Folder. Scanning without them present");
|
||||
}
|
||||
|
||||
|
||||
await folderAction(_directoryService.ScanFiles(folderPath, seriesMatcher), folderPath);
|
||||
}
|
||||
|
||||
|
||||
|
@ -62,7 +62,7 @@ public class DefaultParser : IDefaultParser
|
||||
};
|
||||
}
|
||||
|
||||
if (Parser.IsCoverImage(filePath)) return null;
|
||||
if (Parser.IsCoverImage(_directoryService.FileSystem.Path.GetFileName(filePath))) return null;
|
||||
|
||||
if (Parser.IsImage(filePath))
|
||||
{
|
||||
|
@ -238,13 +238,7 @@ public class ProcessSeries : IProcessSeries
|
||||
// Update Metadata based on Chapter metadata
|
||||
if (!series.Metadata.ReleaseYearLocked)
|
||||
{
|
||||
series.Metadata.ReleaseYear = chapters.Select(v => v.ReleaseDate.Year).Where(y => y >= 1000).DefaultIfEmpty().Min();
|
||||
|
||||
if (series.Metadata.ReleaseYear < 1000)
|
||||
{
|
||||
// Not a valid year, default to 0
|
||||
series.Metadata.ReleaseYear = 0;
|
||||
}
|
||||
series.Metadata.ReleaseYear = chapters.MinimumReleaseYear();
|
||||
}
|
||||
|
||||
// Set the AgeRating as highest in all the comicInfos
|
||||
@ -637,14 +631,7 @@ public class ProcessSeries : IProcessSeries
|
||||
}
|
||||
|
||||
// This needs to check against both Number and Volume to calculate Count
|
||||
if (!string.IsNullOrEmpty(comicInfo.Number) && float.Parse(comicInfo.Number) > 0)
|
||||
{
|
||||
chapter.Count = (int) Math.Floor(float.Parse(comicInfo.Number));
|
||||
}
|
||||
if (!string.IsNullOrEmpty(comicInfo.Volume) && float.Parse(comicInfo.Volume) > 0)
|
||||
{
|
||||
chapter.Count = Math.Max(chapter.Count, (int) Math.Floor(float.Parse(comicInfo.Volume)));
|
||||
}
|
||||
chapter.Count = comicInfo.CalculatedCount();
|
||||
|
||||
void AddPerson(Person person)
|
||||
{
|
||||
@ -755,7 +742,6 @@ public class ProcessSeries : IProcessSeries
|
||||
/// <param name="action"></param>
|
||||
private void UpdatePeople(IEnumerable<string> names, PersonRole role, Action<Person> action)
|
||||
{
|
||||
|
||||
var allPeopleTypeRole = _people.Where(p => p.Role == role).ToList();
|
||||
|
||||
foreach (var name in names)
|
||||
|
@ -8,6 +8,7 @@ using System.Threading.Tasks;
|
||||
using API.Data;
|
||||
using API.Data.Repositories;
|
||||
using API.Entities;
|
||||
using API.Entities.Enums;
|
||||
using API.Extensions;
|
||||
using API.Helpers;
|
||||
using API.Parser;
|
||||
@ -97,24 +98,39 @@ public class ScannerService : IScannerService
|
||||
_wordCountAnalyzerService = wordCountAnalyzerService;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Given a generic folder path, will invoke a Series scan or Library scan.
|
||||
/// </summary>
|
||||
/// <remarks>This will Schedule the job to run 1 minute in the future to allow for any close-by duplicate requests to be dropped</remarks>
|
||||
/// <param name="folder"></param>
|
||||
public async Task ScanFolder(string folder)
|
||||
{
|
||||
var seriesId = await _unitOfWork.SeriesRepository.GetSeriesIdByFolder(folder);
|
||||
if (seriesId > 0)
|
||||
Series series = null;
|
||||
try
|
||||
{
|
||||
if (TaskScheduler.HasAlreadyEnqueuedTask(Name, "ScanSeries",
|
||||
new object[] {seriesId, true}))
|
||||
series = await _unitOfWork.SeriesRepository.GetSeriesByFolderPath(folder, SeriesIncludes.Library);
|
||||
}
|
||||
catch (InvalidOperationException ex)
|
||||
{
|
||||
if (ex.Message.Equals("Sequence contains more than one element."))
|
||||
{
|
||||
_logger.LogCritical("[ScannerService] Multiple series map to this folder. Library scan will be used for ScanFolder");
|
||||
}
|
||||
}
|
||||
if (series != null && series.Library.Type != LibraryType.Book)
|
||||
{
|
||||
if (TaskScheduler.HasScanTaskRunningForSeries(series.Id))
|
||||
{
|
||||
_logger.LogInformation("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this series. Dropping request", folder);
|
||||
return;
|
||||
}
|
||||
BackgroundJob.Enqueue(() => ScanSeries(seriesId, true));
|
||||
BackgroundJob.Schedule(() => ScanSeries(series.Id, true), TimeSpan.FromMinutes(1));
|
||||
return;
|
||||
}
|
||||
|
||||
// This is basically rework of what's already done in Library Watcher but is needed if invoked via API
|
||||
var parentDirectory = _directoryService.GetParentDirectoryName(folder);
|
||||
if (string.IsNullOrEmpty(parentDirectory)) return; // This should never happen as it's calculated before enqueing
|
||||
if (string.IsNullOrEmpty(parentDirectory)) return;
|
||||
|
||||
var libraries = (await _unitOfWork.LibraryRepository.GetLibraryDtosAsync()).ToList();
|
||||
var libraryFolders = libraries.SelectMany(l => l.Folders);
|
||||
@ -125,18 +141,17 @@ public class ScannerService : IScannerService
|
||||
var library = libraries.FirstOrDefault(l => l.Folders.Select(Scanner.Parser.Parser.NormalizePath).Contains(libraryFolder));
|
||||
if (library != null)
|
||||
{
|
||||
if (TaskScheduler.HasAlreadyEnqueuedTask(Name, "ScanLibrary",
|
||||
new object[] {library.Id, false}))
|
||||
if (TaskScheduler.HasScanTaskRunningForLibrary(library.Id))
|
||||
{
|
||||
_logger.LogInformation("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this library. Dropping request", folder);
|
||||
return;
|
||||
}
|
||||
BackgroundJob.Enqueue(() => ScanLibrary(library.Id, false));
|
||||
BackgroundJob.Schedule(() => ScanLibrary(library.Id, false), TimeSpan.FromMinutes(1));
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
///
|
||||
/// Scans just an existing Series for changes. If the series doesn't exist, will delete it.
|
||||
/// </summary>
|
||||
/// <param name="seriesId"></param>
|
||||
/// <param name="bypassFolderOptimizationChecks">Not Used. Scan series will always force</param>
|
||||
@ -186,6 +201,7 @@ public class ScannerService : IScannerService
|
||||
return;
|
||||
}
|
||||
|
||||
// If the series path doesn't exist anymore, it was either moved or renamed. We need to essentially delete it
|
||||
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
|
||||
|
||||
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Started, series.Name));
|
||||
@ -213,11 +229,13 @@ public class ScannerService : IScannerService
|
||||
}
|
||||
|
||||
_logger.LogInformation("Beginning file scan on {SeriesName}", series.Name);
|
||||
var scanElapsedTime = await ScanFiles(library, new []{folderPath}, false, TrackFiles, true);
|
||||
var scanElapsedTime = await ScanFiles(library, new []{ folderPath }, false, TrackFiles, true);
|
||||
_logger.LogInformation("ScanFiles for {Series} took {Time}", series.Name, scanElapsedTime);
|
||||
|
||||
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Ended, series.Name));
|
||||
|
||||
|
||||
|
||||
// Remove any parsedSeries keys that don't belong to our series. This can occur when users store 2 series in the same folder
|
||||
RemoveParsedInfosNotForSeries(parsedSeries, series);
|
||||
|
||||
|
@ -2,6 +2,8 @@
|
||||
<s:String x:Key="/Default/CodeInspection/ExcludedFiles/FilesAndFoldersToSkip2/=1BC0273F_002DFEBE_002D4DA1_002DBC04_002D3A3167E4C86C_002Fd_003AData_002Fd_003AMigrations/@EntryIndexedValue">ExplicitlyExcluded</s:String>
|
||||
<s:Boolean x:Key="/Default/CodeInspection/Highlighting/RunLongAnalysisInSwa/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/CodeInspection/Highlighting/RunValueAnalysisInNullableWarningsEnabledContext2/@EntryValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/UserDictionary/Words/=kavitaignore/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/UserDictionary/Words/=kavitaignores/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/UserDictionary/Words/=Omake/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/UserDictionary/Words/=Opds/@EntryIndexedValue">True</s:Boolean>
|
||||
<s:Boolean x:Key="/Default/UserDictionary/Words/=rewinded/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>
|
8
UI/Web/package-lock.json
generated
8
UI/Web/package-lock.json
generated
@ -12666,14 +12666,6 @@
|
||||
"tslib": "^2.3.0"
|
||||
}
|
||||
},
|
||||
"ngx-infinite-scroll": {
|
||||
"version": "13.0.2",
|
||||
"resolved": "https://registry.npmjs.org/ngx-infinite-scroll/-/ngx-infinite-scroll-13.0.2.tgz",
|
||||
"integrity": "sha512-RSezL0DUxo1B57SyRMOSt3a/5lLXJs6P8lavtxOh10uhX+hn662cMYHUO7LiU2a/vJxef2R020s4jkUqhnXTcg==",
|
||||
"requires": {
|
||||
"tslib": "^2.3.0"
|
||||
}
|
||||
},
|
||||
"ngx-toastr": {
|
||||
"version": "14.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ngx-toastr/-/ngx-toastr-14.2.1.tgz",
|
||||
|
@ -41,7 +41,6 @@
|
||||
"ngx-color-picker": "^12.0.0",
|
||||
"ngx-extended-pdf-viewer": "^15.0.0",
|
||||
"ngx-file-drop": "^14.0.1",
|
||||
"ngx-infinite-scroll": "^13.0.2",
|
||||
"ngx-toastr": "^14.2.1",
|
||||
"requires": "^1.0.2",
|
||||
"rxjs": "~7.5.4",
|
||||
|
@ -1,103 +0,0 @@
|
||||
#! /bin/bash
|
||||
set -e
|
||||
|
||||
outputFolder='_output'
|
||||
|
||||
ProgressStart()
|
||||
{
|
||||
echo "Start '$1'"
|
||||
}
|
||||
|
||||
ProgressEnd()
|
||||
{
|
||||
echo "Finish '$1'"
|
||||
}
|
||||
|
||||
Build()
|
||||
{
|
||||
local RID="$1"
|
||||
|
||||
ProgressStart "Build for $RID"
|
||||
|
||||
slnFile=Kavita.sln
|
||||
|
||||
dotnet clean $slnFile -c Release
|
||||
|
||||
dotnet msbuild -restore $slnFile -p:Configuration=Release -p:Platform="Any CPU" -p:RuntimeIdentifiers=$RID
|
||||
|
||||
ProgressEnd "Build for $RID"
|
||||
}
|
||||
|
||||
Package()
|
||||
{
|
||||
local framework="$1"
|
||||
local runtime="$2"
|
||||
local lOutputFolder=../_output/"$runtime"/Kavita
|
||||
|
||||
ProgressStart "Creating $runtime Package for $framework"
|
||||
|
||||
# TODO: Use no-restore? Because Build should have already done it for us
|
||||
echo "Building"
|
||||
cd API
|
||||
echo dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework
|
||||
dotnet publish -c Release --no-restore --self-contained --runtime $runtime -o "$lOutputFolder" --framework $framework
|
||||
|
||||
echo "Renaming API -> Kavita"
|
||||
mv "$lOutputFolder"/API "$lOutputFolder"/Kavita
|
||||
|
||||
echo "Copying webui wwwroot to build"
|
||||
cp -r wwwroot/* "$lOutputFolder"/wwwroot/
|
||||
|
||||
echo "Copying Install information"
|
||||
cp ../INSTALL.txt "$lOutputFolder"/README.txt
|
||||
|
||||
echo "Copying LICENSE"
|
||||
cp ../LICENSE "$lOutputFolder"/LICENSE.txt
|
||||
|
||||
echo "Creating tar"
|
||||
cd ../$outputFolder/"$runtime"/
|
||||
tar -czvf ../kavita-$runtime.tar.gz Kavita
|
||||
|
||||
ProgressEnd "Creating $runtime Package for $framework"
|
||||
|
||||
}
|
||||
|
||||
BuildUI()
|
||||
{
|
||||
ProgressStart 'Building UI'
|
||||
echo 'Removing old wwwroot'
|
||||
rm -rf API/wwwroot/*
|
||||
cd ../Kavita-webui/ || exit
|
||||
echo 'Installing web dependencies'
|
||||
npm install
|
||||
echo 'Building UI'
|
||||
npm run prod
|
||||
ls -l dist
|
||||
echo 'Copying back to Kavita wwwroot'
|
||||
cp -r dist/* ../Kavita/API/wwwroot
|
||||
ls -l ../Kavita/API/wwwroot
|
||||
cd ../Kavita/ || exit
|
||||
ProgressEnd 'Building UI'
|
||||
}
|
||||
|
||||
dir=$PWD
|
||||
|
||||
if [ -d _output ]
|
||||
then
|
||||
rm -r _output/
|
||||
fi
|
||||
|
||||
#Build for x64
|
||||
Build "linux-x64"
|
||||
Package "net5.0" "linux-x64"
|
||||
cd "$dir"
|
||||
|
||||
#Build for arm
|
||||
Build "linux-arm"
|
||||
Package "net5.0" "linux-arm"
|
||||
cd "$dir"
|
||||
|
||||
#Build for arm64
|
||||
Build "linux-arm64"
|
||||
Package "net5.0" "linux-arm64"
|
||||
cd "$dir"
|
Loading…
x
Reference in New Issue
Block a user