diff --git a/API.Tests/ChapterSortComparerTest.cs b/API.Tests/Comparers/ChapterSortComparerTest.cs
similarity index 94%
rename from API.Tests/ChapterSortComparerTest.cs
rename to API.Tests/Comparers/ChapterSortComparerTest.cs
index 7ab909ec5..11fecf2c2 100644
--- a/API.Tests/ChapterSortComparerTest.cs
+++ b/API.Tests/Comparers/ChapterSortComparerTest.cs
@@ -2,7 +2,7 @@
using API.Comparators;
using Xunit;
-namespace API.Tests
+namespace API.Tests.Comparers
{
public class ChapterSortComparerTest
{
diff --git a/API.Tests/Comparers/NaturalSortComparerTest.cs b/API.Tests/Comparers/NaturalSortComparerTest.cs
new file mode 100644
index 000000000..1e4c63daf
--- /dev/null
+++ b/API.Tests/Comparers/NaturalSortComparerTest.cs
@@ -0,0 +1,31 @@
+using System;
+using API.Comparators;
+using Xunit;
+
+namespace API.Tests.Comparers
+{
+ public class NaturalSortComparerTest
+ {
+ [Theory]
+ [InlineData(
+ new[] {"x1.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg"},
+ new[] {"x1.jpg", "x3.jpg", "x4.jpg", "x10.jpg", "x11.jpg"}
+ )]
+ [InlineData(
+ new[] {"Beelzebub_153b_RHS.zip", "Beelzebub_01_[Noodles].zip",},
+ new[] {"Beelzebub_01_[Noodles].zip", "Beelzebub_153b_RHS.zip"}
+ )]
+ public void TestNaturalSortComparer(string[] input, string[] expected)
+ {
+ NaturalSortComparer nc = new NaturalSortComparer();
+ Array.Sort(input, nc);
+
+ var i = 0;
+ foreach (var s in input)
+ {
+ Assert.Equal(s, expected[i]);
+ i++;
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/API.Tests/Services/StringLogicalComparerTest.cs b/API.Tests/Comparers/StringLogicalComparerTest.cs
similarity index 95%
rename from API.Tests/Services/StringLogicalComparerTest.cs
rename to API.Tests/Comparers/StringLogicalComparerTest.cs
index 3ffa0f8a6..ae93b3b46 100644
--- a/API.Tests/Services/StringLogicalComparerTest.cs
+++ b/API.Tests/Comparers/StringLogicalComparerTest.cs
@@ -2,7 +2,7 @@
using API.Comparators;
using Xunit;
-namespace API.Tests.Services
+namespace API.Tests.Comparers
{
public class StringLogicalComparerTest
{
diff --git a/API.Tests/ParserTest.cs b/API.Tests/ParserTest.cs
index 6e126c2d8..d5c1addec 100644
--- a/API.Tests/ParserTest.cs
+++ b/API.Tests/ParserTest.cs
@@ -59,6 +59,7 @@ namespace API.Tests
[InlineData("Volume 12 - Janken Boy is Coming!.cbz", "12")]
[InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz", "20")]
[InlineData("Gantz.V26.cbz", "26")]
+ [InlineData("NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar", "4")]
public void ParseVolumeTest(string filename, string expected)
{
Assert.Equal(expected, ParseVolume(filename));
@@ -176,6 +177,8 @@ namespace API.Tests
[InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "2")]
[InlineData("To Love Ru v09 Uncensored (Ch.071-079).cbz", "71-79")]
[InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter.rar", "0")]
+ [InlineData("Beelzebub_153b_RHS.zip", "153.5")]
+ [InlineData("Beelzebub_150-153b_RHS.zip", "150-153.5")]
public void ParseChaptersTest(string filename, string expected)
{
Assert.Equal(expected, ParseChapter(filename));
@@ -284,6 +287,7 @@ namespace API.Tests
[InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")]
[InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "Scott Pilgrim")]
[InlineData("Wolverine - Origins 003 (2006) (digital) (Minutemen-PhD)", "Wolverine - Origins")]
+ [InlineData("Invincible Vol 01 Family matters (2005) (Digital).cbr", "Invincible")]
public void ParseComicSeriesTest(string filename, string expected)
{
Assert.Equal(expected, ParseComicSeries(filename));
diff --git a/API.Tests/Services/ArchiveServiceTests.cs b/API.Tests/Services/ArchiveServiceTests.cs
index e448ddf1c..b8658af0d 100644
--- a/API.Tests/Services/ArchiveServiceTests.cs
+++ b/API.Tests/Services/ArchiveServiceTests.cs
@@ -58,6 +58,9 @@ namespace API.Tests.Services
[InlineData("file in folder in folder.zip", 1)]
[InlineData("file in folder.zip", 1)]
[InlineData("file in folder_alt.zip", 1)]
+ [InlineData("macos_none.zip", 0)]
+ [InlineData("macos_one.zip", 1)]
+ [InlineData("macos_native.zip", 21)]
public void GetNumberOfPagesFromArchiveTest(string archivePath, int expected)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
@@ -118,6 +121,7 @@ namespace API.Tests.Services
[InlineData("v10 - with folder.cbz", "v10 - with folder.expected.jpg")]
[InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.jpg")]
//[InlineData("png.zip", "png.PNG")]
+ [InlineData("macos_native.zip", "macos_native.jpg")]
public void GetCoverImageTest(string inputFile, string expectedOutputFile)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages");
@@ -128,10 +132,11 @@ namespace API.Tests.Services
}
[Theory]
- [InlineData("06_v01[DMM].zip")]
+ [InlineData("Archives/macos_native.zip")]
+ [InlineData("Formats/One File with DB_Supported.zip")]
public void CanParseCoverImage(string inputFile)
{
- var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
+ var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/");
Assert.NotEmpty(_archiveService.GetCoverImage(Path.Join(testDirectory, inputFile)));
}
diff --git a/API.Tests/Services/Test Data/ArchiveService/Archives/06_v01[DMM].zip b/API.Tests/Services/Test Data/ArchiveService/Archives/06_v01[DMM].zip
deleted file mode 100644
index 8fa84e0ac..000000000
Binary files a/API.Tests/Services/Test Data/ArchiveService/Archives/06_v01[DMM].zip and /dev/null differ
diff --git a/API.Tests/Services/Test Data/ArchiveService/Archives/macos_native.zip b/API.Tests/Services/Test Data/ArchiveService/Archives/macos_native.zip
new file mode 100644
index 000000000..a84f32b35
Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/Archives/macos_native.zip differ
diff --git a/API.Tests/Services/Test Data/ArchiveService/Archives/macos_none.zip b/API.Tests/Services/Test Data/ArchiveService/Archives/macos_none.zip
new file mode 100644
index 000000000..abf5fb125
Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/Archives/macos_none.zip differ
diff --git a/API.Tests/Services/Test Data/ArchiveService/Archives/macos_one.zip b/API.Tests/Services/Test Data/ArchiveService/Archives/macos_one.zip
new file mode 100644
index 000000000..67d45d0c0
Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/Archives/macos_one.zip differ
diff --git a/API.Tests/Services/Test Data/ArchiveService/CoverImages/macos_native.jpg b/API.Tests/Services/Test Data/ArchiveService/CoverImages/macos_native.jpg
new file mode 100644
index 000000000..575b9e556
Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/CoverImages/macos_native.jpg differ
diff --git a/API.Tests/Services/Test Data/ArchiveService/CoverImages/macos_native.zip b/API.Tests/Services/Test Data/ArchiveService/CoverImages/macos_native.zip
new file mode 100644
index 000000000..a84f32b35
Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/CoverImages/macos_native.zip differ
diff --git a/API.Tests/Services/Test Data/ArchiveService/Formats/One File with DB_Supported.zip b/API.Tests/Services/Test Data/ArchiveService/Formats/One File with DB_Supported.zip
new file mode 100644
index 000000000..6199192cb
Binary files /dev/null and b/API.Tests/Services/Test Data/ArchiveService/Formats/One File with DB_Supported.zip differ
diff --git a/API/API.csproj b/API/API.csproj
index 68c02f049..465284979 100644
--- a/API/API.csproj
+++ b/API/API.csproj
@@ -25,6 +25,7 @@
+
diff --git a/API/Comparators/ChapterSortComparer.cs b/API/Comparators/ChapterSortComparer.cs
index 1798afe7e..249532466 100644
--- a/API/Comparators/ChapterSortComparer.cs
+++ b/API/Comparators/ChapterSortComparer.cs
@@ -4,17 +4,6 @@ namespace API.Comparators
{
public class ChapterSortComparer : IComparer<float>
{
- // public int Compare(int x, int y)
- // {
- // if (x == 0 && y == 0) return 0;
- // // if x is 0, it comes second
- // if (x == 0) return 1;
- // // if y is 0, it comes second
- // if (y == 0) return -1;
- //
- // return x.CompareTo(y);
- // }
-
public int Compare(float x, float y)
{
if (x == 0.0 && y == 0.0) return 0;
diff --git a/API/Comparators/NaturalSortComparer.cs b/API/Comparators/NaturalSortComparer.cs
new file mode 100644
index 000000000..8ba4a9ec9
--- /dev/null
+++ b/API/Comparators/NaturalSortComparer.cs
@@ -0,0 +1,95 @@
+using System;
+using System.Collections.Generic;
+using System.Text.RegularExpressions;
+using static System.GC;
+
+namespace API.Comparators
+{
+ public class NaturalSortComparer : IComparer<string>, IDisposable
+ {
+ private readonly bool _isAscending;
+
+ public NaturalSortComparer(bool inAscendingOrder = true)
+ {
+ _isAscending = inAscendingOrder;
+ }
+
+ #region IComparer Members
+
+ public int Compare(string x, string y)
+ {
+ throw new NotImplementedException();
+ }
+
+ #endregion
+
+ #region IComparer Members
+
+ int IComparer<string>.Compare(string x, string y)
+ {
+ if (x == y)
+ return 0;
+
+ string[] x1, y1;
+
+ if (!_table.TryGetValue(x, out x1))
+ {
+ x1 = Regex.Split(x.Replace(" ", ""), "([0-9]+)");
+ _table.Add(x, x1);
+ }
+
+ if (!_table.TryGetValue(y ?? string.Empty, out y1))
+ {
+ y1 = Regex.Split(y?.Replace(" ", ""), "([0-9]+)");
+ _table.Add(y, y1);
+ }
+
+ int returnVal;
+
+ for (var i = 0; i < x1.Length && i < y1.Length; i++)
+ {
+ if (x1[i] == y1[i]) continue;
+ returnVal = PartCompare(x1[i], y1[i]);
+ return _isAscending ? returnVal : -returnVal;
+ }
+
+ if (y1.Length > x1.Length)
+ {
+ returnVal = 1;
+ }
+ else if (x1.Length > y1.Length)
+ {
+ returnVal = -1;
+ }
+ else
+ {
+ returnVal = 0;
+ }
+
+ return _isAscending ? returnVal : -returnVal;
+ }
+
+ private static int PartCompare(string left, string right)
+ {
+ int x, y;
+ if (!int.TryParse(left, out x))
+ return left.CompareTo(right);
+
+ if (!int.TryParse(right, out y))
+ return left.CompareTo(right);
+
+ return x.CompareTo(y);
+ }
+
+ #endregion
+
+ private Dictionary<string, string[]> _table = new Dictionary<string, string[]>();
+
+ public void Dispose()
+ {
+ SuppressFinalize(this);
+ _table.Clear();
+ _table = null;
+ }
+ }
+}
\ No newline at end of file
diff --git a/API/Controllers/FallbackController.cs b/API/Controllers/FallbackController.cs
index 56962a3d6..ecd0315e2 100644
--- a/API/Controllers/FallbackController.cs
+++ b/API/Controllers/FallbackController.cs
@@ -7,6 +7,7 @@ namespace API.Controllers
public class FallbackController : Controller
{
// ReSharper disable once S4487
+ // ReSharper disable once NotAccessedField.Local
private readonly ITaskScheduler _taskScheduler;
public FallbackController(ITaskScheduler taskScheduler)
diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs
index bc085114b..c1b6df2b8 100644
--- a/API/Controllers/LibraryController.cs
+++ b/API/Controllers/LibraryController.cs
@@ -155,7 +155,7 @@ namespace API.Controllers
[HttpPost("refresh-metadata")]
public ActionResult RefreshMetadata(int libraryId)
{
- _taskScheduler.ScanLibrary(libraryId, true);
+ _taskScheduler.RefreshMetadata(libraryId);
return Ok();
}
@@ -164,23 +164,7 @@ namespace API.Controllers
{
return Ok(await _unitOfWork.LibraryRepository.GetLibraryDtosForUsernameAsync(User.GetUsername()));
}
-
- [HttpGet("series")]
- public async Task<ActionResult<IEnumerable<SeriesDto>>> GetSeriesForLibrary(int libraryId, [FromQuery] UserParams userParams)
- {
- // TODO: Move this to SeriesController
- var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
- var series =
- await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, user.Id, userParams);
-
- // Apply progress/rating information (I can't work out how to do this in initial query)
- await _unitOfWork.SeriesRepository.AddSeriesModifiers(user.Id, series);
-
- Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages);
-
- return Ok(series);
- }
-
+
[Authorize(Policy = "RequireAdminRole")]
[HttpDelete("delete")]
public async Task> DeleteLibrary(int libraryId)
diff --git a/API/Controllers/ReaderController.cs b/API/Controllers/ReaderController.cs
index b2eb50ee1..bb3edc787 100644
--- a/API/Controllers/ReaderController.cs
+++ b/API/Controllers/ReaderController.cs
@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
-using API.Data;
using API.DTOs;
using API.Entities;
using API.Extensions;
diff --git a/API/Controllers/SeriesController.cs b/API/Controllers/SeriesController.cs
index e63be3eb5..a2d350ebd 100644
--- a/API/Controllers/SeriesController.cs
+++ b/API/Controllers/SeriesController.cs
@@ -3,6 +3,7 @@ using System.Threading.Tasks;
using API.DTOs;
using API.Entities;
using API.Extensions;
+using API.Helpers;
using API.Interfaces;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
@@ -23,6 +24,21 @@ namespace API.Controllers
_unitOfWork = unitOfWork;
}
+ [HttpGet]
+ public async Task<ActionResult<IEnumerable<SeriesDto>>> GetSeriesForLibrary(int libraryId, [FromQuery] UserParams userParams)
+ {
+ var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
+ var series =
+ await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, user.Id, userParams);
+
+ // Apply progress/rating information (I can't work out how to do this in initial query)
+ await _unitOfWork.SeriesRepository.AddSeriesModifiers(user.Id, series);
+
+ Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages);
+
+ return Ok(series);
+ }
+
[HttpGet("{seriesId}")]
public async Task> GetSeries(int seriesId)
{
@@ -105,11 +121,9 @@ namespace API.Controllers
if (series == null) return BadRequest("Series does not exist");
- // TODO: Ensure we check against Library for Series Name change
- var existingSeries = await _unitOfWork.SeriesRepository.GetSeriesByNameAsync(updateSeries.Name);
- if (existingSeries != null && existingSeries.Id != series.Id )
+ if (series.Name != updateSeries.Name && await _unitOfWork.SeriesRepository.DoesSeriesNameExistInLibrary(updateSeries.Name))
{
- return BadRequest("A series already exists with this name. Name must be unique.");
+ return BadRequest("A series already exists in this library with this name. Series Names must be unique to a library.");
}
series.Name = updateSeries.Name;
series.LocalizedName = updateSeries.LocalizedName;
diff --git a/API/Controllers/ServerController.cs b/API/Controllers/ServerController.cs
index 2889e2317..475323e07 100644
--- a/API/Controllers/ServerController.cs
+++ b/API/Controllers/ServerController.cs
@@ -3,7 +3,6 @@ using System.IO;
using System.IO.Compression;
using System.Threading.Tasks;
using API.Extensions;
-using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using Microsoft.AspNetCore.Authorization;
diff --git a/API/DTOs/SearchResultDto.cs b/API/DTOs/SearchResultDto.cs
index 114b3d03b..001883b23 100644
--- a/API/DTOs/SearchResultDto.cs
+++ b/API/DTOs/SearchResultDto.cs
@@ -6,6 +6,7 @@
public string Name { get; init; }
public string OriginalName { get; init; }
public string SortName { get; init; }
+ public string LocalizedName { get; init; }
// Grouping information
public string LibraryName { get; set; }
diff --git a/API/Data/AppUserProgressRepository.cs b/API/Data/AppUserProgressRepository.cs
new file mode 100644
index 000000000..c39430e61
--- /dev/null
+++ b/API/Data/AppUserProgressRepository.cs
@@ -0,0 +1,32 @@
+using System.Linq;
+using System.Threading.Tasks;
+using API.Interfaces;
+using Microsoft.EntityFrameworkCore;
+
+namespace API.Data
+{
+ public class AppUserProgressRepository : IAppUserProgressRepository
+ {
+ private readonly DataContext _context;
+
+ public AppUserProgressRepository(DataContext context)
+ {
+ _context = context;
+ }
+
+ /// <summary>
+ /// This will remove any entries that have chapterIds that no longer exists. This will execute the save as well.
+ /// </summary>
+ public async Task<bool> CleanupAbandonedChapters()
+ {
+ var chapterIds = _context.Chapter.Select(c => c.Id);
+
+ var rowsToRemove = await _context.AppUserProgresses
+ .Where(progress => !chapterIds.Contains(progress.ChapterId))
+ .ToListAsync();
+
+ _context.RemoveRange(rowsToRemove);
+ return (await _context.SaveChangesAsync()) > 0;
+ }
+ }
+}
\ No newline at end of file
diff --git a/API/Data/LibraryRepository.cs b/API/Data/LibraryRepository.cs
index 9a46b44a4..d482193ce 100644
--- a/API/Data/LibraryRepository.cs
+++ b/API/Data/LibraryRepository.cs
@@ -35,15 +35,13 @@ namespace API.Data
public async Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName)
{
- Stopwatch sw = Stopwatch.StartNew();
- var libs = await _context.Library
+ return await _context.Library
.Include(l => l.AppUsers)
.Where(library => library.AppUsers.Any(x => x.UserName == userName))
+ .OrderBy(l => l.Name)
.ProjectTo<LibraryDto>(_mapper.ConfigurationProvider)
.AsNoTracking()
.ToListAsync();
- Console.WriteLine("Processed GetLibraryDtosForUsernameAsync in {0} milliseconds", sw.ElapsedMilliseconds);
- return libs;
}
public async Task> GetLibrariesAsync()
@@ -73,7 +71,10 @@ namespace API.Data
{
return await _context.Library
.Include(f => f.Folders)
- .ProjectTo(_mapper.ConfigurationProvider).ToListAsync();
+ .OrderBy(l => l.Name)
+ .ProjectTo<LibraryDto>(_mapper.ConfigurationProvider)
+ .AsNoTracking()
+ .ToListAsync();
}
public async Task GetLibraryForIdAsync(int libraryId)
@@ -98,19 +99,25 @@ namespace API.Data
.ThenInclude(s => s.Volumes)
.ThenInclude(v => v.Chapters)
.ThenInclude(c => c.Files)
+ .AsSplitQuery()
.SingleAsync();
}
public async Task LibraryExists(string libraryName)
{
- return await _context.Library.AnyAsync(x => x.Name == libraryName);
+ return await _context.Library
+ .AsNoTracking()
+ .AnyAsync(x => x.Name == libraryName);
}
public async Task> GetLibrariesForUserAsync(AppUser user)
{
- return await _context.Library.Where(library => library.AppUsers.Contains(user))
+ return await _context.Library
+ .Where(library => library.AppUsers.Contains(user))
.Include(l => l.Folders)
- .ProjectTo(_mapper.ConfigurationProvider).ToListAsync();
+ .AsNoTracking()
+ .ProjectTo<LibraryDto>(_mapper.ConfigurationProvider)
+ .ToListAsync();
}
diff --git a/API/Data/SeriesRepository.cs b/API/Data/SeriesRepository.cs
index 7124f0932..652316bf2 100644
--- a/API/Data/SeriesRepository.cs
+++ b/API/Data/SeriesRepository.cs
@@ -51,6 +51,19 @@ namespace API.Data
{
return await _context.Series.SingleOrDefaultAsync(x => x.Name == name);
}
+
+ public async Task<bool> DoesSeriesNameExistInLibrary(string name)
+ {
+ var libraries = _context.Series
+ .AsNoTracking()
+ .Where(x => x.Name == name)
+ .Select(s => s.LibraryId);
+
+ return await _context.Series
+ .AsNoTracking()
+ .Where(s => libraries.Contains(s.LibraryId) && s.Name == name)
+ .CountAsync() > 1;
+ }
public Series GetSeriesByName(string name)
{
diff --git a/API/Data/UnitOfWork.cs b/API/Data/UnitOfWork.cs
index ae2f909a9..8d0491fae 100644
--- a/API/Data/UnitOfWork.cs
+++ b/API/Data/UnitOfWork.cs
@@ -29,6 +29,8 @@ namespace API.Data
public IVolumeRepository VolumeRepository => new VolumeRepository(_context, _mapper);
public ISettingsRepository SettingsRepository => new SettingsRepository(_context, _mapper);
+
+ public IAppUserProgressRepository AppUserProgressRepository => new AppUserProgressRepository(_context);
public async Task Complete()
{
diff --git a/API/Entities/MangaFile.cs b/API/Entities/MangaFile.cs
index 4be77a4c1..ddf2ea3fc 100644
--- a/API/Entities/MangaFile.cs
+++ b/API/Entities/MangaFile.cs
@@ -1,7 +1,6 @@
using System;
using API.Entities.Enums;
-using API.Entities.Interfaces;
namespace API.Entities
{
diff --git a/API/Extensions/ApplicationServiceExtensions.cs b/API/Extensions/ApplicationServiceExtensions.cs
index 69e5a4a68..89b338e5c 100644
--- a/API/Extensions/ApplicationServiceExtensions.cs
+++ b/API/Extensions/ApplicationServiceExtensions.cs
@@ -4,7 +4,6 @@ using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using API.Services.Tasks;
-using AutoMapper;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
diff --git a/API/Helpers/Converters/CronConverter.cs b/API/Helpers/Converters/CronConverter.cs
index c31afb417..6fece1bdb 100644
--- a/API/Helpers/Converters/CronConverter.cs
+++ b/API/Helpers/Converters/CronConverter.cs
@@ -5,7 +5,6 @@ namespace API.Helpers.Converters
{
public static class CronConverter
{
- // TODO: this isn't used. Replace strings with Enums?
public static readonly IEnumerable<string> Options = new []
{
"disabled",
diff --git a/API/Interfaces/IAppUserProgressRepository.cs b/API/Interfaces/IAppUserProgressRepository.cs
new file mode 100644
index 000000000..62a252661
--- /dev/null
+++ b/API/Interfaces/IAppUserProgressRepository.cs
@@ -0,0 +1,9 @@
+using System.Threading.Tasks;
+
+namespace API.Interfaces
+{
+ public interface IAppUserProgressRepository
+ {
+ Task<bool> CleanupAbandonedChapters();
+ }
+}
\ No newline at end of file
diff --git a/API/Interfaces/ISeriesRepository.cs b/API/Interfaces/ISeriesRepository.cs
index 34a1715dc..be39e10d4 100644
--- a/API/Interfaces/ISeriesRepository.cs
+++ b/API/Interfaces/ISeriesRepository.cs
@@ -11,6 +11,7 @@ namespace API.Interfaces
void Add(Series series);
void Update(Series series);
Task<Series> GetSeriesByNameAsync(string name);
+ Task<bool> DoesSeriesNameExistInLibrary(string name);
Series GetSeriesByName(string name);
///
diff --git a/API/Interfaces/IUnitOfWork.cs b/API/Interfaces/IUnitOfWork.cs
index 24a074e29..fb81313eb 100644
--- a/API/Interfaces/IUnitOfWork.cs
+++ b/API/Interfaces/IUnitOfWork.cs
@@ -9,6 +9,7 @@ namespace API.Interfaces
ILibraryRepository LibraryRepository { get; }
IVolumeRepository VolumeRepository { get; }
ISettingsRepository SettingsRepository { get; }
+ IAppUserProgressRepository AppUserProgressRepository { get; }
Task Complete();
bool HasChanges();
}
diff --git a/API/Interfaces/Services/IScannerService.cs b/API/Interfaces/Services/IScannerService.cs
index 695bc59c5..dc9c44623 100644
--- a/API/Interfaces/Services/IScannerService.cs
+++ b/API/Interfaces/Services/IScannerService.cs
@@ -1,4 +1,5 @@
-namespace API.Interfaces.Services
+
+namespace API.Interfaces.Services
{
public interface IScannerService
{
@@ -9,7 +10,6 @@
/// Library to scan against
/// Force overwriting for cover images
void ScanLibrary(int libraryId, bool forceUpdate);
-
void ScanLibraries();
}
}
\ No newline at end of file
diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs
index b064a536e..5cc6b1df3 100644
--- a/API/Parser/Parser.cs
+++ b/API/Parser/Parser.cs
@@ -9,11 +9,11 @@ namespace API.Parser
{
public static class Parser
{
- public static readonly string MangaFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|.tar.gz|.7zip";
+ public static readonly string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|.tar.gz|.7zip";
public static readonly string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg)";
private static readonly string XmlRegexExtensions = @"\.xml";
private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
- private static readonly Regex MangaFileRegex = new Regex(MangaFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
+ private static readonly Regex ArchiveFileRegex = new Regex(ArchiveFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex XmlRegex = new Regex(XmlRegexExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex[] MangaVolumeRegex = new[]
@@ -22,6 +22,10 @@ namespace API.Parser
new Regex(
@"(?.*)(\b|_)v(?\d+-?\d+)( |_)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
+ // NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar
+ new Regex(
+ @"(?.*)(\b|_)(?!\[)(vol\.?)(?\d+(-\d+)?)(?!\])",
+ RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Historys Strongest Disciple Kenichi_v11_c90-98.zip or Dance in the Vampire Bund v16-17
new Regex(
@"(?.*)(\b|_)(?!\[)v(?\d+(-\d+)?)(?!\])",
@@ -144,6 +148,10 @@ namespace API.Parser
private static readonly Regex[] ComicSeriesRegex = new[]
{
+ // Invincible Vol 01 Family matters (2005) (Digital)
+ new Regex(
+ @"(?.*)(\b|_)(vol\.?)( |_)(?\d+(-\d+)?)",
+ RegexOptions.IgnoreCase | RegexOptions.Compiled),
// 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)
new Regex(
@"^(?\d+) (- |_)?(?.*(\d{4})?)( |_)(\(|\d+)",
@@ -275,9 +283,9 @@ namespace API.Parser
new Regex(
@"(?.*) S(?\d+) (?\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
- // Beelzebub_01_[Noodles].zip
+ // Beelzebub_01_[Noodles].zip, Beelzebub_153b_RHS.zip
new Regex(
- @"^((?!v|vo|vol|Volume).)*( |_)(?\.?\d+(?:.\d+|-\d+)?)( |_|\[|\()",
+ @"^((?!v|vo|vol|Volume).)*( |_)(?\.?\d+(?:.\d+|-\d+)?)(?b)?( |_|\[|\()",
RegexOptions.IgnoreCase | RegexOptions.Compiled),
// Yumekui-Merry_DKThias_Chapter21.zip
new Regex(
@@ -531,12 +539,16 @@ namespace API.Parser
if (!match.Groups["Chapter"].Success || match.Groups["Chapter"] == Match.Empty) continue;
var value = match.Groups["Chapter"].Value;
+ var hasChapterPart = match.Groups["ChapterPart"].Success;
- if (!value.Contains("-")) return RemoveLeadingZeroes(match.Groups["Chapter"].Value);
+ if (!value.Contains("-"))
+ {
+ return RemoveLeadingZeroes(hasChapterPart ? AddChapterPart(value) : value);
+ }
var tokens = value.Split("-");
var from = RemoveLeadingZeroes(tokens[0]);
- var to = RemoveLeadingZeroes(tokens[1]);
+ var to = RemoveLeadingZeroes(hasChapterPart ? AddChapterPart(tokens[1]) : tokens[1]);
return $"{@from}-{to}";
}
@@ -544,6 +556,16 @@ namespace API.Parser
return "0";
}
+
+ private static string AddChapterPart(string value)
+ {
+ if (value.Contains("."))
+ {
+ return value;
+ }
+
+ return $"{value}.5";
+ }
public static string ParseComicChapter(string filename)
{
@@ -697,7 +719,7 @@ namespace API.Parser
public static bool IsArchive(string filePath)
{
- return MangaFileRegex.IsMatch(Path.GetExtension(filePath));
+ return ArchiveFileRegex.IsMatch(Path.GetExtension(filePath));
}
public static bool IsImage(string filePath)
diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs
index 03635188a..b598f3e8a 100644
--- a/API/Services/ArchiveService.cs
+++ b/API/Services/ArchiveService.cs
@@ -10,6 +10,7 @@ using API.Extensions;
using API.Interfaces.Services;
using API.Services.Tasks;
using Microsoft.Extensions.Logging;
+using Microsoft.IO;
using SharpCompress.Archives;
using SharpCompress.Common;
using Image = NetVips.Image;
@@ -22,7 +23,8 @@ namespace API.Services
public class ArchiveService : IArchiveService
{
private readonly ILogger _logger;
- private const int ThumbnailWidth = 320; // 153w x 230h TODO: Look into optimizing the images to be smaller
+ private const int ThumbnailWidth = 320; // 153w x 230h
+ private static readonly RecyclableMemoryStreamManager _streamManager = new RecyclableMemoryStreamManager();
public ArchiveService(ILogger logger)
{
@@ -74,13 +76,15 @@ namespace API.Services
{
_logger.LogDebug("Using default compression handling");
using ZipArchive archive = ZipFile.OpenRead(archivePath);
- return archive.Entries.Count(e => Parser.Parser.IsImage(e.FullName));
+ return archive.Entries.Count(e => !e.FullName.Contains("__MACOSX") && Parser.Parser.IsImage(e.FullName));
}
case ArchiveLibrary.SharpCompress:
{
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
- return archive.Entries.Count(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key));
+ return archive.Entries.Count(entry => !entry.IsDirectory &&
+ !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+ && Parser.Parser.IsImage(entry.Key));
}
case ArchiveLibrary.NotSupported:
_logger.LogError("[GetNumberOfPagesFromArchive] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath);
@@ -117,8 +121,8 @@ namespace API.Services
{
_logger.LogDebug("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath);
- var folder = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
- var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList();
+ var folder = archive.Entries.SingleOrDefault(x => !x.FullName.Contains("__MACOSX") && Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
+ var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && !x.FullName.Contains("__MACOSX") && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList();
var entry = folder ?? entries[0];
return createThumbnail ? CreateThumbnail(entry) : ConvertEntryToByteArray(entry);
@@ -127,7 +131,9 @@ namespace API.Services
{
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
- return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), createThumbnail);
+ return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory
+ && !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+ && Parser.Parser.IsImage(entry.Key)), createThumbnail);
}
case ArchiveLibrary.NotSupported:
_logger.LogError("[GetCoverImage] This archive cannot be read: {ArchivePath}. Defaulting to no cover image", archivePath);
@@ -152,10 +158,11 @@ namespace API.Services
{
if (Path.GetFileNameWithoutExtension(entry.Key).ToLower() == "folder")
{
- using var ms = new MemoryStream();
+ using var ms = _streamManager.GetStream();
entry.WriteTo(ms);
ms.Position = 0;
- return createThumbnail ? CreateThumbnail(ms.ToArray(), Path.GetExtension(entry.Key)) : ms.ToArray();
+ var data = ms.ToArray();
+ return createThumbnail ? CreateThumbnail(data, Path.GetExtension(entry.Key)) : data;
}
}
@@ -163,7 +170,7 @@ namespace API.Services
{
var entry = images.OrderBy(e => e.Key).FirstOrDefault();
if (entry == null) return Array.Empty<byte>();
- using var ms = new MemoryStream();
+ using var ms = _streamManager.GetStream();
entry.WriteTo(ms);
ms.Position = 0;
var data = ms.ToArray();
@@ -176,11 +183,9 @@ namespace API.Services
private static byte[] ConvertEntryToByteArray(ZipArchiveEntry entry)
{
using var stream = entry.Open();
- using var ms = new MemoryStream();
+ using var ms = _streamManager.GetStream();
stream.CopyTo(ms);
- var data = ms.ToArray();
-
- return data;
+ return ms.ToArray();
}
///
@@ -194,7 +199,7 @@ namespace API.Services
// Sometimes ZipArchive will list the directory and others it will just keep it in the FullName
return archive.Entries.Count > 0 &&
!Path.HasExtension(archive.Entries.ElementAt(0).FullName) ||
- archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar));
+ archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !e.FullName.Contains("__MACOSX"));
}
private byte[] CreateThumbnail(byte[] entry, string formatExtension = ".jpg")
@@ -211,7 +216,7 @@ namespace API.Services
}
catch (Exception ex)
{
- _logger.LogError(ex, "[CreateThumbnail] There was a critical error and prevented thumbnail generation. Defaulting to no cover image");
+ _logger.LogError(ex, "[CreateThumbnail] There was a critical error and prevented thumbnail generation. Defaulting to no cover image. Format Extension {Extension}", formatExtension);
}
return Array.Empty<byte>();
@@ -263,7 +268,7 @@ namespace API.Services
{
if (Path.GetFileNameWithoutExtension(entry.Key).ToLower().EndsWith("comicinfo") && Parser.Parser.IsXml(entry.Key))
{
- using var ms = new MemoryStream();
+ using var ms = _streamManager.GetStream();
entry.WriteTo(ms);
ms.Position = 0;
@@ -295,7 +300,7 @@ namespace API.Services
{
_logger.LogDebug("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath);
- var entry = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName));
+ var entry = archive.Entries.SingleOrDefault(x => !x.FullName.Contains("__MACOSX") && Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName));
if (entry != null)
{
using var stream = entry.Open();
@@ -308,7 +313,9 @@ namespace API.Services
{
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
- info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsXml(entry.Key)));
+ info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory
+ && !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+ && Parser.Parser.IsXml(entry.Key)));
break;
}
case ArchiveLibrary.NotSupported:
@@ -392,7 +399,9 @@ namespace API.Services
{
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
- ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath);
+ ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory
+ && !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+ && Parser.Parser.IsImage(entry.Key)), extractPath);
break;
}
case ArchiveLibrary.NotSupported:
diff --git a/API/Services/ComicInfo.cs b/API/Services/ComicInfo.cs
index 1f994d224..8277cfb35 100644
--- a/API/Services/ComicInfo.cs
+++ b/API/Services/ComicInfo.cs
@@ -2,14 +2,14 @@
{
public class ComicInfo
{
- public string Summary;
- public string Title;
- public string Series;
- public string Notes;
- public string Publisher;
- public string Genre;
- public int PageCount;
- public string LanguageISO;
- public string Web;
+ public string Summary { get; set; }
+ public string Title { get; set; }
+ public string Series { get; set; }
+ public string Notes { get; set; }
+ public string Publisher { get; set; }
+ public string Genre { get; set; }
+ public int PageCount { get; set; }
+ public string LanguageISO { get; set; }
+ public string Web { get; set; }
}
}
\ No newline at end of file
diff --git a/API/Services/DirectoryService.cs b/API/Services/DirectoryService.cs
index 69a1b17dd..1049e3aae 100644
--- a/API/Services/DirectoryService.cs
+++ b/API/Services/DirectoryService.cs
@@ -6,10 +6,8 @@ using System.Linq;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
-using API.DTOs;
using API.Interfaces.Services;
using Microsoft.Extensions.Logging;
-using NetVips;
namespace API.Services
{
@@ -60,6 +58,7 @@ namespace API.Services
{
rootPath = rootPath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
}
+ // NOTE: I Could use Path.GetRelativePath and split on separator character instead.
var path = fullPath.EndsWith(separator) ? fullPath.Substring(0, fullPath.Length - 1) : fullPath;
var root = rootPath.EndsWith(separator) ? rootPath.Substring(0, rootPath.Length - 1) : rootPath;
diff --git a/API/Services/MetadataService.cs b/API/Services/MetadataService.cs
index 889bde5f9..1a7c7fe3e 100644
--- a/API/Services/MetadataService.cs
+++ b/API/Services/MetadataService.cs
@@ -4,6 +4,7 @@ using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
+using API.Comparators;
using API.Entities;
using API.Extensions;
using API.Interfaces;
@@ -45,9 +46,9 @@ namespace API.Services
{
if (volume != null && ShouldFindCoverImage(volume.CoverImage, forceUpdate))
{
- // TODO: Create a custom sorter for Chapters so it's consistent across the application
+ // TODO: Replace this with ChapterSortComparator
volume.Chapters ??= new List();
- var firstChapter = volume.Chapters.OrderBy(x => Double.Parse(x.Number)).FirstOrDefault();
+ var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number)).FirstOrDefault();
var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault();
// Skip calculating Cover Image (I/O) if the chapter already has it set
@@ -67,16 +68,29 @@ namespace API.Services
public void UpdateMetadata(Series series, bool forceUpdate)
{
+ // TODO: Use new ChapterSortComparer() here instead
if (series == null) return;
if (ShouldFindCoverImage(series.CoverImage, forceUpdate))
{
series.Volumes ??= new List();
var firstCover = series.Volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0);
+ byte[] coverImage = null;
if (firstCover == null && series.Volumes.Any())
{
- firstCover = series.Volumes.FirstOrDefault(x => x.Number == 0);
+ // If firstCover is null and one volume, the whole series is Chapters under Vol 0.
+ if (series.Volumes.Count == 1)
+ {
+ coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number))
+ .FirstOrDefault(c => !c.IsSpecial)?.CoverImage;
+ }
+
+ if (coverImage == null)
+ {
+ coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number))
+ .FirstOrDefault()?.CoverImage;
+ }
}
- series.CoverImage = firstCover?.CoverImage;
+ series.CoverImage = firstCover?.CoverImage ?? coverImage;
}
if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return;
@@ -88,22 +102,20 @@ namespace API.Services
if (firstFile != null && !new FileInfo(firstFile.FilePath).DoesLastWriteMatch(firstFile.LastModified))
{
series.Summary = _archiveService.GetSummaryInfo(firstFile.FilePath);
+ firstFile.LastModified = DateTime.Now;
}
}
+
public void RefreshMetadata(int libraryId, bool forceUpdate = false)
{
var sw = Stopwatch.StartNew();
- var library = Task.Run(() => _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId)).Result;
- var allSeries = Task.Run(() => _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId)).Result.ToList();
-
+ var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).Result;
+
_logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name);
- foreach (var series in allSeries)
+ foreach (var series in library.Series)
{
- series.NormalizedName = Parser.Parser.Normalize(series.Name);
-
- var volumes = Task.Run(() => _unitOfWork.SeriesRepository.GetVolumes(series.Id)).Result.ToList();
- foreach (var volume in volumes)
+ foreach (var volume in series.Volumes)
{
foreach (var chapter in volume.Chapters)
{
diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs
index 23e7040d0..b4c0e8cdf 100644
--- a/API/Services/TaskScheduler.cs
+++ b/API/Services/TaskScheduler.cs
@@ -5,8 +5,6 @@ using API.Helpers.Converters;
using API.Interfaces;
using API.Interfaces.Services;
using Hangfire;
-using Microsoft.AspNetCore.Hosting;
-using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
namespace API.Services
@@ -25,8 +23,7 @@ namespace API.Services
public TaskScheduler(ICacheService cacheService, ILogger logger, IScannerService scannerService,
- IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService,
- IWebHostEnvironment env)
+ IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService)
{
_cacheService = cacheService;
_logger = logger;
@@ -36,17 +33,7 @@ namespace API.Services
_backupService = backupService;
_cleanupService = cleanupService;
- if (!env.IsDevelopment())
- {
- ScheduleTasks();
- }
- else
- {
- RecurringJob.RemoveIfExists("scan-libraries");
- RecurringJob.RemoveIfExists("backup");
- RecurringJob.RemoveIfExists("cleanup");
- }
-
+ ScheduleTasks();
}
public void ScheduleTasks()
@@ -56,8 +43,9 @@ namespace API.Services
string setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).Result.Value;
if (setting != null)
{
- _logger.LogDebug("Scheduling Scan Library Task for {Cron}", setting);
- RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), () => CronConverter.ConvertToCronNotation(setting));
+ _logger.LogDebug("Scheduling Scan Library Task for {Setting}", setting);
+ RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(),
+ () => CronConverter.ConvertToCronNotation(setting));
}
else
{
@@ -67,7 +55,7 @@ namespace API.Services
setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Result.Value;
if (setting != null)
{
- _logger.LogDebug("Scheduling Backup Task for {Cron}", setting);
+ _logger.LogDebug("Scheduling Backup Task for {Setting}", setting);
RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting));
}
else
@@ -80,10 +68,10 @@ namespace API.Services
public void ScanLibrary(int libraryId, bool forceUpdate = false)
{
- // TODO: We shouldn't queue up a job if one is already in progress
_logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate));
- BackgroundJob.Enqueue(() => _cleanupService.Cleanup()); // When we do a scan, force cache to re-unpack in case page numbers change
+ // When we do a scan, force cache to re-unpack in case page numbers change
+ BackgroundJob.Enqueue(() => _cleanupService.Cleanup());
}
public void CleanupChapters(int[] chapterIds)
diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs
index 96e9620f1..68b4fbb2c 100644
--- a/API/Services/Tasks/ScannerService.cs
+++ b/API/Services/Tasks/ScannerService.cs
@@ -23,7 +23,6 @@ namespace API.Services.Tasks
private readonly IArchiveService _archiveService;
private readonly IMetadataService _metadataService;
private ConcurrentDictionary> _scannedSeries;
- private bool _forceUpdate;
public ScannerService(IUnitOfWork unitOfWork, ILogger logger, IArchiveService archiveService,
IMetadataService metadataService)
@@ -34,8 +33,9 @@ namespace API.Services.Tasks
_metadataService = metadataService;
}
- [DisableConcurrentExecution(timeoutInSeconds: 5)]
- [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
+
+ [DisableConcurrentExecution(timeoutInSeconds: 360)]
+ //[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public void ScanLibraries()
{
var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList();
@@ -60,22 +60,15 @@ namespace API.Services.Tasks
//return false;
}
- private void Cleanup()
- {
- _scannedSeries = null;
- }
-
- [DisableConcurrentExecution(5)]
- [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
+ [DisableConcurrentExecution(360)]
+ //[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public void ScanLibrary(int libraryId, bool forceUpdate)
{
- _forceUpdate = forceUpdate;
var sw = Stopwatch.StartNew();
- Cleanup();
- Library library;
+ Library library;
try
{
- library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).Result;
+ library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult();
}
catch (Exception ex)
{
@@ -84,8 +77,10 @@ namespace API.Services.Tasks
return;
}
- _scannedSeries = new ConcurrentDictionary>();
+
_logger.LogInformation("Beginning scan on {LibraryName}. Forcing metadata update: {ForceUpdate}", library.Name, forceUpdate);
+
+ _scannedSeries = new ConcurrentDictionary>();
var totalFiles = 0;
var skippedFolders = 0;
@@ -104,7 +99,7 @@ namespace API.Services.Tasks
{
_logger.LogError(exception, "The file {Filename} could not be found", f);
}
- }, Parser.Parser.MangaFileExtensions);
+ }, Parser.Parser.ArchiveFileExtensions);
}
catch (ArgumentException ex) {
_logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath.Path);
@@ -120,15 +115,15 @@ namespace API.Services.Tasks
{
_logger.LogInformation("All Folders were skipped due to no modifications to the directories");
_unitOfWork.LibraryRepository.Update(library);
+ _scannedSeries = null;
_logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, sw.ElapsedMilliseconds, library.Name);
- Cleanup();
return;
}
-
+
// Remove any series where there were no parsed infos
var filtered = _scannedSeries.Where(kvp => kvp.Value.Count != 0);
var series = filtered.ToDictionary(v => v.Key, v => v.Value);
-
+
UpdateLibrary(library, series);
_unitOfWork.LibraryRepository.Update(library);
@@ -140,8 +135,22 @@ namespace API.Services.Tasks
{
_logger.LogError("There was a critical error that resulted in a failed scan. Please check logs and rescan");
}
+ _scannedSeries = null;
_logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, sw.ElapsedMilliseconds + scanElapsedTime, library.Name);
+
+ // Cleanup any user progress that doesn't exist
+ var cleanedUp = Task.Run(() => _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters()).Result;
+ if (cleanedUp)
+ {
+ _logger.LogInformation("Removed all abandoned progress rows");
+ }
+ else
+ {
+ _logger.LogWarning("There are abandoned user progress entities in the DB. In Progress activity stream will be skewed");
+ }
+
+ BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate));
}
private void UpdateLibrary(Library library, Dictionary> parsedSeries)
@@ -191,7 +200,6 @@ namespace API.Services.Tasks
_logger.LogInformation("Processing series {SeriesName}", series.Name);
UpdateVolumes(series, parsedSeries[series.Name].ToArray());
series.Pages = series.Volumes.Sum(v => v.Pages);
- _metadataService.UpdateMetadata(series, _forceUpdate);
});
@@ -221,28 +229,16 @@ namespace API.Services.Tasks
series.Volumes.Add(volume);
}
- volume.IsSpecial = volume.Number == 0 && infos.All(p => p.Chapters == "0" || p.IsSpecial); // TODO: I don't think we need this as chapters now handle specials
+ // NOTE: I don't think we need this as chapters now handle specials
+ volume.IsSpecial = volume.Number == 0 && infos.All(p => p.Chapters == "0" || p.IsSpecial);
_logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
- // Remove any instances of Chapters with Range of 0. Range of 0 chapters are no longer supported.
- //volume.Chapters = volume.Chapters.Where(c => c.IsSpecial && c.Files.Count > 1).ToList();
+
UpdateChapters(volume, infos);
volume.Pages = volume.Chapters.Sum(c => c.Pages);
- _metadataService.UpdateMetadata(volume, _forceUpdate);
}
-
-
// Remove existing volumes that aren't in parsedInfos and volumes that have no chapters
- var existingVolumes = series.Volumes.ToList();
- foreach (var volume in existingVolumes)
- {
- // I can't remove based on chapter count as I haven't updated Chapters || volume.Chapters.Count == 0
- var hasInfo = parsedInfos.Any(v => v.Volumes == volume.Name);
- if (!hasInfo)
- {
- series.Volumes.Remove(volume);
- }
- }
+ series.Volumes = series.Volumes.Where(v => parsedInfos.Any(p => p.Volumes == v.Name)).ToList();
_logger.LogDebug("Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}",
series.Name, startingVolumeCount, series.Volumes.Count);
@@ -256,51 +252,62 @@ namespace API.Services.Tasks
// Add new chapters
foreach (var info in parsedInfos)
{
- // Specials go into their own chapters with Range being their filename and IsSpecial = True
- // BUG: If we have an existing chapter with Range == 0 and it has our file, we wont split.
- var chapter = info.IsSpecial ? volume.Chapters.SingleOrDefault(c => c.Range == info.Filename || (c.Files.Select(f => f.FilePath).Contains(info.FullFilePath)))
- : volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters);
+ var specialTreatment = (info.IsSpecial || (info.Volumes == "0" && info.Chapters == "0"));
+ // Specials go into their own chapters with Range being their filename and IsSpecial = True. Non-Specials with Vol and Chap as 0
+ // also are treated like specials
+ _logger.LogDebug("Adding new chapters, {Series} - Vol {Volume} Ch {Chapter} - Needs Special Treatment? {NeedsSpecialTreatment}", info.Series, info.Volumes, info.Chapters, specialTreatment);
+ // If there are duplicate files that parse out to be the same but a different series name (but parses to same normalized name ie History's strongest
+ // vs Historys strongest), this code will break and the duplicate will be skipped.
+ Chapter chapter = null;
+ try
+ {
+ chapter = specialTreatment
+ ? volume.Chapters.SingleOrDefault(c => c.Range == info.Filename
+ || (c.Files.Select(f => f.FilePath)
+ .Contains(info.FullFilePath)))
+ : volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "{FileName} mapped as '{Series} - Vol {Volume} Ch {Chapter}' is a duplicate, skipping", info.FullFilePath, info.Series, info.Volumes, info.Chapters);
+ return;
+ }
if (chapter == null)
{
chapter = new Chapter()
{
- Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "",
- Range = info.IsSpecial ? info.Filename : info.Chapters,
+ Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty,
+ Range = specialTreatment ? info.Filename : info.Chapters,
Files = new List(),
- IsSpecial = info.IsSpecial
+ IsSpecial = specialTreatment
};
volume.Chapters.Add(chapter);
}
-
- if (info.IsSpecial && chapter.Files.Count > 1)
- {
- // Split the Manga files into 2 separate chapters
- }
chapter.Files ??= new List();
- chapter.IsSpecial = info.IsSpecial;
+ chapter.IsSpecial = specialTreatment;
}
// Add files
foreach (var info in parsedInfos)
{
+ var specialTreatment = (info.IsSpecial || (info.Volumes == "0" && info.Chapters == "0"));
Chapter chapter = null;
try
{
- chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters || (info.IsSpecial && c.Range == info.Filename));
+ chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters || (specialTreatment && c.Range == info.Filename));
}
catch (Exception ex)
{
- _logger.LogError(ex, "There was an exception parsing chapter. Skipping Vol {VolumeNumber} Chapter {ChapterNumber}", volume.Name, info.Chapters);
+ _logger.LogError(ex, "There was an exception parsing chapter. Skipping {SeriesName} Vol {VolumeNumber} Chapter {ChapterNumber} - Special treatment: {NeedsSpecialTreatment}", info.Series, volume.Name, info.Chapters, specialTreatment);
}
if (chapter == null) continue;
AddOrUpdateFileForChapter(chapter, info);
chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "";
- chapter.Range = info.IsSpecial ? info.Filename : info.Chapters;
+ chapter.Range = specialTreatment ? info.Filename : info.Chapters;
chapter.Pages = chapter.Files.Sum(f => f.Pages);
- _metadataService.UpdateMetadata(chapter, _forceUpdate);
}
@@ -309,18 +316,14 @@ namespace API.Services.Tasks
var existingChapters = volume.Chapters.ToList();
foreach (var existingChapter in existingChapters)
{
- var hasInfo = existingChapter.IsSpecial ? parsedInfos.Any(v => v.Filename == existingChapter.Range)
+ var specialTreatment = (existingChapter.IsSpecial || (existingChapter.Number == "0" && !int.TryParse(existingChapter.Range, out int i)));
+ var hasInfo = specialTreatment ? parsedInfos.Any(v => v.Filename == existingChapter.Range)
: parsedInfos.Any(v => v.Chapters == existingChapter.Range);
if (!hasInfo || !existingChapter.Files.Any())
{
volume.Chapters.Remove(existingChapter);
}
-
- // if (hasInfo && existingChapter.IsSpecial && existingChapter.Files.Count > 1)
- // {
- //
- // }
}
_logger.LogDebug("Updated chapters from {StartingChaptersCount} to {ChapterCount}",
@@ -328,7 +331,7 @@ namespace API.Services.Tasks
}
///
- /// Attempts to either add a new instance of a show mapping to the scannedSeries bag or adds to an existing.
+ /// Attempts to either add a new instance of a show mapping to the _scannedSeries bag or adds to an existing.
///
///
private void TrackSeries(ParserInfo info)
@@ -337,6 +340,7 @@ namespace API.Services.Tasks
// Check if normalized info.Series already exists and if so, update info to use that name instead
var normalizedSeries = Parser.Parser.Normalize(info.Series);
+ _logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries);
var existingName = _scannedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key) == normalizedSeries)
.Key;
if (!string.IsNullOrEmpty(existingName) && info.Series != existingName)