Mirror of https://github.com/Kareadita/Kavita.git (synced 2025-06-03 05:34:21 -04:00)

Commit 5c59b52ea7: Merge branch 'develop' of https://github.com/Kareadita/Kavita into develop
@@ -2,7 +2,7 @@
 using API.Comparators;
 using Xunit;
 
-namespace API.Tests
+namespace API.Tests.Comparers
 {
     public class ChapterSortComparerTest
     {
API.Tests/Comparers/NaturalSortComparerTest.cs (new file, 31 lines)
@@ -0,0 +1,31 @@
+using System;
+using API.Comparators;
+using Xunit;
+
+namespace API.Tests.Comparers
+{
+    public class NaturalSortComparerTest
+    {
+        [Theory]
+        [InlineData(
+            new[] {"x1.jpg", "x10.jpg", "x3.jpg", "x4.jpg", "x11.jpg"},
+            new[] {"x1.jpg", "x3.jpg", "x4.jpg", "x10.jpg", "x11.jpg"}
+        )]
+        [InlineData(
+            new[] {"Beelzebub_153b_RHS.zip", "Beelzebub_01_[Noodles].zip",},
+            new[] {"Beelzebub_01_[Noodles].zip", "Beelzebub_153b_RHS.zip"}
+        )]
+        public void TestNaturalSortComparer(string[] input, string[] expected)
+        {
+            NaturalSortComparer nc = new NaturalSortComparer();
+            Array.Sort(input, nc);
+
+            var i = 0;
+            foreach (var s in input)
+            {
+                Assert.Equal(s, expected[i]);
+                i++;
+            }
+        }
+    }
+}
@@ -2,7 +2,7 @@
 using API.Comparators;
 using Xunit;
 
-namespace API.Tests.Services
+namespace API.Tests.Comparers
 {
     public class StringLogicalComparerTest
     {
@@ -59,6 +59,7 @@ namespace API.Tests
         [InlineData("Volume 12 - Janken Boy is Coming!.cbz", "12")]
         [InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz", "20")]
         [InlineData("Gantz.V26.cbz", "26")]
+        [InlineData("NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar", "4")]
         public void ParseVolumeTest(string filename, string expected)
         {
             Assert.Equal(expected, ParseVolume(filename));
@@ -176,6 +177,8 @@ namespace API.Tests
         [InlineData("Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz", "2")]
         [InlineData("To Love Ru v09 Uncensored (Ch.071-079).cbz", "71-79")]
         [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter.rar", "0")]
+        [InlineData("Beelzebub_153b_RHS.zip", "153.5")]
+        [InlineData("Beelzebub_150-153b_RHS.zip", "150-153.5")]
        public void ParseChaptersTest(string filename, string expected)
         {
             Assert.Equal(expected, ParseChapter(filename));
@@ -284,6 +287,7 @@ namespace API.Tests
         [InlineData("Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)", "Teen Titans")]
         [InlineData("Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)", "Scott Pilgrim")]
         [InlineData("Wolverine - Origins 003 (2006) (digital) (Minutemen-PhD)", "Wolverine - Origins")]
+        [InlineData("Invincible Vol 01 Family matters (2005) (Digital).cbr", "Invincible")]
         public void ParseComicSeriesTest(string filename, string expected)
         {
             Assert.Equal(expected, ParseComicSeries(filename));
@@ -58,6 +58,9 @@ namespace API.Tests.Services
         [InlineData("file in folder in folder.zip", 1)]
         [InlineData("file in folder.zip", 1)]
         [InlineData("file in folder_alt.zip", 1)]
+        [InlineData("macos_none.zip", 0)]
+        [InlineData("macos_one.zip", 1)]
+        [InlineData("macos_native.zip", 21)]
         public void GetNumberOfPagesFromArchiveTest(string archivePath, int expected)
         {
             var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
@@ -118,6 +121,7 @@ namespace API.Tests.Services
         [InlineData("v10 - with folder.cbz", "v10 - with folder.expected.jpg")]
         [InlineData("v10 - nested folder.cbz", "v10 - nested folder.expected.jpg")]
         //[InlineData("png.zip", "png.PNG")]
+        [InlineData("macos_native.zip", "macos_native.jpg")]
         public void GetCoverImageTest(string inputFile, string expectedOutputFile)
         {
             var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/CoverImages");
@@ -128,10 +132,11 @@ namespace API.Tests.Services
         }
 
         [Theory]
-        [InlineData("06_v01[DMM].zip")]
+        [InlineData("Archives/macos_native.zip")]
+        [InlineData("Formats/One File with DB_Supported.zip")]
         public void CanParseCoverImage(string inputFile)
         {
-            var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
+            var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/");
             Assert.NotEmpty(_archiveService.GetCoverImage(Path.Join(testDirectory, inputFile)));
         }
 
(Binary test files changed: 7 not shown; 1 added image, 53 KiB.)
@@ -25,6 +25,7 @@
     </PackageReference>
     <PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="5.0.4" />
     <PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="5.0.1" />
+    <PackageReference Include="Microsoft.IO.RecyclableMemoryStream" Version="2.0.0" />
     <PackageReference Include="NetVips" Version="2.0.0" />
     <PackageReference Include="NetVips.Native" Version="8.10.6" />
     <PackageReference Include="NReco.Logging.File" Version="1.1.1" />
@@ -4,17 +4,6 @@ namespace API.Comparators
 {
     public class ChapterSortComparer : IComparer<float>
     {
-        // public int Compare(int x, int y)
-        // {
-        //     if (x == 0 && y == 0) return 0;
-        //     // if x is 0, it comes second
-        //     if (x == 0) return 1;
-        //     // if y is 0, it comes second
-        //     if (y == 0) return -1;
-        //
-        //     return x.CompareTo(y);
-        // }
-
         public int Compare(float x, float y)
         {
             if (x == 0.0 && y == 0.0) return 0;
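Aside (not part of the diff): a minimal usage sketch of ChapterSortComparer, assuming the behaviour the removed comments describe, where chapter 0 (specials/full-volume files) sorts after every numbered chapter. The List<float> here is only illustrative.

    // illustrative sketch; requires System.Collections.Generic and API.Comparators
    var chapters = new List<float> { 1f, 0f, 3f, 2f };
    chapters.Sort(new ChapterSortComparer());
    // chapters is now 1, 2, 3, 0 - the "0" entry is pushed to the end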
API/Comparators/NaturalSortComparer.cs (new file, 95 lines)
@@ -0,0 +1,95 @@
+using System;
+using System.Collections.Generic;
+using System.Text.RegularExpressions;
+using static System.GC;
+
+namespace API.Comparators
+{
+    public class NaturalSortComparer : IComparer<string>, IDisposable
+    {
+        private readonly bool _isAscending;
+
+        public NaturalSortComparer(bool inAscendingOrder = true)
+        {
+            _isAscending = inAscendingOrder;
+        }
+
+        #region IComparer<string> Members
+
+        public int Compare(string x, string y)
+        {
+            throw new NotImplementedException();
+        }
+
+        #endregion
+
+        #region IComparer<string> Members
+
+        int IComparer<string>.Compare(string x, string y)
+        {
+            if (x == y)
+                return 0;
+
+            string[] x1, y1;
+
+            if (!_table.TryGetValue(x, out x1))
+            {
+                x1 = Regex.Split(x.Replace(" ", ""), "([0-9]+)");
+                _table.Add(x, x1);
+            }
+
+            if (!_table.TryGetValue(y ?? string.Empty, out y1))
+            {
+                y1 = Regex.Split(y?.Replace(" ", ""), "([0-9]+)");
+                _table.Add(y, y1);
+            }
+
+            int returnVal;
+
+            for (var i = 0; i < x1.Length && i < y1.Length; i++)
+            {
+                if (x1[i] == y1[i]) continue;
+                returnVal = PartCompare(x1[i], y1[i]);
+                return _isAscending ? returnVal : -returnVal;
+            }
+
+            if (y1.Length > x1.Length)
+            {
+                returnVal = 1;
+            }
+            else if (x1.Length > y1.Length)
+            {
+                returnVal = -1;
+            }
+            else
+            {
+                returnVal = 0;
+            }
+
+            return _isAscending ? returnVal : -returnVal;
+        }
+
+        private static int PartCompare(string left, string right)
+        {
+            int x, y;
+            if (!int.TryParse(left, out x))
+                return left.CompareTo(right);
+
+            if (!int.TryParse(right, out y))
+                return left.CompareTo(right);
+
+            return x.CompareTo(y);
+        }
+
+        #endregion
+
+        private Dictionary<string, string[]> _table = new Dictionary<string, string[]>();
+
+        public void Dispose()
+        {
+            SuppressFinalize(this);
+            _table.Clear();
+            _table = null;
+        }
+    }
+}
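Aside (not part of the diff): a sketch of how the new comparer is meant to be consumed. Because only the explicit IComparer<string>.Compare implementation does the work (the public Compare overload above throws NotImplementedException), it has to be used through the interface, which is exactly what Array.Sort and the new test do.

    var files = new[] { "x1.jpg", "x10.jpg", "x3.jpg" };
    using var comparer = new NaturalSortComparer();   // IDisposable: Dispose clears the internal token cache
    Array.Sort(files, comparer);                      // => x1.jpg, x3.jpg, x10.jpg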
@@ -7,6 +7,7 @@ namespace API.Controllers
     public class FallbackController : Controller
     {
         // ReSharper disable once S4487
+        // ReSharper disable once NotAccessedField.Local
         private readonly ITaskScheduler _taskScheduler;
 
         public FallbackController(ITaskScheduler taskScheduler)
@@ -155,7 +155,7 @@ namespace API.Controllers
         [HttpPost("refresh-metadata")]
         public ActionResult RefreshMetadata(int libraryId)
         {
-            _taskScheduler.ScanLibrary(libraryId, true);
+            _taskScheduler.RefreshMetadata(libraryId);
             return Ok();
         }
 
@@ -164,23 +164,7 @@ namespace API.Controllers
         {
             return Ok(await _unitOfWork.LibraryRepository.GetLibraryDtosForUsernameAsync(User.GetUsername()));
         }
-
-        [HttpGet("series")]
-        public async Task<ActionResult<IEnumerable<Series>>> GetSeriesForLibrary(int libraryId, [FromQuery] UserParams userParams)
-        {
-            // TODO: Move this to SeriesController
-            var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
-            var series =
-                await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, user.Id, userParams);
-
-            // Apply progress/rating information (I can't work out how to do this in initial query)
-            await _unitOfWork.SeriesRepository.AddSeriesModifiers(user.Id, series);
-
-            Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages);
-
-            return Ok(series);
-        }
 
         [Authorize(Policy = "RequireAdminRole")]
         [HttpDelete("delete")]
         public async Task<ActionResult<bool>> DeleteLibrary(int libraryId)
@@ -3,7 +3,6 @@ using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 using System.Threading.Tasks;
-using API.Data;
 using API.DTOs;
 using API.Entities;
 using API.Extensions;
@@ -3,6 +3,7 @@ using System.Threading.Tasks;
 using API.DTOs;
 using API.Entities;
 using API.Extensions;
+using API.Helpers;
 using API.Interfaces;
 using Microsoft.AspNetCore.Authorization;
 using Microsoft.AspNetCore.Mvc;
@@ -23,6 +24,21 @@ namespace API.Controllers
             _unitOfWork = unitOfWork;
         }
 
+        [HttpGet]
+        public async Task<ActionResult<IEnumerable<Series>>> GetSeriesForLibrary(int libraryId, [FromQuery] UserParams userParams)
+        {
+            var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
+            var series =
+                await _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdAsync(libraryId, user.Id, userParams);
+
+            // Apply progress/rating information (I can't work out how to do this in initial query)
+            await _unitOfWork.SeriesRepository.AddSeriesModifiers(user.Id, series);
+
+            Response.AddPaginationHeader(series.CurrentPage, series.PageSize, series.TotalCount, series.TotalPages);
+
+            return Ok(series);
+        }
+
         [HttpGet("{seriesId}")]
         public async Task<ActionResult<SeriesDto>> GetSeries(int seriesId)
         {
@@ -105,11 +121,9 @@ namespace API.Controllers
 
             if (series == null) return BadRequest("Series does not exist");
 
-            // TODO: Ensure we check against Library for Series Name change
-            var existingSeries = await _unitOfWork.SeriesRepository.GetSeriesByNameAsync(updateSeries.Name);
-            if (existingSeries != null && existingSeries.Id != series.Id )
+            if (series.Name != updateSeries.Name && await _unitOfWork.SeriesRepository.DoesSeriesNameExistInLibrary(updateSeries.Name))
             {
-                return BadRequest("A series already exists with this name. Name must be unique.");
+                return BadRequest("A series already exists in this library with this name. Series Names must be unique to a library.");
             }
             series.Name = updateSeries.Name;
             series.LocalizedName = updateSeries.LocalizedName;
@@ -3,7 +3,6 @@ using System.IO;
 using System.IO.Compression;
 using System.Threading.Tasks;
 using API.Extensions;
-using API.Interfaces;
 using API.Interfaces.Services;
 using API.Services;
 using Microsoft.AspNetCore.Authorization;
@@ -6,6 +6,7 @@
         public string Name { get; init; }
         public string OriginalName { get; init; }
         public string SortName { get; init; }
+        public string LocalizedName { get; init; }
 
         // Grouping information
         public string LibraryName { get; set; }
API/Data/AppUserProgressRepository.cs (new file, 32 lines)
@@ -0,0 +1,32 @@
+using System.Linq;
+using System.Threading.Tasks;
+using API.Interfaces;
+using Microsoft.EntityFrameworkCore;
+
+namespace API.Data
+{
+    public class AppUserProgressRepository : IAppUserProgressRepository
+    {
+        private readonly DataContext _context;
+
+        public AppUserProgressRepository(DataContext context)
+        {
+            _context = context;
+        }
+
+        /// <summary>
+        /// This will remove any entries that have chapterIds that no longer exists. This will execute the save as well.
+        /// </summary>
+        public async Task<bool> CleanupAbandonedChapters()
+        {
+            var chapterIds = _context.Chapter.Select(c => c.Id);
+
+            var rowsToRemove = await _context.AppUserProgresses
+                .Where(progress => !chapterIds.Contains(progress.ChapterId))
+                .ToListAsync();
+
+            _context.RemoveRange(rowsToRemove);
+            return (await _context.SaveChangesAsync()) > 0;
+        }
+    }
+}
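Aside (not part of the diff): a hypothetical call site, assuming the IAppUserProgressRepository property this commit adds to IUnitOfWork further down. CleanupAbandonedChapters saves its own changes and reports whether any rows were deleted; the surrounding caller and its _logger field are illustrative only.

    // hypothetical caller with an injected IUnitOfWork _unitOfWork and ILogger _logger
    var removedAny = await _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters();
    if (removedAny) _logger.LogInformation("Removed progress entries pointing at deleted chapters");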
@@ -35,15 +35,13 @@ namespace API.Data
 
         public async Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName)
         {
-            Stopwatch sw = Stopwatch.StartNew();
-            var libs = await _context.Library
+            return await _context.Library
                 .Include(l => l.AppUsers)
                 .Where(library => library.AppUsers.Any(x => x.UserName == userName))
+                .OrderBy(l => l.Name)
                 .ProjectTo<LibraryDto>(_mapper.ConfigurationProvider)
                 .AsNoTracking()
                 .ToListAsync();
-            Console.WriteLine("Processed GetLibraryDtosForUsernameAsync in {0} milliseconds", sw.ElapsedMilliseconds);
-            return libs;
         }
 
         public async Task<IEnumerable<Library>> GetLibrariesAsync()
@@ -73,7 +71,10 @@ namespace API.Data
         {
             return await _context.Library
                 .Include(f => f.Folders)
-                .ProjectTo<LibraryDto>(_mapper.ConfigurationProvider).ToListAsync();
+                .OrderBy(l => l.Name)
+                .ProjectTo<LibraryDto>(_mapper.ConfigurationProvider)
+                .AsNoTracking()
+                .ToListAsync();
         }
 
         public async Task<Library> GetLibraryForIdAsync(int libraryId)
@@ -98,19 +99,25 @@ namespace API.Data
                 .ThenInclude(s => s.Volumes)
                 .ThenInclude(v => v.Chapters)
                 .ThenInclude(c => c.Files)
+                .AsSplitQuery()
                 .SingleAsync();
         }
 
         public async Task<bool> LibraryExists(string libraryName)
         {
-            return await _context.Library.AnyAsync(x => x.Name == libraryName);
+            return await _context.Library
+                .AsNoTracking()
+                .AnyAsync(x => x.Name == libraryName);
         }
 
         public async Task<IEnumerable<LibraryDto>> GetLibrariesForUserAsync(AppUser user)
         {
-            return await _context.Library.Where(library => library.AppUsers.Contains(user))
+            return await _context.Library
+                .Where(library => library.AppUsers.Contains(user))
                 .Include(l => l.Folders)
-                .ProjectTo<LibraryDto>(_mapper.ConfigurationProvider).ToListAsync();
+                .AsNoTracking()
+                .ProjectTo<LibraryDto>(_mapper.ConfigurationProvider)
+                .ToListAsync();
         }
 
 
@@ -51,6 +51,19 @@ namespace API.Data
         {
             return await _context.Series.SingleOrDefaultAsync(x => x.Name == name);
         }
+
+        public async Task<bool> DoesSeriesNameExistInLibrary(string name)
+        {
+            var libraries = _context.Series
+                .AsNoTracking()
+                .Where(x => x.Name == name)
+                .Select(s => s.LibraryId);
+
+            return await _context.Series
+                .AsNoTracking()
+                .Where(s => libraries.Contains(s.LibraryId) && s.Name == name)
+                .CountAsync() > 1;
+        }
 
         public Series GetSeriesByName(string name)
         {
@@ -29,6 +29,8 @@ namespace API.Data
         public IVolumeRepository VolumeRepository => new VolumeRepository(_context, _mapper);
 
         public ISettingsRepository SettingsRepository => new SettingsRepository(_context, _mapper);
+
+        public IAppUserProgressRepository AppUserProgressRepository => new AppUserProgressRepository(_context);
 
         public async Task<bool> Complete()
         {
@@ -1,7 +1,6 @@
 
 using System;
 using API.Entities.Enums;
-using API.Entities.Interfaces;
 
 namespace API.Entities
 {
@@ -4,7 +4,6 @@ using API.Interfaces;
 using API.Interfaces.Services;
 using API.Services;
 using API.Services.Tasks;
-using AutoMapper;
 using Microsoft.EntityFrameworkCore;
 using Microsoft.Extensions.Configuration;
 using Microsoft.Extensions.DependencyInjection;
@@ -5,7 +5,6 @@ namespace API.Helpers.Converters
 {
     public static class CronConverter
     {
-        // TODO: this isn't used. Replace strings with Enums?
         public static readonly IEnumerable<string> Options = new []
         {
             "disabled",
API/Interfaces/IAppUserProgressRepository.cs (new file, 9 lines)
@@ -0,0 +1,9 @@
+using System.Threading.Tasks;
+
+namespace API.Interfaces
+{
+    public interface IAppUserProgressRepository
+    {
+        Task<bool> CleanupAbandonedChapters();
+    }
+}
@@ -11,6 +11,7 @@ namespace API.Interfaces
         void Add(Series series);
         void Update(Series series);
         Task<Series> GetSeriesByNameAsync(string name);
+        Task<bool> DoesSeriesNameExistInLibrary(string name);
         Series GetSeriesByName(string name);
 
         /// <summary>
@@ -9,6 +9,7 @@ namespace API.Interfaces
         ILibraryRepository LibraryRepository { get; }
         IVolumeRepository VolumeRepository { get; }
         ISettingsRepository SettingsRepository { get; }
+        IAppUserProgressRepository AppUserProgressRepository { get; }
         Task<bool> Complete();
         bool HasChanges();
     }
@@ -1,4 +1,5 @@
-namespace API.Interfaces.Services
+
+namespace API.Interfaces.Services
 {
     public interface IScannerService
     {
@@ -9,7 +10,6 @@
         /// <param name="libraryId">Library to scan against</param>
         /// <param name="forceUpdate">Force overwriting for cover images</param>
         void ScanLibrary(int libraryId, bool forceUpdate);
-
         void ScanLibraries();
     }
 }
@@ -9,11 +9,11 @@ namespace API.Parser
 {
     public static class Parser
     {
-        public static readonly string MangaFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|.tar.gz|.7zip";
+        public static readonly string ArchiveFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|.tar.gz|.7zip";
         public static readonly string ImageFileExtensions = @"^(\.png|\.jpeg|\.jpg)";
         private static readonly string XmlRegexExtensions = @"\.xml";
         private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
-        private static readonly Regex MangaFileRegex = new Regex(MangaFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
+        private static readonly Regex ArchiveFileRegex = new Regex(ArchiveFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
         private static readonly Regex XmlRegex = new Regex(XmlRegexExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
 
         private static readonly Regex[] MangaVolumeRegex = new[]
@@ -22,6 +22,10 @@ namespace API.Parser
             new Regex(
                 @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d+)( |_)",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
+            // NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar
+            new Regex(
+                @"(?<Series>.*)(\b|_)(?!\[)(vol\.?)(?<Volume>\d+(-\d+)?)(?!\])",
+                RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Historys Strongest Disciple Kenichi_v11_c90-98.zip or Dance in the Vampire Bund v16-17
             new Regex(
                 @"(?<Series>.*)(\b|_)(?!\[)v(?<Volume>\d+(-\d+)?)(?!\])",
@@ -144,6 +148,10 @@ namespace API.Parser
 
         private static readonly Regex[] ComicSeriesRegex = new[]
         {
+            // Invincible Vol 01 Family matters (2005) (Digital)
+            new Regex(
+                @"(?<Series>.*)(\b|_)(vol\.?)( |_)(?<Volume>\d+(-\d+)?)",
+                RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)
             new Regex(
                 @"^(?<Volume>\d+) (- |_)?(?<Series>.*(\d{4})?)( |_)(\(|\d+)",
@@ -275,9 +283,9 @@ namespace API.Parser
             new Regex(
                 @"(?<Series>.*) S(?<Volume>\d+) (?<Chapter>\d+(?:.\d+|-\d+)?)",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
-            // Beelzebub_01_[Noodles].zip
+            // Beelzebub_01_[Noodles].zip, Beelzebub_153b_RHS.zip
             new Regex(
-                @"^((?!v|vo|vol|Volume).)*( |_)(?<Chapter>\.?\d+(?:.\d+|-\d+)?)( |_|\[|\()",
+                @"^((?!v|vo|vol|Volume).)*( |_)(?<Chapter>\.?\d+(?:.\d+|-\d+)?)(?<ChapterPart>b)?( |_|\[|\()",
                 RegexOptions.IgnoreCase | RegexOptions.Compiled),
             // Yumekui-Merry_DKThias_Chapter21.zip
             new Regex(
@@ -531,12 +539,16 @@ namespace API.Parser
                 if (!match.Groups["Chapter"].Success || match.Groups["Chapter"] == Match.Empty) continue;
 
                 var value = match.Groups["Chapter"].Value;
+                var hasChapterPart = match.Groups["ChapterPart"].Success;
 
-                if (!value.Contains("-")) return RemoveLeadingZeroes(match.Groups["Chapter"].Value);
+                if (!value.Contains("-"))
+                {
+                    return RemoveLeadingZeroes(hasChapterPart ? AddChapterPart(value) : value);
+                }
 
                 var tokens = value.Split("-");
                 var from = RemoveLeadingZeroes(tokens[0]);
-                var to = RemoveLeadingZeroes(tokens[1]);
+                var to = RemoveLeadingZeroes(hasChapterPart ? AddChapterPart(tokens[1]) : tokens[1]);
                 return $"{@from}-{to}";
 
             }
@@ -544,6 +556,16 @@ namespace API.Parser
 
             return "0";
         }
 
+        private static string AddChapterPart(string value)
+        {
+            if (value.Contains("."))
+            {
+                return value;
+            }
+
+            return $"{value}.5";
+        }
+
         public static string ParseComicChapter(string filename)
         {
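Aside (not part of the diff): a worked example of the new ChapterPart handling, matching the test expectations added earlier in this commit.

    // "Beelzebub_153b_RHS.zip"     -> Chapter "153", ChapterPart "b" -> AddChapterPart("153") -> "153.5"
    // "Beelzebub_150-153b_RHS.zip" -> Chapter "150-153", ChapterPart "b" -> "150-" + AddChapterPart("153") -> "150-153.5"
    // AddChapterPart leaves values that already carry a decimal untouched:
    // AddChapterPart("153.5") == "153.5"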
@@ -697,7 +719,7 @@ namespace API.Parser
 
         public static bool IsArchive(string filePath)
         {
-            return MangaFileRegex.IsMatch(Path.GetExtension(filePath));
+            return ArchiveFileRegex.IsMatch(Path.GetExtension(filePath));
         }
 
         public static bool IsImage(string filePath)
@@ -10,6 +10,7 @@ using API.Extensions;
 using API.Interfaces.Services;
 using API.Services.Tasks;
 using Microsoft.Extensions.Logging;
+using Microsoft.IO;
 using SharpCompress.Archives;
 using SharpCompress.Common;
 using Image = NetVips.Image;
@@ -22,7 +23,8 @@ namespace API.Services
     public class ArchiveService : IArchiveService
     {
         private readonly ILogger<ArchiveService> _logger;
-        private const int ThumbnailWidth = 320; // 153w x 230h TODO: Look into optimizing the images to be smaller
+        private const int ThumbnailWidth = 320; // 153w x 230h
+        private static readonly RecyclableMemoryStreamManager _streamManager = new RecyclableMemoryStreamManager();
 
         public ArchiveService(ILogger<ArchiveService> logger)
         {
@@ -74,13 +76,15 @@ namespace API.Services
                 {
                     _logger.LogDebug("Using default compression handling");
                     using ZipArchive archive = ZipFile.OpenRead(archivePath);
-                    return archive.Entries.Count(e => Parser.Parser.IsImage(e.FullName));
+                    return archive.Entries.Count(e => !e.FullName.Contains("__MACOSX") && Parser.Parser.IsImage(e.FullName));
                 }
                 case ArchiveLibrary.SharpCompress:
                 {
                     _logger.LogDebug("Using SharpCompress compression handling");
                     using var archive = ArchiveFactory.Open(archivePath);
-                    return archive.Entries.Count(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key));
+                    return archive.Entries.Count(entry => !entry.IsDirectory &&
+                                                          !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+                                                          && Parser.Parser.IsImage(entry.Key));
                 }
                 case ArchiveLibrary.NotSupported:
                     _logger.LogError("[GetNumberOfPagesFromArchive] This archive cannot be read: {ArchivePath}. Defaulting to 0 pages", archivePath);
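Aside (not part of the diff): the "__MACOSX" check now appears in several places in this file. A shared helper along these lines (hypothetical, not in this commit) would express the same filter once for SharpCompress entries.

    private static bool IsMacOsMetadata(string entryKey) =>
        (Path.GetDirectoryName(entryKey) ?? string.Empty).Contains("__MACOSX");

    // usage sketch:
    // archive.Entries.Count(e => !e.IsDirectory && !IsMacOsMetadata(e.Key) && Parser.Parser.IsImage(e.Key));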
@@ -117,8 +121,8 @@ namespace API.Services
                 {
                     _logger.LogDebug("Using default compression handling");
                     using var archive = ZipFile.OpenRead(archivePath);
-                    var folder = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
-                    var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList();
+                    var folder = archive.Entries.SingleOrDefault(x => !x.FullName.Contains("__MACOSX") && Path.GetFileNameWithoutExtension(x.Name).ToLower() == "folder");
+                    var entries = archive.Entries.Where(x => Path.HasExtension(x.FullName) && !x.FullName.Contains("__MACOSX") && Parser.Parser.IsImage(x.FullName)).OrderBy(x => x.FullName).ToList();
                     var entry = folder ?? entries[0];
 
                     return createThumbnail ? CreateThumbnail(entry) : ConvertEntryToByteArray(entry);
@@ -127,7 +131,9 @@ namespace API.Services
                 {
                     _logger.LogDebug("Using SharpCompress compression handling");
                     using var archive = ArchiveFactory.Open(archivePath);
-                    return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), createThumbnail);
+                    return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory
+                                                                         && !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+                                                                         && Parser.Parser.IsImage(entry.Key)), createThumbnail);
                 }
                 case ArchiveLibrary.NotSupported:
                     _logger.LogError("[GetCoverImage] This archive cannot be read: {ArchivePath}. Defaulting to no cover image", archivePath);
@@ -152,10 +158,11 @@ namespace API.Services
             {
                 if (Path.GetFileNameWithoutExtension(entry.Key).ToLower() == "folder")
                 {
-                    using var ms = new MemoryStream();
+                    using var ms = _streamManager.GetStream();
                     entry.WriteTo(ms);
                     ms.Position = 0;
-                    return createThumbnail ? CreateThumbnail(ms.ToArray(), Path.GetExtension(entry.Key)) : ms.ToArray();
+                    var data = ms.ToArray();
+                    return createThumbnail ? CreateThumbnail(data, Path.GetExtension(entry.Key)) : data;
                 }
             }
 
@@ -163,7 +170,7 @@ namespace API.Services
         {
             var entry = images.OrderBy(e => e.Key).FirstOrDefault();
             if (entry == null) return Array.Empty<byte>();
-            using var ms = new MemoryStream();
+            using var ms = _streamManager.GetStream();
             entry.WriteTo(ms);
             ms.Position = 0;
             var data = ms.ToArray();
@@ -176,11 +183,9 @@ namespace API.Services
         private static byte[] ConvertEntryToByteArray(ZipArchiveEntry entry)
         {
             using var stream = entry.Open();
-            using var ms = new MemoryStream();
+            using var ms = _streamManager.GetStream();
             stream.CopyTo(ms);
-            var data = ms.ToArray();
-
-            return data;
+            return ms.ToArray();
         }
 
         /// <summary>
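Aside (not part of the diff): the MemoryStream swaps in the hunks above follow the usual RecyclableMemoryStream pattern, where one long-lived manager (the new static _streamManager field) hands out pooled streams and the bytes are copied out before the stream is disposed back to the pool.

    private static readonly RecyclableMemoryStreamManager _streamManager = new RecyclableMemoryStreamManager();

    using var ms = _streamManager.GetStream();  // pooled MemoryStream instead of a fresh allocation
    entry.WriteTo(ms);                          // SharpCompress writes the archive entry into the pooled buffer
    var data = ms.ToArray();                    // copy the bytes out before Dispose returns the buffer to the pool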
@@ -194,7 +199,7 @@ namespace API.Services
             // Sometimes ZipArchive will list the directory and others it will just keep it in the FullName
             return archive.Entries.Count > 0 &&
                    !Path.HasExtension(archive.Entries.ElementAt(0).FullName) ||
-                   archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar));
+                   archive.Entries.Any(e => e.FullName.Contains(Path.AltDirectorySeparatorChar) && !e.FullName.Contains("__MACOSX"));
         }
 
         private byte[] CreateThumbnail(byte[] entry, string formatExtension = ".jpg")
@@ -211,7 +216,7 @@ namespace API.Services
             }
             catch (Exception ex)
             {
-                _logger.LogError(ex, "[CreateThumbnail] There was a critical error and prevented thumbnail generation. Defaulting to no cover image");
+                _logger.LogError(ex, "[CreateThumbnail] There was a critical error and prevented thumbnail generation. Defaulting to no cover image. Format Extension {Extension}", formatExtension);
             }
 
             return Array.Empty<byte>();
@@ -263,7 +268,7 @@ namespace API.Services
             {
                 if (Path.GetFileNameWithoutExtension(entry.Key).ToLower().EndsWith("comicinfo") && Parser.Parser.IsXml(entry.Key))
                 {
-                    using var ms = new MemoryStream();
+                    using var ms = _streamManager.GetStream();
                     entry.WriteTo(ms);
                     ms.Position = 0;
 
@@ -295,7 +300,7 @@ namespace API.Services
                 {
                     _logger.LogDebug("Using default compression handling");
                     using var archive = ZipFile.OpenRead(archivePath);
-                    var entry = archive.Entries.SingleOrDefault(x => Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName));
+                    var entry = archive.Entries.SingleOrDefault(x => !x.FullName.Contains("__MACOSX") && Path.GetFileNameWithoutExtension(x.Name).ToLower() == "comicinfo" && Parser.Parser.IsXml(x.FullName));
                     if (entry != null)
                     {
                         using var stream = entry.Open();
@@ -308,7 +313,9 @@ namespace API.Services
                 {
                     _logger.LogDebug("Using SharpCompress compression handling");
                     using var archive = ArchiveFactory.Open(archivePath);
-                    info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsXml(entry.Key)));
+                    info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory
+                                                                           && !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+                                                                           && Parser.Parser.IsXml(entry.Key)));
                     break;
                 }
                 case ArchiveLibrary.NotSupported:
@@ -392,7 +399,9 @@ namespace API.Services
                 {
                     _logger.LogDebug("Using SharpCompress compression handling");
                     using var archive = ArchiveFactory.Open(archivePath);
-                    ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath);
+                    ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory
+                                                                          && !(Path.GetDirectoryName(entry.Key) ?? string.Empty).Contains("__MACOSX")
+                                                                          && Parser.Parser.IsImage(entry.Key)), extractPath);
                     break;
                 }
                 case ArchiveLibrary.NotSupported:
@@ -2,14 +2,14 @@
 {
     public class ComicInfo
     {
-        public string Summary;
-        public string Title;
-        public string Series;
-        public string Notes;
-        public string Publisher;
-        public string Genre;
-        public int PageCount;
-        public string LanguageISO;
-        public string Web;
+        public string Summary { get; set; }
+        public string Title { get; set; }
+        public string Series { get; set; }
+        public string Notes { get; set; }
+        public string Publisher { get; set; }
+        public string Genre { get; set; }
+        public int PageCount { get; set; }
+        public string LanguageISO { get; set; }
+        public string Web { get; set; }
     }
 }
@@ -6,10 +6,8 @@ using System.Linq;
 using System.Text.RegularExpressions;
 using System.Threading;
 using System.Threading.Tasks;
-using API.DTOs;
 using API.Interfaces.Services;
 using Microsoft.Extensions.Logging;
-using NetVips;
 
 namespace API.Services
 {
@@ -60,6 +58,7 @@ namespace API.Services
             {
                 rootPath = rootPath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
             }
+            // NOTE: I Could use Path.GetRelativePath and split on separator character instead.
 
             var path = fullPath.EndsWith(separator) ? fullPath.Substring(0, fullPath.Length - 1) : fullPath;
             var root = rootPath.EndsWith(separator) ? rootPath.Substring(0, rootPath.Length - 1) : rootPath;
@@ -4,6 +4,7 @@ using System.Diagnostics;
 using System.IO;
 using System.Linq;
 using System.Threading.Tasks;
+using API.Comparators;
 using API.Entities;
 using API.Extensions;
 using API.Interfaces;
@@ -45,9 +46,9 @@ namespace API.Services
         {
             if (volume != null && ShouldFindCoverImage(volume.CoverImage, forceUpdate))
             {
-                // TODO: Create a custom sorter for Chapters so it's consistent across the application
+                // TODO: Replace this with ChapterSortComparator
                 volume.Chapters ??= new List<Chapter>();
-                var firstChapter = volume.Chapters.OrderBy(x => Double.Parse(x.Number)).FirstOrDefault();
+                var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number)).FirstOrDefault();
 
                 var firstFile = firstChapter?.Files.OrderBy(x => x.Chapter).FirstOrDefault();
                 // Skip calculating Cover Image (I/O) if the chapter already has it set
@@ -67,16 +68,29 @@ namespace API.Services
 
         public void UpdateMetadata(Series series, bool forceUpdate)
         {
+            // TODO: Use new ChapterSortComparer() here instead
             if (series == null) return;
             if (ShouldFindCoverImage(series.CoverImage, forceUpdate))
             {
                 series.Volumes ??= new List<Volume>();
                 var firstCover = series.Volumes.OrderBy(x => x.Number).FirstOrDefault(x => x.Number != 0);
+                byte[] coverImage = null;
                 if (firstCover == null && series.Volumes.Any())
                 {
-                    firstCover = series.Volumes.FirstOrDefault(x => x.Number == 0);
+                    // If firstCover is null and one volume, the whole series is Chapters under Vol 0.
+                    if (series.Volumes.Count == 1)
+                    {
+                        coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number))
+                            .FirstOrDefault(c => !c.IsSpecial)?.CoverImage;
+                    }
+
+                    if (coverImage == null)
+                    {
+                        coverImage = series.Volumes[0].Chapters.OrderBy(c => double.Parse(c.Number))
+                            .FirstOrDefault()?.CoverImage;
+                    }
                 }
-                series.CoverImage = firstCover?.CoverImage;
+                series.CoverImage = firstCover?.CoverImage ?? coverImage;
             }
 
             if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return;
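Aside (not part of the diff): a plain-language summary of the series cover fallback as read from the hunk above, kept here as comments.

    // 1. firstCover: cover of the first volume with Number != 0;
    // 2. otherwise, for a single Volume 0 series, the cover of its first non-special chapter
    //    (chapters ordered by double.Parse(c.Number));
    // 3. otherwise the first chapter's cover regardless of IsSpecial;
    // series.CoverImage = firstCover?.CoverImage ?? coverImage;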
@ -88,22 +102,20 @@ namespace API.Services
|
|||||||
if (firstFile != null && !new FileInfo(firstFile.FilePath).DoesLastWriteMatch(firstFile.LastModified))
|
if (firstFile != null && !new FileInfo(firstFile.FilePath).DoesLastWriteMatch(firstFile.LastModified))
|
||||||
{
|
{
|
||||||
series.Summary = _archiveService.GetSummaryInfo(firstFile.FilePath);
|
series.Summary = _archiveService.GetSummaryInfo(firstFile.FilePath);
|
||||||
|
firstFile.LastModified = DateTime.Now;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
public void RefreshMetadata(int libraryId, bool forceUpdate = false)
|
public void RefreshMetadata(int libraryId, bool forceUpdate = false)
|
||||||
{
|
{
|
||||||
var sw = Stopwatch.StartNew();
|
var sw = Stopwatch.StartNew();
|
||||||
var library = Task.Run(() => _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId)).Result;
|
var library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).Result;
|
||||||
var allSeries = Task.Run(() => _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(libraryId)).Result.ToList();
|
|
||||||
|
|
||||||
_logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name);
|
_logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name);
|
||||||
foreach (var series in allSeries)
|
foreach (var series in library.Series)
|
||||||
{
|
{
|
||||||
series.NormalizedName = Parser.Parser.Normalize(series.Name);
|
foreach (var volume in series.Volumes)
|
||||||
|
|
||||||
var volumes = Task.Run(() => _unitOfWork.SeriesRepository.GetVolumes(series.Id)).Result.ToList();
|
|
||||||
foreach (var volume in volumes)
|
|
||||||
{
|
{
|
||||||
foreach (var chapter in volume.Chapters)
|
foreach (var chapter in volume.Chapters)
|
||||||
{
|
{
|
||||||
|
@ -5,8 +5,6 @@ using API.Helpers.Converters;
|
|||||||
using API.Interfaces;
|
using API.Interfaces;
|
||||||
using API.Interfaces.Services;
|
using API.Interfaces.Services;
|
||||||
using Hangfire;
|
using Hangfire;
|
||||||
using Microsoft.AspNetCore.Hosting;
|
|
||||||
using Microsoft.Extensions.Hosting;
|
|
||||||
using Microsoft.Extensions.Logging;
|
using Microsoft.Extensions.Logging;
|
||||||
|
|
||||||
namespace API.Services
|
namespace API.Services
|
||||||
@ -25,8 +23,7 @@ namespace API.Services
|
|||||||
|
|
||||||
|
|
||||||
public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService,
|
public TaskScheduler(ICacheService cacheService, ILogger<TaskScheduler> logger, IScannerService scannerService,
|
||||||
IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService,
|
IUnitOfWork unitOfWork, IMetadataService metadataService, IBackupService backupService, ICleanupService cleanupService)
|
||||||
IWebHostEnvironment env)
|
|
||||||
{
|
{
|
||||||
_cacheService = cacheService;
|
_cacheService = cacheService;
|
||||||
_logger = logger;
|
_logger = logger;
|
||||||
@ -36,17 +33,7 @@ namespace API.Services
|
|||||||
_backupService = backupService;
|
_backupService = backupService;
|
||||||
_cleanupService = cleanupService;
|
_cleanupService = cleanupService;
|
||||||
|
|
||||||
if (!env.IsDevelopment())
|
ScheduleTasks();
|
||||||
{
|
|
||||||
ScheduleTasks();
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
RecurringJob.RemoveIfExists("scan-libraries");
|
|
||||||
RecurringJob.RemoveIfExists("backup");
|
|
||||||
RecurringJob.RemoveIfExists("cleanup");
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void ScheduleTasks()
|
public void ScheduleTasks()
|
||||||
@ -56,8 +43,9 @@ namespace API.Services
|
|||||||
string setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).Result.Value;
|
string setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskScan)).Result.Value;
|
||||||
if (setting != null)
|
if (setting != null)
|
||||||
{
|
{
|
||||||
_logger.LogDebug("Scheduling Scan Library Task for {Cron}", setting);
|
_logger.LogDebug("Scheduling Scan Library Task for {Setting}", setting);
|
||||||
RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), () => CronConverter.ConvertToCronNotation(setting));
|
RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(),
|
||||||
|
() => CronConverter.ConvertToCronNotation(setting));
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
@ -67,7 +55,7 @@ namespace API.Services
|
|||||||
setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Result.Value;
|
setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Result.Value;
|
||||||
if (setting != null)
|
if (setting != null)
|
||||||
{
|
{
|
||||||
_logger.LogDebug("Scheduling Backup Task for {Cron}", setting);
|
_logger.LogDebug("Scheduling Backup Task for {Setting}", setting);
|
||||||
RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting));
|
RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting));
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
@ -80,10 +68,10 @@ namespace API.Services
|
|||||||
|
|
||||||
public void ScanLibrary(int libraryId, bool forceUpdate = false)
|
public void ScanLibrary(int libraryId, bool forceUpdate = false)
|
||||||
{
|
{
|
||||||
// TODO: We shouldn't queue up a job if one is already in progress
|
|
||||||
_logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
|
_logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
|
||||||
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate));
|
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate));
|
||||||
BackgroundJob.Enqueue(() => _cleanupService.Cleanup()); // When we do a scan, force cache to re-unpack in case page numbers change
|
// When we do a scan, force cache to re-unpack in case page numbers change
|
||||||
|
BackgroundJob.Enqueue(() => _cleanupService.Cleanup());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void CleanupChapters(int[] chapterIds)
|
public void CleanupChapters(int[] chapterIds)
|
||||||
@@ -23,7 +23,6 @@ namespace API.Services.Tasks
         private readonly IArchiveService _archiveService;
         private readonly IMetadataService _metadataService;
         private ConcurrentDictionary<string, List<ParserInfo>> _scannedSeries;
-        private bool _forceUpdate;

         public ScannerService(IUnitOfWork unitOfWork, ILogger<ScannerService> logger, IArchiveService archiveService,
             IMetadataService metadataService)
@@ -34,8 +33,9 @@ namespace API.Services.Tasks
             _metadataService = metadataService;
         }

-        [DisableConcurrentExecution(timeoutInSeconds: 5)]
-        [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
+        [DisableConcurrentExecution(timeoutInSeconds: 360)]
+        //[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
         public void ScanLibraries()
         {
             var libraries = Task.Run(() => _unitOfWork.LibraryRepository.GetLibrariesAsync()).Result.ToList();
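The attribute swap above relies on Hangfire's DisableConcurrentExecution filter, which takes a distributed lock keyed on the job method so only one execution runs at a time; timeoutInSeconds is how long a competing invocation waits for that lock before failing, so raising it from 5 to 360 lets a queued scan wait for a running one instead of erroring almost immediately. A minimal sketch of applying the same filters to a hypothetical job class:

using Hangfire;

public class NightlyMaintenanceSketch
{
    // Hangfire takes a distributed lock named after this type and method, so only one
    // execution runs at a time across all servers. A competing invocation waits up to
    // timeoutInSeconds for the lock and then fails with a lock-timeout exception.
    [DisableConcurrentExecution(timeoutInSeconds: 600)]
    [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
    public void Run()
    {
        // long-running, non-reentrant work would go here
    }
}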
@@ -60,22 +60,15 @@ namespace API.Services.Tasks
             //return false;
         }

-        private void Cleanup()
-        {
-            _scannedSeries = null;
-        }

-        [DisableConcurrentExecution(5)]
-        [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
+        [DisableConcurrentExecution(360)]
+        //[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
         public void ScanLibrary(int libraryId, bool forceUpdate)
         {
-            _forceUpdate = forceUpdate;
             var sw = Stopwatch.StartNew();
-            Cleanup();
             Library library;
             try
             {
-                library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).Result;
+                library = Task.Run(() => _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId)).GetAwaiter().GetResult();
             }
             catch (Exception ex)
             {
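One practical difference behind the .Result to .GetAwaiter().GetResult() change: blocking with .Result surfaces failures as an AggregateException, while GetAwaiter().GetResult() rethrows the original exception, which keeps the catch (Exception ex) block and its log message pointed at the real error. The diff itself does not state the motivation, so treat this as the usual reading; a self-contained sketch of the behavioral difference:

using System;
using System.Threading.Tasks;

public static class BlockingWaitSketch
{
    private static async Task<string> FailAsync()
    {
        await Task.Delay(10);
        throw new InvalidOperationException("boom");
    }

    public static void Main()
    {
        try
        {
            // .Result wraps the failure, so an AggregateException surfaces here.
            var value = Task.Run(() => FailAsync()).Result;
            Console.WriteLine(value);
        }
        catch (AggregateException ex)
        {
            Console.WriteLine(ex.InnerException?.GetType().Name); // InvalidOperationException
        }

        try
        {
            // GetAwaiter().GetResult() rethrows the original exception type directly.
            var value = Task.Run(() => FailAsync()).GetAwaiter().GetResult();
            Console.WriteLine(value);
        }
        catch (InvalidOperationException ex)
        {
            Console.WriteLine(ex.Message); // "boom"
        }
    }
}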
@@ -84,8 +77,10 @@ namespace API.Services.Tasks
                 return;
             }

-            _scannedSeries = new ConcurrentDictionary<string, List<ParserInfo>>();
             _logger.LogInformation("Beginning scan on {LibraryName}. Forcing metadata update: {ForceUpdate}", library.Name, forceUpdate);

+            _scannedSeries = new ConcurrentDictionary<string, List<ParserInfo>>();
+
             var totalFiles = 0;
             var skippedFolders = 0;
@@ -104,7 +99,7 @@ namespace API.Services.Tasks
                     {
                         _logger.LogError(exception, "The file {Filename} could not be found", f);
                     }
-                }, Parser.Parser.MangaFileExtensions);
+                }, Parser.Parser.ArchiveFileExtensions);
             }
             catch (ArgumentException ex) {
                 _logger.LogError(ex, "The directory '{FolderPath}' does not exist", folderPath.Path);
@@ -120,15 +115,15 @@ namespace API.Services.Tasks
             {
                 _logger.LogInformation("All Folders were skipped due to no modifications to the directories");
                 _unitOfWork.LibraryRepository.Update(library);
+                _scannedSeries = null;
                 _logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, sw.ElapsedMilliseconds, library.Name);
-                Cleanup();
                 return;
             }

             // Remove any series where there were no parsed infos
             var filtered = _scannedSeries.Where(kvp => kvp.Value.Count != 0);
             var series = filtered.ToDictionary(v => v.Key, v => v.Value);

             UpdateLibrary(library, series);
             _unitOfWork.LibraryRepository.Update(library);

@@ -140,8 +135,22 @@ namespace API.Services.Tasks
             {
                 _logger.LogError("There was a critical error that resulted in a failed scan. Please check logs and rescan");
             }
+            _scannedSeries = null;

             _logger.LogInformation("Processed {TotalFiles} files in {ElapsedScanTime} milliseconds for {LibraryName}", totalFiles, sw.ElapsedMilliseconds + scanElapsedTime, library.Name);

+            // Cleanup any user progress that doesn't exist
+            var cleanedUp = Task.Run(() => _unitOfWork.AppUserProgressRepository.CleanupAbandonedChapters()).Result;
+            if (cleanedUp)
+            {
+                _logger.LogInformation("Removed all abandoned progress rows");
+            }
+            else
+            {
+                _logger.LogWarning("There are abandoned user progress entities in the DB. In Progress activity stream will be skewed");
+            }
+
+            BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate));
         }

         private void UpdateLibrary(Library library, Dictionary<string, List<ParserInfo>> parsedSeries)
@@ -191,7 +200,6 @@ namespace API.Services.Tasks
                 _logger.LogInformation("Processing series {SeriesName}", series.Name);
                 UpdateVolumes(series, parsedSeries[series.Name].ToArray());
                 series.Pages = series.Volumes.Sum(v => v.Pages);
-                _metadataService.UpdateMetadata(series, _forceUpdate);
             });

@@ -221,28 +229,16 @@ namespace API.Services.Tasks
                     series.Volumes.Add(volume);
                 }

-                volume.IsSpecial = volume.Number == 0 && infos.All(p => p.Chapters == "0" || p.IsSpecial); // TODO: I don't think we need this as chapters now handle specials
+                // NOTE: I don't think we need this as chapters now handle specials
+                volume.IsSpecial = volume.Number == 0 && infos.All(p => p.Chapters == "0" || p.IsSpecial);
                 _logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
-                // Remove any instances of Chapters with Range of 0. Range of 0 chapters are no longer supported.
-                //volume.Chapters = volume.Chapters.Where(c => c.IsSpecial && c.Files.Count > 1).ToList();
                 UpdateChapters(volume, infos);
                 volume.Pages = volume.Chapters.Sum(c => c.Pages);
-                _metadataService.UpdateMetadata(volume, _forceUpdate);
             }

             // Remove existing volumes that aren't in parsedInfos and volumes that have no chapters
-            var existingVolumes = series.Volumes.ToList();
-            foreach (var volume in existingVolumes)
-            {
-                // I can't remove based on chapter count as I haven't updated Chapters || volume.Chapters.Count == 0
-                var hasInfo = parsedInfos.Any(v => v.Volumes == volume.Name);
-                if (!hasInfo)
-                {
-                    series.Volumes.Remove(volume);
-                }
-            }
+            series.Volumes = series.Volumes.Where(v => parsedInfos.Any(p => p.Volumes == v.Name)).ToList();

             _logger.LogDebug("Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}",
                 series.Name, startingVolumeCount, series.Volumes.Count);
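The removed foreach-and-Remove loop and the new Where(...).ToList() assignment keep the same volumes: anything no ParserInfo references is dropped. The LINQ form builds a new list rather than mutating while iterating a copy. A standalone sketch of the two shapes, using plain strings in place of the Volume entity:

using System.Collections.Generic;
using System.Linq;

public static class VolumeFilterSketch
{
    // Old shape: copy the list, then remove unreferenced items in place.
    public static void FilterInPlace(List<string> volumes, HashSet<string> parsedVolumeNames)
    {
        foreach (var volume in volumes.ToList())
        {
            if (!parsedVolumeNames.Contains(volume))
            {
                volumes.Remove(volume);
            }
        }
    }

    // New shape: project the kept items into a fresh list and reassign.
    public static List<string> FilterWithLinq(List<string> volumes, HashSet<string> parsedVolumeNames)
    {
        return volumes.Where(parsedVolumeNames.Contains).ToList();
    }
}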
@@ -256,51 +252,62 @@ namespace API.Services.Tasks
             // Add new chapters
             foreach (var info in parsedInfos)
             {
-                // Specials go into their own chapters with Range being their filename and IsSpecial = True
-                // BUG: If we have an existing chapter with Range == 0 and it has our file, we wont split.
-                var chapter = info.IsSpecial ? volume.Chapters.SingleOrDefault(c => c.Range == info.Filename || (c.Files.Select(f => f.FilePath).Contains(info.FullFilePath)))
-                    : volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters);
+                var specialTreatment = (info.IsSpecial || (info.Volumes == "0" && info.Chapters == "0"));
+                // Specials go into their own chapters with Range being their filename and IsSpecial = True. Non-Specials with Vol and Chap as 0
+                // also are treated like specials
+                _logger.LogDebug("Adding new chapters, {Series} - Vol {Volume} Ch {Chapter} - Needs Special Treatment? {NeedsSpecialTreatment}", info.Series, info.Volumes, info.Chapters, specialTreatment);
+                // If there are duplicate files that parse out to be the same but a different series name (but parses to same normalized name ie History's strongest
+                // vs Historys strongest), this code will break and the duplicate will be skipped.
+                Chapter chapter = null;
+                try
+                {
+                    chapter = specialTreatment
+                        ? volume.Chapters.SingleOrDefault(c => c.Range == info.Filename
+                                                               || (c.Files.Select(f => f.FilePath)
+                                                                   .Contains(info.FullFilePath)))
+                        : volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters);
+                }
+                catch (Exception ex)
+                {
+                    _logger.LogError(ex, "{FileName} mapped as '{Series} - Vol {Volume} Ch {Chapter}' is a duplicate, skipping", info.FullFilePath, info.Series, info.Volumes, info.Chapters);
+                    return;
+                }

                 if (chapter == null)
                 {
                     chapter = new Chapter()
                     {
-                        Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "",
-                        Range = info.IsSpecial ? info.Filename : info.Chapters,
+                        Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + string.Empty,
+                        Range = specialTreatment ? info.Filename : info.Chapters,
                         Files = new List<MangaFile>(),
-                        IsSpecial = info.IsSpecial
+                        IsSpecial = specialTreatment
                     };
                     volume.Chapters.Add(chapter);
                 }

-                if (info.IsSpecial && chapter.Files.Count > 1)
-                {
-                    // Split the Manga files into 2 separate chapters
-                }
-
                 chapter.Files ??= new List<MangaFile>();
-                chapter.IsSpecial = info.IsSpecial;
+                chapter.IsSpecial = specialTreatment;
             }

             // Add files
             foreach (var info in parsedInfos)
             {
+                var specialTreatment = (info.IsSpecial || (info.Volumes == "0" && info.Chapters == "0"));
                 Chapter chapter = null;
                 try
                 {
-                    chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters || (info.IsSpecial && c.Range == info.Filename));
+                    chapter = volume.Chapters.SingleOrDefault(c => c.Range == info.Chapters || (specialTreatment && c.Range == info.Filename));
                 }
                 catch (Exception ex)
                 {
-                    _logger.LogError(ex, "There was an exception parsing chapter. Skipping Vol {VolumeNumber} Chapter {ChapterNumber}", volume.Name, info.Chapters);
+                    _logger.LogError(ex, "There was an exception parsing chapter. Skipping {SeriesName} Vol {VolumeNumber} Chapter {ChapterNumber} - Special treatment: {NeedsSpecialTreatment}", info.Series, volume.Name, info.Chapters, specialTreatment);
                 }
                 if (chapter == null) continue;
                 AddOrUpdateFileForChapter(chapter, info);
                 chapter.Number = Parser.Parser.MinimumNumberFromRange(info.Chapters) + "";
-                chapter.Range = info.IsSpecial ? info.Filename : info.Chapters;
+                chapter.Range = specialTreatment ? info.Filename : info.Chapters;
                 chapter.Pages = chapter.Files.Sum(f => f.Pages);
-                _metadataService.UpdateMetadata(chapter, _forceUpdate);
             }

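The new try/catch around the chapter lookup exists because Enumerable.SingleOrDefault throws InvalidOperationException when more than one element matches the predicate, which is exactly the duplicate-file situation the added comment describes; the catch logs the colliding file and returns. A self-contained sketch of that behavior:

using System;
using System.Collections.Generic;
using System.Linq;

public static class SingleOrDefaultSketch
{
    public static void Main()
    {
        var ranges = new List<string> { "1", "1", "2" };

        // No match: returns null (the default) rather than throwing.
        Console.WriteLine(ranges.SingleOrDefault(r => r == "5") ?? "<none>");

        try
        {
            // Two elements match "1", so SingleOrDefault throws InvalidOperationException,
            // mirroring the duplicate-chapter case the scanner logs and skips.
            ranges.SingleOrDefault(r => r == "1");
        }
        catch (InvalidOperationException ex)
        {
            Console.WriteLine($"duplicate detected: {ex.Message}");
        }
    }
}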
@@ -309,18 +316,14 @@ namespace API.Services.Tasks
             var existingChapters = volume.Chapters.ToList();
             foreach (var existingChapter in existingChapters)
             {
-                var hasInfo = existingChapter.IsSpecial ? parsedInfos.Any(v => v.Filename == existingChapter.Range)
+                var specialTreatment = (existingChapter.IsSpecial || (existingChapter.Number == "0" && !int.TryParse(existingChapter.Range, out int i)));
+                var hasInfo = specialTreatment ? parsedInfos.Any(v => v.Filename == existingChapter.Range)
                     : parsedInfos.Any(v => v.Chapters == existingChapter.Range);

                 if (!hasInfo || !existingChapter.Files.Any())
                 {
                     volume.Chapters.Remove(existingChapter);
                 }
-
-                // if (hasInfo && existingChapter.IsSpecial && existingChapter.Files.Count > 1)
-                // {
-                //
-                // }
             }

             _logger.LogDebug("Updated chapters from {StartingChaptersCount} to {ChapterCount}",
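For chapters already persisted there is no ParserInfo to consult, so the new check infers "special treatment" from stored fields: a Number of "0" and a Range that does not parse as an integer (for specials, Range holds the filename). A small sketch of that predicate; the helper name and sample values are illustrative, not the project's entity:

using System;

public static class SpecialDetectionSketch
{
    // Mirrors the scanner's heuristic: a chapter whose Number is "0" and whose Range
    // is not numeric (e.g. a filename) is treated as a special.
    public static bool IsSpecialTreatment(bool isSpecialFlag, string number, string range)
    {
        return isSpecialFlag || (number == "0" && !int.TryParse(range, out _));
    }

    public static void Main()
    {
        Console.WriteLine(IsSpecialTreatment(false, "0", "Some OneShot.zip")); // True: range is not numeric
        Console.WriteLine(IsSpecialTreatment(false, "0", "0"));                // False: "0" parses as an int
        Console.WriteLine(IsSpecialTreatment(false, "151", "151"));            // False: regular chapter
    }
}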
@@ -328,7 +331,7 @@ namespace API.Services.Tasks
         }

         /// <summary>
-        /// Attempts to either add a new instance of a show mapping to the scannedSeries bag or adds to an existing.
+        /// Attempts to either add a new instance of a show mapping to the _scannedSeries bag or adds to an existing.
         /// </summary>
         /// <param name="info"></param>
         private void TrackSeries(ParserInfo info)
@@ -337,6 +340,7 @@ namespace API.Services.Tasks

             // Check if normalized info.Series already exists and if so, update info to use that name instead
             var normalizedSeries = Parser.Parser.Normalize(info.Series);
+            _logger.LogDebug("Checking if we can merge {NormalizedSeries}", normalizedSeries);
             var existingName = _scannedSeries.SingleOrDefault(p => Parser.Parser.Normalize(p.Key) == normalizedSeries)
                 .Key;
             if (!string.IsNullOrEmpty(existingName) && info.Series != existingName)
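TrackSeries leans on the idea that two parsed names which normalize to the same key (the earlier comment's "History's strongest" vs "Historys strongest") should merge under the name already held in _scannedSeries. A standalone sketch of that merge pattern; the Normalize below is a stand-in (lower-case, strip non-alphanumerics) and may not match Parser.Parser.Normalize, and the dictionary of file lists simplifies the real ParserInfo structure:

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;

public static class SeriesMergeSketch
{
    // Stand-in normalizer, for illustration only.
    private static string Normalize(string name) =>
        Regex.Replace(name.ToLowerInvariant(), "[^a-z0-9]", string.Empty);

    public static void Main()
    {
        var scannedSeries = new ConcurrentDictionary<string, List<string>>();

        void Track(string series, string file)
        {
            var normalized = Normalize(series);
            var existingName = scannedSeries.Keys.FirstOrDefault(k => Normalize(k) == normalized);
            var key = existingName ?? series; // reuse the first-seen spelling as the canonical key
            scannedSeries.AddOrUpdate(key,
                _ => new List<string> { file },
                (_, files) => { files.Add(file); return files; });
        }

        Track("History's Strongest", "vol01.cbz");
        Track("Historys Strongest", "vol02.cbz");

        Console.WriteLine(scannedSeries.Count);                   // 1
        Console.WriteLine(string.Join(", ", scannedSeries.Keys)); // History's Strongest
    }
}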