Scrobbling Polish and Some Scanner fixes (#3638)

Co-authored-by: Fesaa <77553571+Fesaa@users.noreply.github.com>
Joe Milazzo 2025-03-15 11:13:01 -05:00 committed by GitHub
parent 82e8f7fade
commit f281a63934
19 changed files with 658 additions and 102 deletions

View File

@ -38,7 +38,7 @@ jobs:
strategy:
fail-fast: false
matrix:
language: [ 'csharp', 'javascript-typescript', 'python' ]
language: [ 'csharp', 'javascript-typescript' ]
# CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
# Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
# Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both

View File

@ -1,4 +1,5 @@
using System;
using System.Threading.Tasks;
using API.Helpers;
using Xunit;
@ -33,7 +34,7 @@ public class RateLimiterTests
}
[Fact]
public void AcquireTokens_Refill()
public async Task AcquireTokens_Refill()
{
// Arrange
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(1));
@ -43,14 +44,14 @@ public class RateLimiterTests
limiter.TryAcquire("test_key");
// Wait for refill
System.Threading.Thread.Sleep(1100);
await Task.Delay(1100);
// Assert
Assert.True(limiter.TryAcquire("test_key"));
}
[Fact]
public void AcquireTokens_Refill_WithOff()
public async Task AcquireTokens_Refill_WithOff()
{
// Arrange
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(10), false);
@ -60,7 +61,7 @@ public class RateLimiterTests
limiter.TryAcquire("test_key");
// Wait for refill
System.Threading.Thread.Sleep(2100);
await Task.Delay(2100);
// Assert
Assert.False(limiter.TryAcquire("test_key"));

View File

@ -11,8 +11,22 @@ public class StringHelperTests
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /> <br><br><br /> Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?</p>",
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /> Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?</p>"
)]
public void Test(string input, string expected)
public void TestSquashBreaklines(string input, string expected)
{
Assert.Equal(expected, StringHelper.SquashBreaklines(input));
}
[Theory]
[InlineData(
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /> (Source: Anime News Network)</p>",
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /></p>"
)]
[InlineData(
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /></p>(Source: Anime News Network)",
"<p>A Perfect Marriage Becomes a Perfect Affair!<br /></p>"
)]
public void TestRemoveSourceInDescription(string input, string expected)
{
Assert.Equal(expected, StringHelper.RemoveSourceInDescription(input));
}
}

View File

@ -451,4 +451,124 @@ public class ParseScannedFilesTests : AbstractDbTest
var changes = res.Count(sc => sc.HasChanged);
Assert.Equal(1, changes);
}
[Fact]
public async Task SubFoldersNoSubFolders_SkipAll()
{
const string testcase = "Subfolders and files at root - Manga.json";
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = library.Folders.First().Path;
_unitOfWork.LibraryRepository.Update(library);
await _unitOfWork.CommitAsync();
var fs = new FileSystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
var scanner = _scannerHelper.CreateServices(ds, fs);
await scanner.ScanLibrary(library.Id);
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
Assert.Equal(3, spiceAndWolf.Volumes.Count);
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
// Needs to use actual time as the write time is now, so if we set LastFolderChecked in the past
// it'll always trigger a scan as it was changed since the last scan.
Thread.Sleep(1100); // Ensure at least one second has passed since library scan
var res = await psf.ScanFiles(testDirectoryPath, true,
await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
Assert.DoesNotContain(res, sc => sc.HasChanged);
}
[Fact]
public async Task SubFoldersNoSubFolders_ScanAllAfterAddInRoot()
{
const string testcase = "Subfolders and files at root - Manga.json";
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = library.Folders.First().Path;
_unitOfWork.LibraryRepository.Update(library);
await _unitOfWork.CommitAsync();
var fs = new FileSystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
var scanner = _scannerHelper.CreateServices(ds, fs);
await scanner.ScanLibrary(library.Id);
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
Assert.Equal(3, spiceAndWolf.Volumes.Count);
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
_context.Series.Update(spiceAndWolf);
await _context.SaveChangesAsync();
// Add file at series root
var spiceAndWolfDir = Path.Join(testDirectoryPath, "Spice and Wolf");
File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 1.cbz"),
Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 4.cbz"));
var res = await psf.ScanFiles(testDirectoryPath, true,
await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
var changes = res.Count(sc => sc.HasChanged);
Assert.Equal(2, changes);
}
[Fact]
public async Task SubFoldersNoSubFolders_ScanAllAfterAddInSubFolder()
{
const string testcase = "Subfolders and files at root - Manga.json";
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = library.Folders.First().Path;
_unitOfWork.LibraryRepository.Update(library);
await _unitOfWork.CommitAsync();
var fs = new FileSystem();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
var scanner = _scannerHelper.CreateServices(ds, fs);
await scanner.ScanLibrary(library.Id);
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
Assert.Equal(3, spiceAndWolf.Volumes.Count);
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
_context.Series.Update(spiceAndWolf);
await _context.SaveChangesAsync();
// Add file in subfolder
var spiceAndWolfDir = Path.Join(Path.Join(testDirectoryPath, "Spice and Wolf"), "Spice and Wolf Vol. 3");
File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0011.cbz"),
Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0013.cbz"));
var res = await psf.ScanFiles(testDirectoryPath, true,
await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
var changes = res.Count(sc => sc.HasChanged);
Assert.Equal(2, changes);
}
}

View File

@ -55,6 +55,28 @@ public class ScannerServiceTests : AbstractDbTest
await _context.SaveChangesAsync();
}
protected async Task SetAllSeriesLastScannedInThePast(Library library, TimeSpan? duration = null)
{
foreach (var series in library.Series)
{
await SetLastScannedInThePast(series, duration, false);
}
await _context.SaveChangesAsync();
}
protected async Task SetLastScannedInThePast(Series series, TimeSpan? duration = null, bool save = true)
{
duration ??= TimeSpan.FromMinutes(2);
series.LastFolderScanned = DateTime.Now.Subtract(duration.Value);
_context.Series.Update(series);
if (save)
{
await _context.SaveChangesAsync();
}
}
[Fact]
public async Task ScanLibrary_ComicVine_PublisherFolder()
{
@ -611,9 +633,7 @@ public class ScannerServiceTests : AbstractDbTest
File.Copy(Path.Join(root1PlushFolder, "Plush v02.cbz"), Path.Join(root1PlushFolder, "Plush v03.cbz"));
// Emulate time passage by updating lastFolderScan to be a minute in the past
s.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(1));
_context.Series.Update(s);
await _context.SaveChangesAsync();
await SetLastScannedInThePast(s);
// Rescan to ensure nothing changes yet again
await scanner.ScanLibrary(library.Id, false);
@ -702,12 +722,7 @@ public class ScannerServiceTests : AbstractDbTest
Assert.Contains(postLib.Series, s => s.Name == "Plush");
// Emulate time passage by updating lastFolderScan to be a minute in the past
foreach (var s in postLib.Series)
{
s.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(1));
_context.Series.Update(s);
}
await _context.SaveChangesAsync();
await SetAllSeriesLastScannedInThePast(postLib);
// Fourth Scan: Run again to check stability (should not remove Accel)
await scanner.ScanLibrary(library.Id);
@ -794,7 +809,7 @@ public class ScannerServiceTests : AbstractDbTest
Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Count);
Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Sum(v => v.Chapters.Count));
Thread.Sleep(1100); // Ensure at least one second has passed since library scan
await SetAllSeriesLastScannedInThePast(postLib);
// Add a new chapter to a volume of the series, and scan. Validate that no chapters were lost, and the new
// chapter was added
@ -822,4 +837,94 @@ public class ScannerServiceTests : AbstractDbTest
Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Count);
Assert.Equal(3, executionerAndHerWayOfLife.Volumes.Sum(v => v.Chapters.Count)); // Incremented by 1
}
[Fact]
public async Task RemovalPickedUp_NoOtherChanges()
{
const string testcase = "Series removed when no other changes are made - Manga.json";
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = library.Folders.First().Path;
_unitOfWork.LibraryRepository.Update(library);
await _unitOfWork.CommitAsync();
var scanner = _scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Equal(2, postLib.Series.Count);
var executionerCopyDir = Path.Join(testDirectoryPath, "The Executioner and Her Way of Life");
Directory.Delete(executionerCopyDir, true);
await scanner.ScanLibrary(library.Id);
await _unitOfWork.CommitAsync();
postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
Assert.Single(postLib.Series, s => s.Name == "Spice and Wolf");
Assert.Equal(2, postLib.Series.First().Volumes.Count);
}
[Fact]
public async Task SubFoldersNoSubFolders_CorrectPickupAfterAdd()
{
// This test case is used in multiple tests and can result in conflicts if not separated
const string testcase = "Subfolders and files at root (2) - Manga.json";
var infos = new Dictionary<string, ComicInfo>();
var library = await _scannerHelper.GenerateScannerData(testcase, infos);
var testDirectoryPath = library.Folders.First().Path;
_unitOfWork.LibraryRepository.Update(library);
await _unitOfWork.CommitAsync();
var scanner = _scannerHelper.CreateServices();
await scanner.ScanLibrary(library.Id);
var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
Assert.Equal(3, spiceAndWolf.Volumes.Count);
Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
await SetLastScannedInThePast(spiceAndWolf);
// Add volume to Spice and Wolf series directory
var spiceAndWolfDir = Path.Join(testDirectoryPath, "Spice and Wolf");
File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 1.cbz"),
Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 4.cbz"));
await scanner.ScanLibrary(library.Id);
postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
Assert.Equal(4, spiceAndWolf.Volumes.Count);
Assert.Equal(5, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
await SetLastScannedInThePast(spiceAndWolf);
// Add file in subfolder
spiceAndWolfDir = Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3");
File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0012.cbz"),
Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0013.cbz"));
await scanner.ScanLibrary(library.Id);
postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
Assert.NotNull(postLib);
Assert.Single(postLib.Series);
spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
Assert.Equal(4, spiceAndWolf.Volumes.Count);
Assert.Equal(6, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
}
}

View File

@ -1,11 +1,208 @@
using API.Services.Plus;
using System.Linq;
using System.Threading.Tasks;
using API.DTOs.Scrobbling;
using API.Entities.Enums;
using API.Helpers.Builders;
using API.Services;
using API.Services.Plus;
using API.SignalR;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;
namespace API.Tests.Services;
#nullable enable
public class ScrobblingServiceTests
public class ScrobblingServiceTests : AbstractDbTest
{
private readonly ScrobblingService _service;
private readonly ILicenseService _licenseService;
private readonly ILocalizationService _localizationService;
private readonly ILogger<ScrobblingService> _logger;
private readonly IEmailService _emailService;
public ScrobblingServiceTests()
{
_licenseService = Substitute.For<ILicenseService>();
_localizationService = Substitute.For<ILocalizationService>();
_logger = Substitute.For<ILogger<ScrobblingService>>();
_emailService = Substitute.For<IEmailService>();
_service = new ScrobblingService(_unitOfWork, Substitute.For<IEventHub>(), _logger, _licenseService, _localizationService, _emailService);
}
protected override async Task ResetDb()
{
_context.ScrobbleEvent.RemoveRange(_context.ScrobbleEvent.ToList());
_context.Series.RemoveRange(_context.Series.ToList());
_context.Library.RemoveRange(_context.Library.ToList());
_context.AppUser.RemoveRange(_context.AppUser.ToList());
await _unitOfWork.CommitAsync();
}
private async Task SeedData()
{
var series = new SeriesBuilder("Test Series")
.WithFormat(MangaFormat.Archive)
.WithMetadata(new SeriesMetadataBuilder().Build())
.Build();
var library = new LibraryBuilder("Test Library", LibraryType.Manga)
.WithAllowScrobbling(true)
.WithSeries(series)
.Build();
_context.Library.Add(library);
var user = new AppUserBuilder("testuser", "testuser")
//.WithPreferences(new UserPreferencesBuilder().WithAniListScrobblingEnabled(true).Build())
.Build();
user.UserPreferences.AniListScrobblingEnabled = true;
_unitOfWork.UserRepository.Add(user);
await _unitOfWork.CommitAsync();
}
#region ScrobbleWantToReadUpdate Tests
[Fact]
public async Task ScrobbleWantToReadUpdate_NoExistingEvents_WantToRead_ShouldCreateNewEvent()
{
// Arrange
await SeedData();
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
const int userId = 1;
const int seriesId = 1;
// Act
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
// Assert
var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
Assert.Single(events);
Assert.Equal(ScrobbleEventType.AddWantToRead, events[0].ScrobbleEventType);
Assert.Equal(userId, events[0].AppUserId);
}
[Fact]
public async Task ScrobbleWantToReadUpdate_NoExistingEvents_RemoveWantToRead_ShouldCreateNewEvent()
{
// Arrange
await SeedData();
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
const int userId = 1;
const int seriesId = 1;
// Act
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
// Assert
var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
Assert.Single(events);
Assert.Equal(ScrobbleEventType.RemoveWantToRead, events[0].ScrobbleEventType);
Assert.Equal(userId, events[0].AppUserId);
}
[Fact]
public async Task ScrobbleWantToReadUpdate_ExistingWantToReadEvent_WantToRead_ShouldNotCreateNewEvent()
{
// Arrange
await SeedData();
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
const int userId = 1;
const int seriesId = 1;
// First, let's create an event through the service
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
// Act - Try to create the same event again
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
// Assert
var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
Assert.Single(events);
Assert.All(events, e => Assert.Equal(ScrobbleEventType.AddWantToRead, e.ScrobbleEventType));
}
[Fact]
public async Task ScrobbleWantToReadUpdate_ExistingWantToReadEvent_RemoveWantToRead_ShouldAddRemoveEvent()
{
// Arrange
await SeedData();
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
const int userId = 1;
const int seriesId = 1;
// First, let's create a want-to-read event through the service
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
// Act - Now remove from want-to-read
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
// Assert
var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
Assert.Single(events);
Assert.Contains(events, e => e.ScrobbleEventType == ScrobbleEventType.RemoveWantToRead);
}
[Fact]
public async Task ScrobbleWantToReadUpdate_ExistingRemoveWantToReadEvent_RemoveWantToRead_ShouldNotCreateNewEvent()
{
// Arrange
await SeedData();
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
const int userId = 1;
const int seriesId = 1;
// First, let's create a remove-from-want-to-read event through the service
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
// Act - Try to create the same event again
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
// Assert
var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
Assert.Single(events);
Assert.All(events, e => Assert.Equal(ScrobbleEventType.RemoveWantToRead, e.ScrobbleEventType));
}
[Fact]
public async Task ScrobbleWantToReadUpdate_ExistingRemoveWantToReadEvent_WantToRead_ShouldAddWantToReadEvent()
{
// Arrange
await SeedData();
_licenseService.HasActiveLicense().Returns(Task.FromResult(true));
const int userId = 1;
const int seriesId = 1;
// First, let's create a remove-from-want-to-read event through the service
await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
// Act - Now add to want-to-read
await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
// Assert
var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
Assert.Single(events);
Assert.Contains(events, e => e.ScrobbleEventType == ScrobbleEventType.AddWantToRead);
}
#endregion
[Theory]
[InlineData("https://anilist.co/manga/35851/Byeontaega-Doeja/", 35851)]
[InlineData("https://anilist.co/manga/30105", 30105)]

View File

@ -0,0 +1,6 @@
[
"Spice and Wolf/Spice and Wolf Vol. 1.cbz",
"Spice and Wolf/Spice and Wolf Vol. 2.cbz",
"The Executioner and Her Way of Life/The Executioner and Her Way of Life Vol. 1.cbz",
"The Executioner and Her Way of Life/The Executioner and Her Way of Life Vol. 2.cbz"
]

View File

@ -0,0 +1,6 @@
[
"Spice and Wolf/Spice and Wolf Vol. 1.cbz",
"Spice and Wolf/Spice and Wolf Vol. 2.cbz",
"Spice and Wolf/Spice and Wolf Vol. 3/Spice and Wolf Vol. 3 Ch. 0011.cbz",
"Spice and Wolf/Spice and Wolf Vol. 3/Spice and Wolf Vol. 3 Ch. 0012.cbz"
]

View File

@ -0,0 +1,6 @@
[
"Spice and Wolf/Spice and Wolf Vol. 1.cbz",
"Spice and Wolf/Spice and Wolf Vol. 2.cbz",
"Spice and Wolf/Spice and Wolf Vol. 3/Spice and Wolf Vol. 3 Ch. 0011.cbz",
"Spice and Wolf/Spice and Wolf Vol. 3/Spice and Wolf Vol. 3 Ch. 0012.cbz"
]

View File

@ -12,6 +12,7 @@ using Hangfire;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using TaskScheduler = API.Services.TaskScheduler;
namespace API.Controllers;
@ -81,6 +82,10 @@ public class LicenseController(
}
}
/// <summary>
/// Remove the Kavita+ License on the Server
/// </summary>
/// <returns></returns>
[Authorize("RequireAdminRole")]
[HttpDelete]
[ResponseCache(CacheProfileName = ResponseCacheProfiles.LicenseCache)]
@ -91,7 +96,9 @@ public class LicenseController(
setting.Value = null;
unitOfWork.SettingsRepository.Update(setting);
await unitOfWork.CommitAsync();
await taskScheduler.ScheduleKavitaPlusTasks();
TaskScheduler.RemoveKavitaPlusTasks();
return Ok();
}

View File

@ -99,7 +99,7 @@ public class SeriesController : BaseApiController
/// <param name="filterDto"></param>
/// <returns></returns>
[HttpPost("v2")]
public async Task<ActionResult<IEnumerable<Series>>> GetSeriesForLibraryV2([FromQuery] UserParams userParams, [FromBody] FilterV2Dto filterDto)
public async Task<ActionResult<PagedList<SeriesDto>>> GetSeriesForLibraryV2([FromQuery] UserParams userParams, [FromBody] FilterV2Dto filterDto)
{
var userId = User.GetUserId();
var series =

View File

@ -32,6 +32,8 @@ public interface IScrobbleRepository
Task<IEnumerable<ScrobbleEvent>> GetUserEventsForSeries(int userId, int seriesId);
Task<PagedList<ScrobbleEventDto>> GetUserEvents(int userId, ScrobbleEventFilter filter, UserParams pagination);
Task<IList<ScrobbleEvent>> GetAllEventsForSeries(int seriesId);
Task<IList<ScrobbleEvent>> GetAllEventsWithSeriesIds(IEnumerable<int> seriesIds);
Task<IList<ScrobbleEvent>> GetEvents();
}
/// <summary>
@ -97,6 +99,11 @@ public class ScrobbleRepository : IScrobbleRepository
.ToListAsync();
}
/// <summary>
/// Returns all events that were processed <paramref name="daysAgo"/> or more days ago
/// </summary>
/// <param name="daysAgo"></param>
/// <returns></returns>
public async Task<IList<ScrobbleEvent>> GetProcessedEvents(int daysAgo)
{
var date = DateTime.UtcNow.Subtract(TimeSpan.FromDays(daysAgo));
@ -172,8 +179,22 @@ public class ScrobbleRepository : IScrobbleRepository
public async Task<IList<ScrobbleEvent>> GetAllEventsForSeries(int seriesId)
{
return await _context.ScrobbleEvent.Where(e => e.SeriesId == seriesId)
return await _context.ScrobbleEvent
.Where(e => e.SeriesId == seriesId)
.ToListAsync();
}
public async Task<IList<ScrobbleEvent>> GetAllEventsWithSeriesIds(IEnumerable<int> seriesIds)
{
return await _context.ScrobbleEvent
.Where(e => seriesIds.Contains(e.SeriesId))
.ToListAsync();
}
public async Task<IList<ScrobbleEvent>> GetEvents()
{
return await _context.ScrobbleEvent
.Include(e => e.AppUser)
.ToListAsync();
}
}

View File

@ -3,8 +3,19 @@
namespace API.Helpers;
#nullable enable
public static class StringHelper
public static partial class StringHelper
{
#region Regex Source Generators
[GeneratedRegex(@"\s?\(Source:\s*[^)]+\)")]
private static partial Regex SourceRegex();
[GeneratedRegex(@"<br\s*/?>", RegexOptions.IgnoreCase | RegexOptions.Compiled, "en-US")]
private static partial Regex BrStandardizeRegex();
[GeneratedRegex(@"(?:<br />\s*)+", RegexOptions.IgnoreCase | RegexOptions.Compiled, "en-US")]
private static partial Regex BrMultipleRegex();
[GeneratedRegex(@"\s+")]
private static partial Regex WhiteSpaceRegex();
#endregion
/// <summary>
/// Used to squash duplicate break tags and newlines into a single newline.
/// </summary>
@ -19,13 +30,13 @@ public static class StringHelper
}
// First standardize all br tags to <br /> format
summary = Regex.Replace(summary, @"<br\s*/?>", "<br />", RegexOptions.IgnoreCase | RegexOptions.Compiled);
summary = BrStandardizeRegex().Replace(summary, "<br />");
// Replace multiple consecutive br tags with a single br tag
summary = Regex.Replace(summary, @"(?:<br />\s*)+", "<br /> ", RegexOptions.IgnoreCase | RegexOptions.Compiled);
summary = BrMultipleRegex().Replace(summary, "<br /> ");
// Normalize remaining whitespace (replace multiple spaces with a single space)
summary = Regex.Replace(summary, @"\s+", " ").Trim();
summary = WhiteSpaceRegex().Replace(summary, " ").Trim();
return summary.Trim();
}
@ -37,6 +48,8 @@ public static class StringHelper
/// <returns></returns>
public static string? RemoveSourceInDescription(string? description)
{
return description?.Trim();
if (string.IsNullOrEmpty(description)) return description;
return SourceRegex().Replace(description, string.Empty).Trim();
}
}

View File

@ -231,7 +231,7 @@ public class ExternalMetadataService : IExternalMetadataService
// Some summaries can contain multiple <br/>s; we need to ensure there is only one
foreach (var result in results)
{
result.Series.Summary = StringHelper.SquashBreaklines(result.Series.Summary);
result.Series.Summary = StringHelper.RemoveSourceInDescription(StringHelper.SquashBreaklines(result.Series.Summary));
}
return results;
@ -730,7 +730,7 @@ public class ExternalMetadataService : IExternalMetadataService
{
Name = w.Name,
AniListId = ScrobblingService.ExtractId<int>(w.Url, ScrobblingService.AniListCharacterWebsite),
Description = StringHelper.SquashBreaklines(w.Description),
Description = StringHelper.RemoveSourceInDescription(StringHelper.SquashBreaklines(w.Description)),
})
.Concat(series.Metadata.People
.Where(p => p.Role == PersonRole.Character)
@ -809,7 +809,7 @@ public class ExternalMetadataService : IExternalMetadataService
{
Name = w.Name,
AniListId = ScrobblingService.ExtractId<int>(w.Url, ScrobblingService.AniListStaffWebsite),
Description = StringHelper.SquashBreaklines(w.Description),
Description = StringHelper.RemoveSourceInDescription(StringHelper.SquashBreaklines(w.Description)),
})
.Concat(series.Metadata.People
.Where(p => p.Role == PersonRole.CoverArtist)
@ -866,7 +866,7 @@ public class ExternalMetadataService : IExternalMetadataService
{
Name = w.Name,
AniListId = ScrobblingService.ExtractId<int>(w.Url, ScrobblingService.AniListStaffWebsite),
Description = StringHelper.SquashBreaklines(w.Description),
Description = StringHelper.RemoveSourceInDescription(StringHelper.SquashBreaklines(w.Description)),
})
.Concat(series.Metadata.People
.Where(p => p.Role == PersonRole.Writer)
@ -1122,7 +1122,7 @@ public class ExternalMetadataService : IExternalMetadataService
return false;
}
series.Metadata.Summary = StringHelper.SquashBreaklines(externalMetadata.Summary);
series.Metadata.Summary = StringHelper.RemoveSourceInDescription(StringHelper.SquashBreaklines(externalMetadata.Summary));
return true;
}
@ -1432,7 +1432,7 @@ public class ExternalMetadataService : IExternalMetadataService
.PostJsonAsync(payload)
.ReceiveJson<ExternalSeriesDetailDto>();
ret.Summary = StringHelper.SquashBreaklines(ret.Summary);
ret.Summary = StringHelper.RemoveSourceInDescription(StringHelper.SquashBreaklines(ret.Summary));
return ret;

View File

@ -165,17 +165,15 @@ public class ScrobblingService : IScrobblingService
private async Task<bool> ShouldSendEarlyReminder(int userId, DateTime tokenExpiry)
{
var earlyReminderDate = tokenExpiry.AddDays(-5);
if (earlyReminderDate <= DateTime.UtcNow)
{
var hasAlreadySentReminder = await _unitOfWork.DataContext.EmailHistory
.AnyAsync(h => h.AppUserId == userId && h.Sent &&
h.EmailTemplate == EmailService.TokenExpiringSoonTemplate &&
h.SendDate >= earlyReminderDate);
if (earlyReminderDate > DateTime.UtcNow) return false;
return !hasAlreadySentReminder;
}
var hasAlreadySentReminder = await _unitOfWork.DataContext.EmailHistory
.AnyAsync(h => h.AppUserId == userId && h.Sent &&
h.EmailTemplate == EmailService.TokenExpiringSoonTemplate &&
h.SendDate >= earlyReminderDate);
return !hasAlreadySentReminder;
return false;
}
/// <summary>
@ -183,17 +181,15 @@ public class ScrobblingService : IScrobblingService
/// </summary>
private async Task<bool> ShouldSendExpirationReminder(int userId, DateTime tokenExpiry)
{
if (tokenExpiry <= DateTime.UtcNow)
{
var hasAlreadySentExpirationEmail = await _unitOfWork.DataContext.EmailHistory
.AnyAsync(h => h.AppUserId == userId && h.Sent &&
h.EmailTemplate == EmailService.TokenExpirationTemplate &&
h.SendDate >= tokenExpiry);
if (tokenExpiry > DateTime.UtcNow) return false;
return !hasAlreadySentExpirationEmail;
}
var hasAlreadySentExpirationEmail = await _unitOfWork.DataContext.EmailHistory
.AnyAsync(h => h.AppUserId == userId && h.Sent &&
h.EmailTemplate == EmailService.TokenExpirationTemplate &&
h.SendDate >= tokenExpiry);
return !hasAlreadySentExpirationEmail;
return false;
}
@ -433,10 +429,17 @@ public class ScrobblingService : IScrobblingService
if (await CheckIfCannotScrobble(userId, seriesId, series)) return;
_logger.LogInformation("Processing Scrobbling want-to-read event for {UserId} on {SeriesName}", userId, series.Name);
var existing = await _unitOfWork.ScrobbleRepository.Exists(userId, series.Id,
onWantToRead ? ScrobbleEventType.AddWantToRead : ScrobbleEventType.RemoveWantToRead);
if (existing) return; // BUG: If I take a series and add to remove from want to read, then add to want to read, Kavita rejects the second as a duplicate, when it's not
// Get existing events for this series/user
var existingEvents = (await _unitOfWork.ScrobbleRepository.GetUserEventsForSeries(userId, seriesId))
.Where(e => new[] { ScrobbleEventType.AddWantToRead, ScrobbleEventType.RemoveWantToRead }.Contains(e.ScrobbleEventType));
// Remove all existing want-to-read events for this series/user
foreach (var existingEvent in existingEvents)
{
_unitOfWork.ScrobbleRepository.Remove(existingEvent);
}
// Create the new event
var evt = new ScrobbleEvent()
{
SeriesId = series.Id,
@ -447,6 +450,7 @@ public class ScrobblingService : IScrobblingService
AppUserId = userId,
Format = series.Library.Type.ConvertToPlusMediaFormat(series.Format),
};
_unitOfWork.ScrobbleRepository.Attach(evt);
await _unitOfWork.CommitAsync();
_logger.LogDebug("Added Scrobbling WantToRead update on {SeriesName} with Userid {UserId} ", series.Name, userId);
@ -465,6 +469,7 @@ public class ScrobblingService : IScrobblingService
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(series.LibraryId);
if (library is not {AllowScrobbling: true}) return true;
if (!ExternalMetadataService.IsPlusEligible(library.Type)) return true;
return false;
}
@ -481,7 +486,7 @@ public class ScrobblingService : IScrobblingService
}
catch (Exception e)
{
_logger.LogError(e, "An error happened during the request to Kavita+ API");
_logger.LogError(e, "An error happened trying to get rate limit from Kavita+ API");
}
return 0;
@ -737,8 +742,10 @@ public class ScrobblingService : IScrobblingService
[AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public async Task ClearProcessedEvents()
{
var events = await _unitOfWork.ScrobbleRepository.GetProcessedEvents(7);
const int daysAgo = 7;
var events = await _unitOfWork.ScrobbleRepository.GetProcessedEvents(daysAgo);
_unitOfWork.ScrobbleRepository.Remove(events);
_logger.LogInformation("Removing {Count} scrobble events that have been processed {DaysAgo}+ days ago", events.Count, daysAgo);
await _unitOfWork.CommitAsync();
}
@ -752,7 +759,6 @@ public class ScrobblingService : IScrobblingService
{
// Check how many scrobble events we have available then only do those.
var userRateLimits = new Dictionary<int, int>();
var license = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey);
var progressCounter = 0;
@ -784,32 +790,24 @@ public class ScrobblingService : IScrobblingService
.Where(e => !errors.Contains(e.SeriesId))
.ToList();
var decisions = addToWantToRead
.GroupBy(item => new { item.SeriesId, item.AppUserId })
.Select(group => new
{
group.Key.SeriesId,
UserId = group.Key.AppUserId,
Event = group.First(),
Decision = group.Count() - removeWantToRead
.Count(removeItem => removeItem.SeriesId == group.Key.SeriesId && removeItem.AppUserId == group.Key.AppUserId)
})
.Where(d => d.Decision > 0)
.Select(d => d.Event)
.ToList();
var decisions = CalculateNetWantToReadDecisions(addToWantToRead, removeWantToRead);
// Get all the applicable users to scrobble and set their rate limits
var usersToScrobble = await PrepareUsersToScrobble(readEvents, addToWantToRead, removeWantToRead, ratingEvents, userRateLimits, license);
// Clear any events that are already in the error table
var erroredEvents = await _unitOfWork.ScrobbleRepository.GetAllEventsWithSeriesIds(errors);
if (erroredEvents.Count > 0)
{
_unitOfWork.ScrobbleRepository.Remove(erroredEvents);
await _unitOfWork.CommitAsync();
}
var totalEvents = readEvents.Count + decisions.Count + ratingEvents.Count;
if (totalEvents == 0) return;
// Get all the applicable users to scrobble and set their rate limits
var license = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey);
var usersToScrobble = await PrepareUsersToScrobble(readEvents, addToWantToRead, removeWantToRead, ratingEvents, userRateLimits, license);
if (totalEvents == 0)
{
return;
}
_logger.LogInformation("Scrobble Processing Details:" +
"\n Read Events: {ReadEventsCount}" +
"\n Want to Read Events: {WantToReadEventsCount}" +
@ -828,7 +826,7 @@ public class ScrobblingService : IScrobblingService
progressCounter = await ProcessRatingEvents(ratingEvents, userRateLimits, usersToScrobble, totalEvents, progressCounter);
progressCounter = await ProcessRatingEvents(decisions, userRateLimits, usersToScrobble, totalEvents, addToWantToRead, removeWantToRead, progressCounter);
progressCounter = await ProcessWantToReadRatingEvents(decisions, userRateLimits, usersToScrobble, totalEvents, progressCounter);
}
catch (FlurlHttpException ex)
{
@ -840,10 +838,61 @@ public class ScrobblingService : IScrobblingService
await SaveToDb(progressCounter, true);
_logger.LogInformation("Scrobbling Events is complete");
// Clean up any events left over due to bugs or legacy behavior
try
{
var eventsWithoutAnilistToken = (await _unitOfWork.ScrobbleRepository.GetEvents())
.Where(e => !e.IsProcessed && !e.IsErrored)
.Where(e => string.IsNullOrEmpty(e.AppUser.AniListAccessToken));
_unitOfWork.ScrobbleRepository.Remove(eventsWithoutAnilistToken);
await _unitOfWork.CommitAsync();
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception when trying to delete old scrobble events when the user has no active token");
}
}
private async Task<int> ProcessRatingEvents(List<ScrobbleEvent> decisions, Dictionary<int, int> userRateLimits, List<AppUser> usersToScrobble, int totalEvents,
List<ScrobbleEvent> addToWantToRead, List<ScrobbleEvent> removeWantToRead, int progressCounter)
/// <summary>
/// Calculates the net want-to-read decisions by considering all events.
/// Returns events that represent the final state for each user/series pair.
/// </summary>
/// <param name="addEvents">List of events for adding to want-to-read</param>
/// <param name="removeEvents">List of events for removing from want-to-read</param>
/// <returns>List of events that represent the final state (add or remove)</returns>
private static List<ScrobbleEvent> CalculateNetWantToReadDecisions(List<ScrobbleEvent> addEvents, List<ScrobbleEvent> removeEvents)
{
// Create a dictionary to track the latest event for each user/series combination
var latestEvents = new Dictionary<(int SeriesId, int AppUserId), ScrobbleEvent>();
// Process all add events
foreach (var addEvent in addEvents)
{
var key = (addEvent.SeriesId, addEvent.AppUserId);
if (latestEvents.TryGetValue(key, out var value) && addEvent.CreatedUtc <= value.CreatedUtc) continue;
value = addEvent;
latestEvents[key] = value;
}
// Process all remove events
foreach (var removeEvent in removeEvents)
{
var key = (removeEvent.SeriesId, removeEvent.AppUserId);
if (latestEvents.TryGetValue(key, out var value) && removeEvent.CreatedUtc <= value.CreatedUtc) continue;
value = removeEvent;
latestEvents[key] = value;
}
// Return all events that represent the final state
return latestEvents.Values.ToList();
}
private async Task<int> ProcessWantToReadRatingEvents(List<ScrobbleEvent> decisions, Dictionary<int, int> userRateLimits, List<AppUser> usersToScrobble, int totalEvents, int progressCounter)
{
progressCounter = await ProcessEvents(decisions, userRateLimits, usersToScrobble.Count, progressCounter,
totalEvents, evt => Task.FromResult(new ScrobbleDto()
@ -861,15 +910,9 @@ public class ScrobblingService : IScrobblingService
}));
// After decisions, we need to mark the want-to-read and remove-from-want-to-read events as completed
if (decisions.All(d => d.IsProcessed))
if (decisions.Any(d => d.IsProcessed))
{
foreach (var scrobbleEvent in addToWantToRead)
{
scrobbleEvent.IsProcessed = true;
scrobbleEvent.ProcessDateUtc = DateTime.UtcNow;
_unitOfWork.ScrobbleRepository.Update(scrobbleEvent);
}
foreach (var scrobbleEvent in removeWantToRead)
foreach (var scrobbleEvent in decisions.Where(d => d.IsProcessed))
{
scrobbleEvent.IsProcessed = true;
scrobbleEvent.ProcessDateUtc = DateTime.UtcNow;
@ -899,6 +942,7 @@ public class ScrobblingService : IScrobblingService
}));
}
private async Task<int> ProcessReadEvents(List<ScrobbleEvent> readEvents, Dictionary<int, int> userRateLimits, List<AppUser> usersToScrobble, int totalEvents,
int progressCounter)
{
@ -946,6 +990,7 @@ public class ScrobblingService : IScrobblingService
.Where(user => user.UserPreferences.AniListScrobblingEnabled)
.DistinctBy(u => u.Id)
.ToList();
foreach (var user in usersToScrobble)
{
await SetAndCheckRateLimit(userRateLimits, user, license.Value);
@ -980,7 +1025,7 @@ public class ScrobblingService : IScrobblingService
SeriesId = evt.SeriesId
});
await _unitOfWork.CommitAsync();
return 0;
continue;
}
if (evt.Series.IsBlacklisted || evt.Series.DontMatch)
@ -999,7 +1044,7 @@ public class ScrobblingService : IScrobblingService
_unitOfWork.ScrobbleRepository.Update(evt);
await _unitOfWork.CommitAsync();
return 0;
continue;
}
var count = await SetAndCheckRateLimit(userRateLimits, evt.AppUser, license.Value);
@ -1042,12 +1087,12 @@ public class ScrobblingService : IScrobblingService
evt.IsErrored = true;
evt.ErrorDetails = AccessTokenErrorMessage;
_unitOfWork.ScrobbleRepository.Update(evt);
return progressCounter;
}
}
catch (Exception)
catch (Exception ex)
{
/* Swallow as it's already been handled in PostScrobbleUpdate */
_logger.LogError(ex, "Error processing event {EventId}", evt.Id);
}
await SaveToDb(progressCounter);
// We can use count to determine how long to sleep based on rate gain. It might be specific to AniList, but we can model others
@ -1061,13 +1106,10 @@ public class ScrobblingService : IScrobblingService
private async Task SaveToDb(int progressCounter, bool force = false)
{
if (!force || progressCounter % 5 == 0)
if ((force || progressCounter % 5 == 0) && _unitOfWork.HasChanges())
{
if (_unitOfWork.HasChanges())
{
_logger.LogDebug("Saving Progress");
await _unitOfWork.CommitAsync();
}
_logger.LogDebug("Saving Scrobbling Event Processing Progress");
await _unitOfWork.CommitAsync();
}
}
@ -1105,6 +1147,7 @@ public class ScrobblingService : IScrobblingService
{
var providers = new List<ScrobbleProvider>();
if (!string.IsNullOrEmpty(appUser.AniListAccessToken)) providers.Add(ScrobbleProvider.AniList);
return providers;
}

View File

@ -76,8 +76,8 @@ public static class ReviewService
plainText = Regex.Replace(plainText, @"~~", string.Empty);
plainText = Regex.Replace(plainText, @"__", string.Empty);
// Take the first 100 characters
plainText = plainText.Length > 100 ? plainText.Substring(0, BodyTextLimit) : plainText;
// Take the first BodyTextLimit characters
plainText = plainText.Length > BodyTextLimit ? plainText.Substring(0, BodyTextLimit) : plainText;
return plainText + "…";
}

View File

@ -236,6 +236,20 @@ public class TaskScheduler : ITaskScheduler
RecurringJobOptions);
}
/// <summary>
/// Removes any Kavita+ Recurring Jobs
/// </summary>
public static void RemoveKavitaPlusTasks()
{
RecurringJob.RemoveIfExists(CheckScrobblingTokensId);
RecurringJob.RemoveIfExists(LicenseCheckId);
RecurringJob.RemoveIfExists(ProcessScrobblingEventsId);
RecurringJob.RemoveIfExists(ProcessProcessedScrobblingEventsId);
RecurringJob.RemoveIfExists(KavitaPlusDataRefreshId);
RecurringJob.RemoveIfExists(KavitaPlusStackSyncId);
RecurringJob.RemoveIfExists(KavitaPlusWantToReadSyncId);
}
#region StatsTasks

View File

@ -166,8 +166,10 @@ public class ParseScannedFiles
// Don't process any folders where we've already scanned everything below
if (processedDirs.Any(d => d.StartsWith(directory + Path.AltDirectorySeparatorChar) || d.Equals(directory)))
{
var hasChanged = !HasSeriesFolderNotChangedSinceLastScan(library, seriesPaths, directory, forceCheck);
// Skip this directory as we've already processed a parent, unless there are loose files in that directory
CheckSurfaceFiles(result, directory, folderPath, fileExtensions, matcher);
// and they have changes
CheckSurfaceFiles(result, directory, folderPath, fileExtensions, matcher, hasChanged);
continue;
}
@ -290,14 +292,14 @@ public class ParseScannedFiles
/// <summary>
/// Performs a full scan of the directory and adds it to the result.
/// </summary>
private void CheckSurfaceFiles(List<ScanResult> result, string directory, string folderPath, string fileExtensions, GlobMatcher matcher)
private void CheckSurfaceFiles(List<ScanResult> result, string directory, string folderPath, string fileExtensions, GlobMatcher matcher, bool hasChanged)
{
var files = _directoryService.ScanFiles(directory, fileExtensions, matcher, SearchOption.TopDirectoryOnly);
if (files.Count == 0)
{
return;
}
result.Add(CreateScanResult(directory, folderPath, true, files));
result.Add(CreateScanResult(directory, folderPath, hasChanged, files));
}
/// <summary>

View File

@ -103,6 +103,7 @@ export class UserScrobbleHistoryComponent implements OnInit {
this.isLoading = true;
this.cdRef.markForCheck();
// BUG: Table should be sorted by lastModifiedUtc by default
this.scrobblingService.getScrobbleEvents({query, field, isDescending}, page, pageSize)
.pipe(take(1))
.subscribe((result: PaginatedResult<ScrobbleEvent[]>) => {