diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index e2da33628..a77338866 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -38,7 +38,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- language: [ 'csharp', 'javascript-typescript', 'python' ]
+ language: [ 'csharp', 'javascript-typescript' ]
# CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
# Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
# Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
diff --git a/API.Tests/Helpers/RateLimiterTests.cs b/API.Tests/Helpers/RateLimiterTests.cs
index c05ce4e6d..e9b0030b9 100644
--- a/API.Tests/Helpers/RateLimiterTests.cs
+++ b/API.Tests/Helpers/RateLimiterTests.cs
@@ -1,4 +1,5 @@
using System;
+using System.Threading.Tasks;
using API.Helpers;
using Xunit;
@@ -33,7 +34,7 @@ public class RateLimiterTests
}
[Fact]
- public void AcquireTokens_Refill()
+ public async Task AcquireTokens_Refill()
{
// Arrange
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(1));
@@ -43,14 +44,14 @@ public class RateLimiterTests
limiter.TryAcquire("test_key");
// Wait for refill
- System.Threading.Thread.Sleep(1100);
+ await Task.Delay(1100);
// Assert
Assert.True(limiter.TryAcquire("test_key"));
}
[Fact]
- public void AcquireTokens_Refill_WithOff()
+ public async Task AcquireTokens_Refill_WithOff()
{
// Arrange
var limiter = new RateLimiter(2, TimeSpan.FromSeconds(10), false);
@@ -60,7 +61,7 @@ public class RateLimiterTests
limiter.TryAcquire("test_key");
// Wait for refill
- System.Threading.Thread.Sleep(2100);
+ await Task.Delay(2100);
// Assert
Assert.False(limiter.TryAcquire("test_key"));
diff --git a/API.Tests/Helpers/StringHelperTests.cs b/API.Tests/Helpers/StringHelperTests.cs
index 76b089069..6ae079c3e 100644
--- a/API.Tests/Helpers/StringHelperTests.cs
+++ b/API.Tests/Helpers/StringHelperTests.cs
@@ -11,8 +11,22 @@ public class StringHelperTests
"
A Perfect Marriage Becomes a Perfect Affair!
Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?
",
"A Perfect Marriage Becomes a Perfect Affair!
Every woman wishes for that happily ever after, but when time flies by and you've become a neglected housewife, what's a woman to do?
"
)]
- public void Test(string input, string expected)
+ public void TestSquashBreaklines(string input, string expected)
{
Assert.Equal(expected, StringHelper.SquashBreaklines(input));
}
+
+ [Theory]
+ [InlineData(
+ "A Perfect Marriage Becomes a Perfect Affair!
(Source: Anime News Network)
",
+ "A Perfect Marriage Becomes a Perfect Affair!
"
+ )]
+ [InlineData(
+ "A Perfect Marriage Becomes a Perfect Affair!
(Source: Anime News Network)",
+ "A Perfect Marriage Becomes a Perfect Affair!
"
+ )]
+ public void TestRemoveSourceInDescription(string input, string expected)
+ {
+ Assert.Equal(expected, StringHelper.RemoveSourceInDescription(input));
+ }
}
diff --git a/API.Tests/Services/ParseScannedFilesTests.cs b/API.Tests/Services/ParseScannedFilesTests.cs
index b7bdaf57b..c286a9c25 100644
--- a/API.Tests/Services/ParseScannedFilesTests.cs
+++ b/API.Tests/Services/ParseScannedFilesTests.cs
@@ -451,4 +451,124 @@ public class ParseScannedFilesTests : AbstractDbTest
var changes = res.Count(sc => sc.HasChanged);
Assert.Equal(1, changes);
}
+
+ [Fact]
+ public async Task SubFoldersNoSubFolders_SkipAll()
+ {
+ const string testcase = "Subfolders and files at root - Manga.json";
+        var infos = new Dictionary<string, ComicInfo>();
+ var library = await _scannerHelper.GenerateScannerData(testcase, infos);
+ var testDirectoryPath = library.Folders.First().Path;
+
+ _unitOfWork.LibraryRepository.Update(library);
+ await _unitOfWork.CommitAsync();
+
+ var fs = new FileSystem();
+        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
+        var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
+            new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
+
+ var scanner = _scannerHelper.CreateServices(ds, fs);
+ await scanner.ScanLibrary(library.Id);
+
+ var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
+ Assert.NotNull(postLib);
+ Assert.Single(postLib.Series);
+
+ var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
+ Assert.Equal(3, spiceAndWolf.Volumes.Count);
+ Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
+
+ // Needs to be actual time as the write time is now, so if we set LastFolderChecked in the past
+        // it'll always trigger a scan as it was changed since the last scan.
+ Thread.Sleep(1100); // Ensure at least one second has passed since library scan
+
+ var res = await psf.ScanFiles(testDirectoryPath, true,
+ await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
+ Assert.DoesNotContain(res, sc => sc.HasChanged);
+ }
+
+ [Fact]
+ public async Task SubFoldersNoSubFolders_ScanAllAfterAddInRoot()
+ {
+ const string testcase = "Subfolders and files at root - Manga.json";
+        var infos = new Dictionary<string, ComicInfo>();
+ var library = await _scannerHelper.GenerateScannerData(testcase, infos);
+ var testDirectoryPath = library.Folders.First().Path;
+
+ _unitOfWork.LibraryRepository.Update(library);
+ await _unitOfWork.CommitAsync();
+
+ var fs = new FileSystem();
+        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
+        var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
+            new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
+
+ var scanner = _scannerHelper.CreateServices(ds, fs);
+ await scanner.ScanLibrary(library.Id);
+
+ var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
+ Assert.NotNull(postLib);
+ Assert.Single(postLib.Series);
+
+ var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
+ Assert.Equal(3, spiceAndWolf.Volumes.Count);
+ Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
+
+ spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
+ _context.Series.Update(spiceAndWolf);
+ await _context.SaveChangesAsync();
+
+ // Add file at series root
+ var spiceAndWolfDir = Path.Join(testDirectoryPath, "Spice and Wolf");
+ File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 1.cbz"),
+ Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 4.cbz"));
+
+ var res = await psf.ScanFiles(testDirectoryPath, true,
+ await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
+ var changes = res.Count(sc => sc.HasChanged);
+ Assert.Equal(2, changes);
+ }
+
+ [Fact]
+ public async Task SubFoldersNoSubFolders_ScanAllAfterAddInSubFolder()
+ {
+ const string testcase = "Subfolders and files at root - Manga.json";
+        var infos = new Dictionary<string, ComicInfo>();
+ var library = await _scannerHelper.GenerateScannerData(testcase, infos);
+ var testDirectoryPath = library.Folders.First().Path;
+
+ _unitOfWork.LibraryRepository.Update(library);
+ await _unitOfWork.CommitAsync();
+
+ var fs = new FileSystem();
+        var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), fs);
+        var psf = new ParseScannedFiles(Substitute.For<ILogger<ParseScannedFiles>>(), ds,
+            new MockReadingItemService(ds, Substitute.For<IBookService>()), Substitute.For<IEventHub>());
+
+ var scanner = _scannerHelper.CreateServices(ds, fs);
+ await scanner.ScanLibrary(library.Id);
+
+ var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
+ Assert.NotNull(postLib);
+ Assert.Single(postLib.Series);
+
+ var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
+ Assert.Equal(3, spiceAndWolf.Volumes.Count);
+ Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
+
+ spiceAndWolf.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(2));
+ _context.Series.Update(spiceAndWolf);
+ await _context.SaveChangesAsync();
+
+ // Add file in subfolder
+ var spiceAndWolfDir = Path.Join(Path.Join(testDirectoryPath, "Spice and Wolf"), "Spice and Wolf Vol. 3");
+ File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0011.cbz"),
+ Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0013.cbz"));
+
+ var res = await psf.ScanFiles(testDirectoryPath, true,
+ await _unitOfWork.SeriesRepository.GetFolderPathMap(postLib.Id), postLib);
+ var changes = res.Count(sc => sc.HasChanged);
+ Assert.Equal(2, changes);
+ }
}
diff --git a/API.Tests/Services/ScannerServiceTests.cs b/API.Tests/Services/ScannerServiceTests.cs
index 38b32ae8d..57f2293eb 100644
--- a/API.Tests/Services/ScannerServiceTests.cs
+++ b/API.Tests/Services/ScannerServiceTests.cs
@@ -55,6 +55,28 @@ public class ScannerServiceTests : AbstractDbTest
await _context.SaveChangesAsync();
}
+
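+    /// <summary>
+    /// Sets LastFolderScanned in the past for every series in the library so the next scan is not skipped by the folder-change check
+    /// </summary>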
+ protected async Task SetAllSeriesLastScannedInThePast(Library library, TimeSpan? duration = null)
+ {
+ foreach (var series in library.Series)
+ {
+ await SetLastScannedInThePast(series, duration, false);
+ }
+ await _context.SaveChangesAsync();
+ }
+
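+    /// <summary>
+    /// Sets the series' LastFolderScanned to a point in the past (default: 2 minutes) to emulate time passing between scans
+    /// </summary>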
+ protected async Task SetLastScannedInThePast(Series series, TimeSpan? duration = null, bool save = true)
+ {
+ duration ??= TimeSpan.FromMinutes(2);
+ series.LastFolderScanned = DateTime.Now.Subtract(duration.Value);
+ _context.Series.Update(series);
+
+ if (save)
+ {
+ await _context.SaveChangesAsync();
+ }
+ }
+
[Fact]
public async Task ScanLibrary_ComicVine_PublisherFolder()
{
@@ -611,9 +633,7 @@ public class ScannerServiceTests : AbstractDbTest
File.Copy(Path.Join(root1PlushFolder, "Plush v02.cbz"), Path.Join(root1PlushFolder, "Plush v03.cbz"));
// Emulate time passage by updating lastFolderScan to be a min in the past
- s.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(1));
- _context.Series.Update(s);
- await _context.SaveChangesAsync();
+ await SetLastScannedInThePast(s);
// Rescan to ensure nothing changes yet again
await scanner.ScanLibrary(library.Id, false);
@@ -702,12 +722,7 @@ public class ScannerServiceTests : AbstractDbTest
Assert.Contains(postLib.Series, s => s.Name == "Plush");
// Emulate time passage by updating lastFolderScan to be a min in the past
- foreach (var s in postLib.Series)
- {
- s.LastFolderScanned = DateTime.Now.Subtract(TimeSpan.FromMinutes(1));
- _context.Series.Update(s);
- }
- await _context.SaveChangesAsync();
+ await SetAllSeriesLastScannedInThePast(postLib);
// Fourth Scan: Run again to check stability (should not remove Accel)
await scanner.ScanLibrary(library.Id);
@@ -794,7 +809,7 @@ public class ScannerServiceTests : AbstractDbTest
Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Count);
Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Sum(v => v.Chapters.Count));
- Thread.Sleep(1100); // Ensure at least one second has passed since library scan
+ await SetAllSeriesLastScannedInThePast(postLib);
// Add a new chapter to a volume of the series, and scan. Validate that no chapters were lost, and the new
// chapter was added
@@ -822,4 +837,94 @@ public class ScannerServiceTests : AbstractDbTest
Assert.Equal(2, executionerAndHerWayOfLife.Volumes.Count);
Assert.Equal(3, executionerAndHerWayOfLife.Volumes.Sum(v => v.Chapters.Count)); // Incremented by 1
}
+
+ [Fact]
+ public async Task RemovalPickedUp_NoOtherChanges()
+ {
+ const string testcase = "Series removed when no other changes are made - Manga.json";
+        var infos = new Dictionary<string, ComicInfo>();
+ var library = await _scannerHelper.GenerateScannerData(testcase, infos);
+ var testDirectoryPath = library.Folders.First().Path;
+
+ _unitOfWork.LibraryRepository.Update(library);
+ await _unitOfWork.CommitAsync();
+
+ var scanner = _scannerHelper.CreateServices();
+ await scanner.ScanLibrary(library.Id);
+
+ var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
+ Assert.NotNull(postLib);
+ Assert.Equal(2, postLib.Series.Count);
+
+ var executionerCopyDir = Path.Join(testDirectoryPath, "The Executioner and Her Way of Life");
+ Directory.Delete(executionerCopyDir, true);
+
+ await scanner.ScanLibrary(library.Id);
+ await _unitOfWork.CommitAsync();
+
+ postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
+ Assert.NotNull(postLib);
+ Assert.Single(postLib.Series);
+ Assert.Single(postLib.Series, s => s.Name == "Spice and Wolf");
+ Assert.Equal(2, postLib.Series.First().Volumes.Count);
+ }
+
+ [Fact]
+ public async Task SubFoldersNoSubFolders_CorrectPickupAfterAdd()
+ {
+ // This test case is used in multiple tests and can result in conflict if not separated
+ const string testcase = "Subfolders and files at root (2) - Manga.json";
+        var infos = new Dictionary<string, ComicInfo>();
+ var library = await _scannerHelper.GenerateScannerData(testcase, infos);
+ var testDirectoryPath = library.Folders.First().Path;
+
+ _unitOfWork.LibraryRepository.Update(library);
+ await _unitOfWork.CommitAsync();
+
+ var scanner = _scannerHelper.CreateServices();
+ await scanner.ScanLibrary(library.Id);
+
+ var postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
+ Assert.NotNull(postLib);
+ Assert.Single(postLib.Series);
+
+ var spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
+ Assert.Equal(3, spiceAndWolf.Volumes.Count);
+ Assert.Equal(4, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
+
+ await SetLastScannedInThePast(spiceAndWolf);
+
+ // Add volume to Spice and Wolf series directory
+ var spiceAndWolfDir = Path.Join(testDirectoryPath, "Spice and Wolf");
+ File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 1.cbz"),
+ Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 4.cbz"));
+
+ await scanner.ScanLibrary(library.Id);
+
+ postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
+ Assert.NotNull(postLib);
+ Assert.Single(postLib.Series);
+
+ spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
+ Assert.Equal(4, spiceAndWolf.Volumes.Count);
+ Assert.Equal(5, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
+
+ await SetLastScannedInThePast(spiceAndWolf);
+
+ // Add file in subfolder
+ spiceAndWolfDir = Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3");
+ File.Copy(Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0012.cbz"),
+ Path.Join(spiceAndWolfDir, "Spice and Wolf Vol. 3 Ch. 0013.cbz"));
+
+ await scanner.ScanLibrary(library.Id);
+
+ postLib = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(library.Id, LibraryIncludes.Series);
+ Assert.NotNull(postLib);
+ Assert.Single(postLib.Series);
+
+ spiceAndWolf = postLib.Series.First(x => x.Name == "Spice and Wolf");
+ Assert.Equal(4, spiceAndWolf.Volumes.Count);
+ Assert.Equal(6, spiceAndWolf.Volumes.Sum(v => v.Chapters.Count));
+
+ }
}
diff --git a/API.Tests/Services/ScrobblingServiceTests.cs b/API.Tests/Services/ScrobblingServiceTests.cs
index d460ee4e5..b7a418d83 100644
--- a/API.Tests/Services/ScrobblingServiceTests.cs
+++ b/API.Tests/Services/ScrobblingServiceTests.cs
@@ -1,11 +1,208 @@
-using API.Services.Plus;
+using System.Linq;
+using System.Threading.Tasks;
+using API.DTOs.Scrobbling;
+using API.Entities.Enums;
+using API.Helpers.Builders;
+using API.Services;
+using API.Services.Plus;
+using API.SignalR;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
using Xunit;
namespace API.Tests.Services;
#nullable enable
-public class ScrobblingServiceTests
+public class ScrobblingServiceTests : AbstractDbTest
{
+ private readonly ScrobblingService _service;
+ private readonly ILicenseService _licenseService;
+ private readonly ILocalizationService _localizationService;
+    private readonly ILogger<ScrobblingService> _logger;
+ private readonly IEmailService _emailService;
+
+ public ScrobblingServiceTests()
+ {
+        _licenseService = Substitute.For<ILicenseService>();
+        _localizationService = Substitute.For<ILocalizationService>();
+        _logger = Substitute.For<ILogger<ScrobblingService>>();
+        _emailService = Substitute.For<IEmailService>();
+
+        _service = new ScrobblingService(_unitOfWork, Substitute.For<IEventHub>(), _logger, _licenseService, _localizationService, _emailService);
+ }
+
+ protected override async Task ResetDb()
+ {
+ _context.ScrobbleEvent.RemoveRange(_context.ScrobbleEvent.ToList());
+ _context.Series.RemoveRange(_context.Series.ToList());
+ _context.Library.RemoveRange(_context.Library.ToList());
+ _context.AppUser.RemoveRange(_context.AppUser.ToList());
+
+ await _unitOfWork.CommitAsync();
+ }
+
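+    /// <summary>
+    /// Seeds a scrobbling-enabled Manga library with one series and a user that has AniList scrobbling enabled
+    /// </summary>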
+ private async Task SeedData()
+ {
+ var series = new SeriesBuilder("Test Series")
+ .WithFormat(MangaFormat.Archive)
+ .WithMetadata(new SeriesMetadataBuilder().Build())
+ .Build();
+
+ var library = new LibraryBuilder("Test Library", LibraryType.Manga)
+ .WithAllowScrobbling(true)
+ .WithSeries(series)
+ .Build();
+
+
+ _context.Library.Add(library);
+
+ var user = new AppUserBuilder("testuser", "testuser")
+ //.WithPreferences(new UserPreferencesBuilder().WithAniListScrobblingEnabled(true).Build())
+ .Build();
+
+ user.UserPreferences.AniListScrobblingEnabled = true;
+
+ _unitOfWork.UserRepository.Add(user);
+
+ await _unitOfWork.CommitAsync();
+ }
+
+ #region ScrobbleWantToReadUpdate Tests
+
+ [Fact]
+ public async Task ScrobbleWantToReadUpdate_NoExistingEvents_WantToRead_ShouldCreateNewEvent()
+ {
+ // Arrange
+ await SeedData();
+ _licenseService.HasActiveLicense().Returns(Task.FromResult(true));
+
+ const int userId = 1;
+ const int seriesId = 1;
+
+ // Act
+ await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
+
+ // Assert
+ var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
+ Assert.Single(events);
+ Assert.Equal(ScrobbleEventType.AddWantToRead, events[0].ScrobbleEventType);
+ Assert.Equal(userId, events[0].AppUserId);
+ }
+
+ [Fact]
+ public async Task ScrobbleWantToReadUpdate_NoExistingEvents_RemoveWantToRead_ShouldCreateNewEvent()
+ {
+ // Arrange
+ await SeedData();
+ _licenseService.HasActiveLicense().Returns(Task.FromResult(true));
+
+ const int userId = 1;
+ const int seriesId = 1;
+
+ // Act
+ await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
+
+ // Assert
+ var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
+ Assert.Single(events);
+ Assert.Equal(ScrobbleEventType.RemoveWantToRead, events[0].ScrobbleEventType);
+ Assert.Equal(userId, events[0].AppUserId);
+ }
+
+ [Fact]
+ public async Task ScrobbleWantToReadUpdate_ExistingWantToReadEvent_WantToRead_ShouldNotCreateNewEvent()
+ {
+ // Arrange
+ await SeedData();
+ _licenseService.HasActiveLicense().Returns(Task.FromResult(true));
+
+ const int userId = 1;
+ const int seriesId = 1;
+
+ // First, let's create an event through the service
+ await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
+
+ // Act - Try to create the same event again
+ await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
+
+ // Assert
+ var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
+
+ Assert.Single(events);
+ Assert.All(events, e => Assert.Equal(ScrobbleEventType.AddWantToRead, e.ScrobbleEventType));
+ }
+
+ [Fact]
+ public async Task ScrobbleWantToReadUpdate_ExistingWantToReadEvent_RemoveWantToRead_ShouldAddRemoveEvent()
+ {
+ // Arrange
+ await SeedData();
+ _licenseService.HasActiveLicense().Returns(Task.FromResult(true));
+
+ const int userId = 1;
+ const int seriesId = 1;
+
+ // First, let's create a want-to-read event through the service
+ await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
+
+ // Act - Now remove from want-to-read
+ await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
+
+ // Assert
+ var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
+
+ Assert.Single(events);
+ Assert.Contains(events, e => e.ScrobbleEventType == ScrobbleEventType.RemoveWantToRead);
+ }
+
+ [Fact]
+ public async Task ScrobbleWantToReadUpdate_ExistingRemoveWantToReadEvent_RemoveWantToRead_ShouldNotCreateNewEvent()
+ {
+ // Arrange
+ await SeedData();
+ _licenseService.HasActiveLicense().Returns(Task.FromResult(true));
+
+ const int userId = 1;
+ const int seriesId = 1;
+
+ // First, let's create a remove-from-want-to-read event through the service
+ await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
+
+ // Act - Try to create the same event again
+ await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
+
+ // Assert
+ var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
+
+ Assert.Single(events);
+ Assert.All(events, e => Assert.Equal(ScrobbleEventType.RemoveWantToRead, e.ScrobbleEventType));
+ }
+
+ [Fact]
+ public async Task ScrobbleWantToReadUpdate_ExistingRemoveWantToReadEvent_WantToRead_ShouldAddWantToReadEvent()
+ {
+ // Arrange
+ await SeedData();
+ _licenseService.HasActiveLicense().Returns(Task.FromResult(true));
+
+ const int userId = 1;
+ const int seriesId = 1;
+
+ // First, let's create a remove-from-want-to-read event through the service
+ await _service.ScrobbleWantToReadUpdate(userId, seriesId, false);
+
+ // Act - Now add to want-to-read
+ await _service.ScrobbleWantToReadUpdate(userId, seriesId, true);
+
+ // Assert
+ var events = await _unitOfWork.ScrobbleRepository.GetAllEventsForSeries(seriesId);
+
+ Assert.Single(events);
+ Assert.Contains(events, e => e.ScrobbleEventType == ScrobbleEventType.AddWantToRead);
+ }
+
+ #endregion
+
[Theory]
[InlineData("https://anilist.co/manga/35851/Byeontaega-Doeja/", 35851)]
[InlineData("https://anilist.co/manga/30105", 30105)]
diff --git a/API.Tests/Services/Test Data/ScannerService/TestCases/Series removed when no other changes are made - Manga.json b/API.Tests/Services/Test Data/ScannerService/TestCases/Series removed when no other changes are made - Manga.json
new file mode 100644
index 000000000..c6ea3bc88
--- /dev/null
+++ b/API.Tests/Services/Test Data/ScannerService/TestCases/Series removed when no other changes are made - Manga.json
@@ -0,0 +1,6 @@
+[
+ "Spice and Wolf/Spice and Wolf Vol. 1.cbz",
+ "Spice and Wolf/Spice and Wolf Vol. 2.cbz",
+ "The Executioner and Her Way of Life/The Executioner and Her Way of Life Vol. 1.cbz",
+ "The Executioner and Her Way of Life/The Executioner and Her Way of Life Vol. 2.cbz"
+]
diff --git a/API.Tests/Services/Test Data/ScannerService/TestCases/Subfolders and files at root (2) - Manga.json b/API.Tests/Services/Test Data/ScannerService/TestCases/Subfolders and files at root (2) - Manga.json
new file mode 100644
index 000000000..103ea421a
--- /dev/null
+++ b/API.Tests/Services/Test Data/ScannerService/TestCases/Subfolders and files at root (2) - Manga.json
@@ -0,0 +1,6 @@
+[
+ "Spice and Wolf/Spice and Wolf Vol. 1.cbz",
+ "Spice and Wolf/Spice and Wolf Vol. 2.cbz",
+ "Spice and Wolf/Spice and Wolf Vol. 3/Spice and Wolf Vol. 3 Ch. 0011.cbz",
+ "Spice and Wolf/Spice and Wolf Vol. 3/Spice and Wolf Vol. 3 Ch. 0012.cbz"
+]
diff --git a/API.Tests/Services/Test Data/ScannerService/TestCases/Subfolders and files at root - Manga.json b/API.Tests/Services/Test Data/ScannerService/TestCases/Subfolders and files at root - Manga.json
new file mode 100644
index 000000000..103ea421a
--- /dev/null
+++ b/API.Tests/Services/Test Data/ScannerService/TestCases/Subfolders and files at root - Manga.json
@@ -0,0 +1,6 @@
+[
+ "Spice and Wolf/Spice and Wolf Vol. 1.cbz",
+ "Spice and Wolf/Spice and Wolf Vol. 2.cbz",
+ "Spice and Wolf/Spice and Wolf Vol. 3/Spice and Wolf Vol. 3 Ch. 0011.cbz",
+ "Spice and Wolf/Spice and Wolf Vol. 3/Spice and Wolf Vol. 3 Ch. 0012.cbz"
+]
diff --git a/API/Controllers/LicenseController.cs b/API/Controllers/LicenseController.cs
index 77e35540e..30ed68771 100644
--- a/API/Controllers/LicenseController.cs
+++ b/API/Controllers/LicenseController.cs
@@ -12,6 +12,7 @@ using Hangfire;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
+using TaskScheduler = API.Services.TaskScheduler;
namespace API.Controllers;
@@ -81,6 +82,10 @@ public class LicenseController(
}
}
+    /// <summary>
+    /// Remove the Kavita+ License on the Server
+    /// </summary>
+    /// <returns></returns>
[Authorize("RequireAdminRole")]
[HttpDelete]
[ResponseCache(CacheProfileName = ResponseCacheProfiles.LicenseCache)]
@@ -91,7 +96,9 @@ public class LicenseController(
setting.Value = null;
unitOfWork.SettingsRepository.Update(setting);
await unitOfWork.CommitAsync();
- await taskScheduler.ScheduleKavitaPlusTasks();
+
+ TaskScheduler.RemoveKavitaPlusTasks();
+
return Ok();
}
diff --git a/API/Controllers/SeriesController.cs b/API/Controllers/SeriesController.cs
index d41a4bac0..dadfc74b7 100644
--- a/API/Controllers/SeriesController.cs
+++ b/API/Controllers/SeriesController.cs
@@ -99,7 +99,7 @@ public class SeriesController : BaseApiController
    /// <param name="filterDto"></param>
    /// <returns></returns>
[HttpPost("v2")]
- public async Task>> GetSeriesForLibraryV2([FromQuery] UserParams userParams, [FromBody] FilterV2Dto filterDto)
+ public async Task>> GetSeriesForLibraryV2([FromQuery] UserParams userParams, [FromBody] FilterV2Dto filterDto)
{
var userId = User.GetUserId();
var series =
diff --git a/API/Data/Repositories/ScrobbleEventRepository.cs b/API/Data/Repositories/ScrobbleEventRepository.cs
index 5ba2de9f6..0addd7473 100644
--- a/API/Data/Repositories/ScrobbleEventRepository.cs
+++ b/API/Data/Repositories/ScrobbleEventRepository.cs
@@ -32,6 +32,8 @@ public interface IScrobbleRepository
    Task<IList<ScrobbleEvent>> GetUserEventsForSeries(int userId, int seriesId);
    Task<PagedList<ScrobbleEventDto>> GetUserEvents(int userId, ScrobbleEventFilter filter, UserParams pagination);
    Task<IList<ScrobbleEvent>> GetAllEventsForSeries(int seriesId);
+    Task<IList<ScrobbleEvent>> GetAllEventsWithSeriesIds(IEnumerable<int> seriesIds);
+    Task<IList<ScrobbleEvent>> GetEvents();
}
/// <summary>
@@ -97,6 +99,11 @@ public class ScrobbleRepository : IScrobbleRepository
.ToListAsync();
}
+    /// <summary>
+    /// Returns all processed events that were processed 7 or more days ago
+    /// </summary>
+    /// <param name="daysAgo"></param>
+    /// <returns></returns>
    public async Task<IList<ScrobbleEvent>> GetProcessedEvents(int daysAgo)
{
var date = DateTime.UtcNow.Subtract(TimeSpan.FromDays(daysAgo));
@@ -172,8 +179,22 @@ public class ScrobbleRepository : IScrobbleRepository
    public async Task<IList<ScrobbleEvent>> GetAllEventsForSeries(int seriesId)
{
- return await _context.ScrobbleEvent.Where(e => e.SeriesId == seriesId)
+ return await _context.ScrobbleEvent
+ .Where(e => e.SeriesId == seriesId)
.ToListAsync();
}
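+
+    /// <summary>
+    /// Returns all scrobble events whose SeriesId is in the given list
+    /// </summary>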
+    public async Task<IList<ScrobbleEvent>> GetAllEventsWithSeriesIds(IEnumerable<int> seriesIds)
+ {
+ return await _context.ScrobbleEvent
+ .Where(e => seriesIds.Contains(e.SeriesId))
+ .ToListAsync();
+ }
+
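+    /// <summary>
+    /// Returns all scrobble events, including the owning AppUser
+    /// </summary>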
+    public async Task<IList<ScrobbleEvent>> GetEvents()
+ {
+ return await _context.ScrobbleEvent
+ .Include(e => e.AppUser)
+ .ToListAsync();
+ }
}
diff --git a/API/Helpers/StringHelper.cs b/API/Helpers/StringHelper.cs
index 4f3fa44f6..bd063b2a7 100644
--- a/API/Helpers/StringHelper.cs
+++ b/API/Helpers/StringHelper.cs
@@ -3,8 +3,19 @@
namespace API.Helpers;
#nullable enable
-public static class StringHelper
+public static partial class StringHelper
{
+ #region Regex Source Generators
+ [GeneratedRegex(@"\s?\(Source:\s*[^)]+\)")]
+ private static partial Regex SourceRegex();
+ [GeneratedRegex(@"
", RegexOptions.IgnoreCase | RegexOptions.Compiled, "en-US")]
+ private static partial Regex BrStandardizeRegex();
+ [GeneratedRegex(@"(?:
\s*)+", RegexOptions.IgnoreCase | RegexOptions.Compiled, "en-US")]
+ private static partial Regex BrMultipleRegex();
+ [GeneratedRegex(@"\s+")]
+ private static partial Regex WhiteSpaceRegex();
+ #endregion
+
///
    /// <summary>
///
@@ -19,13 +30,13 @@ public static class StringHelper
}
        // First standardize all br tags to <br /> format
- summary = Regex.Replace(summary, @"
", "
", RegexOptions.IgnoreCase | RegexOptions.Compiled);
+ summary = BrStandardizeRegex().Replace(summary, "
");
// Replace multiple consecutive br tags with a single br tag
- summary = Regex.Replace(summary, @"(?:
\s*)+", "
", RegexOptions.IgnoreCase | RegexOptions.Compiled);
+ summary = BrMultipleRegex().Replace(summary, "
");
// Normalize remaining whitespace (replace multiple spaces with a single space)
- summary = Regex.Replace(summary, @"\s+", " ").Trim();
+ summary = WhiteSpaceRegex().Replace(summary, " ").Trim();
return summary.Trim();
}
@@ -37,6 +48,8 @@ public static class StringHelper
    /// <returns></returns>
public static string? RemoveSourceInDescription(string? description)
{
- return description?.Trim();
+ if (string.IsNullOrEmpty(description)) return description;
+
+ return SourceRegex().Replace(description, string.Empty).Trim();
}
}
diff --git a/API/Services/Plus/ExternalMetadataService.cs b/API/Services/Plus/ExternalMetadataService.cs
index ade35024a..a73701cf8 100644
--- a/API/Services/Plus/ExternalMetadataService.cs
+++ b/API/Services/Plus/ExternalMetadataService.cs
@@ -231,7 +231,7 @@ public class ExternalMetadataService : IExternalMetadataService
        // Some summaries can contain multiple <br/>s, we need to ensure it's only 1
foreach (var result in results)
{
- result.Series.Summary = StringHelper.SquashBreaklines(result.Series.Summary);
+ result.Series.Summary = StringHelper.RemoveSourceInDescription(StringHelper.SquashBreaklines(result.Series.Summary));
}
return results;
@@ -730,7 +730,7 @@ public class ExternalMetadataService : IExternalMetadataService
{
Name = w.Name,
AniListId = ScrobblingService.ExtractId(w.Url, ScrobblingService.AniListCharacterWebsite),
- Description = StringHelper.SquashBreaklines(w.Description),
+ Description = StringHelper.RemoveSourceInDescription(StringHelper.SquashBreaklines(w.Description)),
})
.Concat(series.Metadata.People
.Where(p => p.Role == PersonRole.Character)
@@ -809,7 +809,7 @@ public class ExternalMetadataService : IExternalMetadataService
{
Name = w.Name,
AniListId = ScrobblingService.ExtractId(w.Url, ScrobblingService.AniListStaffWebsite),
- Description = StringHelper.SquashBreaklines(w.Description),
+ Description = StringHelper.RemoveSourceInDescription(StringHelper.SquashBreaklines(w.Description)),
})
.Concat(series.Metadata.People
.Where(p => p.Role == PersonRole.CoverArtist)
@@ -866,7 +866,7 @@ public class ExternalMetadataService : IExternalMetadataService
{
Name = w.Name,
AniListId = ScrobblingService.ExtractId(w.Url, ScrobblingService.AniListStaffWebsite),
- Description = StringHelper.SquashBreaklines(w.Description),
+ Description = StringHelper.RemoveSourceInDescription(StringHelper.SquashBreaklines(w.Description)),
})
.Concat(series.Metadata.People
.Where(p => p.Role == PersonRole.Writer)
@@ -1122,7 +1122,7 @@ public class ExternalMetadataService : IExternalMetadataService
return false;
}
- series.Metadata.Summary = StringHelper.SquashBreaklines(externalMetadata.Summary);
+ series.Metadata.Summary = StringHelper.RemoveSourceInDescription(StringHelper.SquashBreaklines(externalMetadata.Summary));
return true;
}
@@ -1432,7 +1432,7 @@ public class ExternalMetadataService : IExternalMetadataService
.PostJsonAsync(payload)
.ReceiveJson();
- ret.Summary = StringHelper.SquashBreaklines(ret.Summary);
+ ret.Summary = StringHelper.RemoveSourceInDescription(StringHelper.SquashBreaklines(ret.Summary));
return ret;
diff --git a/API/Services/Plus/ScrobblingService.cs b/API/Services/Plus/ScrobblingService.cs
index c67f6c73f..8d1e2d85a 100644
--- a/API/Services/Plus/ScrobblingService.cs
+++ b/API/Services/Plus/ScrobblingService.cs
@@ -165,17 +165,15 @@ public class ScrobblingService : IScrobblingService
    private async Task<bool> ShouldSendEarlyReminder(int userId, DateTime tokenExpiry)
{
var earlyReminderDate = tokenExpiry.AddDays(-5);
- if (earlyReminderDate <= DateTime.UtcNow)
- {
- var hasAlreadySentReminder = await _unitOfWork.DataContext.EmailHistory
- .AnyAsync(h => h.AppUserId == userId && h.Sent &&
- h.EmailTemplate == EmailService.TokenExpiringSoonTemplate &&
- h.SendDate >= earlyReminderDate);
+ if (earlyReminderDate > DateTime.UtcNow) return false;
- return !hasAlreadySentReminder;
- }
+ var hasAlreadySentReminder = await _unitOfWork.DataContext.EmailHistory
+ .AnyAsync(h => h.AppUserId == userId && h.Sent &&
+ h.EmailTemplate == EmailService.TokenExpiringSoonTemplate &&
+ h.SendDate >= earlyReminderDate);
+
+ return !hasAlreadySentReminder;
- return false;
}
    /// <summary>
@@ -183,17 +181,15 @@ public class ScrobblingService : IScrobblingService
    /// </summary>
    private async Task<bool> ShouldSendExpirationReminder(int userId, DateTime tokenExpiry)
{
- if (tokenExpiry <= DateTime.UtcNow)
- {
- var hasAlreadySentExpirationEmail = await _unitOfWork.DataContext.EmailHistory
- .AnyAsync(h => h.AppUserId == userId && h.Sent &&
- h.EmailTemplate == EmailService.TokenExpirationTemplate &&
- h.SendDate >= tokenExpiry);
+ if (tokenExpiry > DateTime.UtcNow) return false;
- return !hasAlreadySentExpirationEmail;
- }
+ var hasAlreadySentExpirationEmail = await _unitOfWork.DataContext.EmailHistory
+ .AnyAsync(h => h.AppUserId == userId && h.Sent &&
+ h.EmailTemplate == EmailService.TokenExpirationTemplate &&
+ h.SendDate >= tokenExpiry);
+
+ return !hasAlreadySentExpirationEmail;
- return false;
}
@@ -433,10 +429,17 @@ public class ScrobblingService : IScrobblingService
if (await CheckIfCannotScrobble(userId, seriesId, series)) return;
_logger.LogInformation("Processing Scrobbling want-to-read event for {UserId} on {SeriesName}", userId, series.Name);
- var existing = await _unitOfWork.ScrobbleRepository.Exists(userId, series.Id,
- onWantToRead ? ScrobbleEventType.AddWantToRead : ScrobbleEventType.RemoveWantToRead);
- if (existing) return; // BUG: If I take a series and add to remove from want to read, then add to want to read, Kavita rejects the second as a duplicate, when it's not
+ // Get existing events for this series/user
+ var existingEvents = (await _unitOfWork.ScrobbleRepository.GetUserEventsForSeries(userId, seriesId))
+ .Where(e => new[] { ScrobbleEventType.AddWantToRead, ScrobbleEventType.RemoveWantToRead }.Contains(e.ScrobbleEventType));
+ // Remove all existing want-to-read events for this series/user
+ foreach (var existingEvent in existingEvents)
+ {
+ _unitOfWork.ScrobbleRepository.Remove(existingEvent);
+ }
+
+ // Create the new event
var evt = new ScrobbleEvent()
{
SeriesId = series.Id,
@@ -447,6 +450,7 @@ public class ScrobblingService : IScrobblingService
AppUserId = userId,
Format = series.Library.Type.ConvertToPlusMediaFormat(series.Format),
};
+
_unitOfWork.ScrobbleRepository.Attach(evt);
await _unitOfWork.CommitAsync();
_logger.LogDebug("Added Scrobbling WantToRead update on {SeriesName} with Userid {UserId} ", series.Name, userId);
@@ -465,6 +469,7 @@ public class ScrobblingService : IScrobblingService
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(series.LibraryId);
if (library is not {AllowScrobbling: true}) return true;
if (!ExternalMetadataService.IsPlusEligible(library.Type)) return true;
+
return false;
}
@@ -481,7 +486,7 @@ public class ScrobblingService : IScrobblingService
}
catch (Exception e)
{
- _logger.LogError(e, "An error happened during the request to Kavita+ API");
+ _logger.LogError(e, "An error happened trying to get rate limit from Kavita+ API");
}
return 0;
@@ -737,8 +742,10 @@ public class ScrobblingService : IScrobblingService
[AutomaticRetry(Attempts = 3, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public async Task ClearProcessedEvents()
{
- var events = await _unitOfWork.ScrobbleRepository.GetProcessedEvents(7);
+ const int daysAgo = 7;
+ var events = await _unitOfWork.ScrobbleRepository.GetProcessedEvents(daysAgo);
_unitOfWork.ScrobbleRepository.Remove(events);
+ _logger.LogInformation("Removing {Count} scrobble events that have been processed {DaysAgo}+ days ago", events.Count, daysAgo);
await _unitOfWork.CommitAsync();
}
@@ -752,7 +759,6 @@ public class ScrobblingService : IScrobblingService
{
// Check how many scrobble events we have available then only do those.
        var userRateLimits = new Dictionary<int, int>();
- var license = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey);
var progressCounter = 0;
@@ -784,32 +790,24 @@ public class ScrobblingService : IScrobblingService
.Where(e => !errors.Contains(e.SeriesId))
.ToList();
- var decisions = addToWantToRead
- .GroupBy(item => new { item.SeriesId, item.AppUserId })
- .Select(group => new
- {
- group.Key.SeriesId,
- UserId = group.Key.AppUserId,
- Event = group.First(),
- Decision = group.Count() - removeWantToRead
- .Count(removeItem => removeItem.SeriesId == group.Key.SeriesId && removeItem.AppUserId == group.Key.AppUserId)
- })
- .Where(d => d.Decision > 0)
- .Select(d => d.Event)
- .ToList();
+ var decisions = CalculateNetWantToReadDecisions(addToWantToRead, removeWantToRead);
- // Get all the applicable users to scrobble and set their rate limits
- var usersToScrobble = await PrepareUsersToScrobble(readEvents, addToWantToRead, removeWantToRead, ratingEvents, userRateLimits, license);
+ // Clear any events that are already on error table
+ var erroredEvents = await _unitOfWork.ScrobbleRepository.GetAllEventsWithSeriesIds(errors);
+ if (erroredEvents.Count > 0)
+ {
+ _unitOfWork.ScrobbleRepository.Remove(erroredEvents);
+ await _unitOfWork.CommitAsync();
+ }
var totalEvents = readEvents.Count + decisions.Count + ratingEvents.Count;
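+        // Bail out before fetching the license or preparing users when there is nothing to process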
+ if (totalEvents == 0) return;
+ // Get all the applicable users to scrobble and set their rate limits
+ var license = await _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.LicenseKey);
+ var usersToScrobble = await PrepareUsersToScrobble(readEvents, addToWantToRead, removeWantToRead, ratingEvents, userRateLimits, license);
- if (totalEvents == 0)
- {
- return;
- }
-
_logger.LogInformation("Scrobble Processing Details:" +
"\n Read Events: {ReadEventsCount}" +
"\n Want to Read Events: {WantToReadEventsCount}" +
@@ -828,7 +826,7 @@ public class ScrobblingService : IScrobblingService
progressCounter = await ProcessRatingEvents(ratingEvents, userRateLimits, usersToScrobble, totalEvents, progressCounter);
- progressCounter = await ProcessRatingEvents(decisions, userRateLimits, usersToScrobble, totalEvents, addToWantToRead, removeWantToRead, progressCounter);
+ progressCounter = await ProcessWantToReadRatingEvents(decisions, userRateLimits, usersToScrobble, totalEvents, progressCounter);
}
catch (FlurlHttpException ex)
{
@@ -840,10 +838,61 @@ public class ScrobblingService : IScrobblingService
await SaveToDb(progressCounter, true);
_logger.LogInformation("Scrobbling Events is complete");
+        // Clean up any events left behind by bugs or legacy behavior
+ try
+ {
+ var eventsWithoutAnilistToken = (await _unitOfWork.ScrobbleRepository.GetEvents())
+ .Where(e => !e.IsProcessed && !e.IsErrored)
+ .Where(e => string.IsNullOrEmpty(e.AppUser.AniListAccessToken));
+
+ _unitOfWork.ScrobbleRepository.Remove(eventsWithoutAnilistToken);
+ await _unitOfWork.CommitAsync();
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "There was an exception when trying to delete old scrobble events when the user has no active token");
+ }
}
-    private async Task<int> ProcessRatingEvents(List<ScrobbleEvent> decisions, Dictionary<int, int> userRateLimits, List<AppUser> usersToScrobble, int totalEvents,
-        List<ScrobbleEvent> addToWantToRead, List<ScrobbleEvent> removeWantToRead, int progressCounter)
+    /// <summary>
+    /// Calculates the net want-to-read decisions by considering all events.
+    /// Returns events that represent the final state for each user/series pair.
+    /// </summary>
+    /// <param name="addEvents">List of events for adding to want-to-read</param>
+    /// <param name="removeEvents">List of events for removing from want-to-read</param>
+    /// <returns>List of events that represent the final state (add or remove)</returns>
+    private static List<ScrobbleEvent> CalculateNetWantToReadDecisions(List<ScrobbleEvent> addEvents, List<ScrobbleEvent> removeEvents)
+ {
+ // Create a dictionary to track the latest event for each user/series combination
+ var latestEvents = new Dictionary<(int SeriesId, int AppUserId), ScrobbleEvent>();
+
+ // Process all add events
+ foreach (var addEvent in addEvents)
+ {
+ var key = (addEvent.SeriesId, addEvent.AppUserId);
+
+ if (latestEvents.TryGetValue(key, out var value) && addEvent.CreatedUtc <= value.CreatedUtc) continue;
+
+ value = addEvent;
+ latestEvents[key] = value;
+ }
+
+ // Process all remove events
+ foreach (var removeEvent in removeEvents)
+ {
+ var key = (removeEvent.SeriesId, removeEvent.AppUserId);
+
+ if (latestEvents.TryGetValue(key, out var value) && removeEvent.CreatedUtc <= value.CreatedUtc) continue;
+
+ value = removeEvent;
+ latestEvents[key] = value;
+ }
+
+ // Return all events that represent the final state
+ return latestEvents.Values.ToList();
+ }
+
+    private async Task<int> ProcessWantToReadRatingEvents(List<ScrobbleEvent> decisions, Dictionary<int, int> userRateLimits, List<AppUser> usersToScrobble, int totalEvents, int progressCounter)
{
progressCounter = await ProcessEvents(decisions, userRateLimits, usersToScrobble.Count, progressCounter,
totalEvents, evt => Task.FromResult(new ScrobbleDto()
@@ -861,15 +910,9 @@ public class ScrobblingService : IScrobblingService
}));
// After decisions, we need to mark all the want to read and remove from want to read as completed
- if (decisions.All(d => d.IsProcessed))
+ if (decisions.Any(d => d.IsProcessed))
{
- foreach (var scrobbleEvent in addToWantToRead)
- {
- scrobbleEvent.IsProcessed = true;
- scrobbleEvent.ProcessDateUtc = DateTime.UtcNow;
- _unitOfWork.ScrobbleRepository.Update(scrobbleEvent);
- }
- foreach (var scrobbleEvent in removeWantToRead)
+ foreach (var scrobbleEvent in decisions.Where(d => d.IsProcessed))
{
scrobbleEvent.IsProcessed = true;
scrobbleEvent.ProcessDateUtc = DateTime.UtcNow;
@@ -899,6 +942,7 @@ public class ScrobblingService : IScrobblingService
}));
}
+
    private async Task<int> ProcessReadEvents(List<ScrobbleEvent> readEvents, Dictionary<int, int> userRateLimits, List<AppUser> usersToScrobble, int totalEvents,
int progressCounter)
{
@@ -946,6 +990,7 @@ public class ScrobblingService : IScrobblingService
.Where(user => user.UserPreferences.AniListScrobblingEnabled)
.DistinctBy(u => u.Id)
.ToList();
+
foreach (var user in usersToScrobble)
{
await SetAndCheckRateLimit(userRateLimits, user, license.Value);
@@ -980,7 +1025,7 @@ public class ScrobblingService : IScrobblingService
SeriesId = evt.SeriesId
});
await _unitOfWork.CommitAsync();
- return 0;
+ continue;
}
if (evt.Series.IsBlacklisted || evt.Series.DontMatch)
@@ -999,7 +1044,7 @@ public class ScrobblingService : IScrobblingService
_unitOfWork.ScrobbleRepository.Update(evt);
await _unitOfWork.CommitAsync();
- return 0;
+ continue;
}
var count = await SetAndCheckRateLimit(userRateLimits, evt.AppUser, license.Value);
@@ -1042,12 +1087,12 @@ public class ScrobblingService : IScrobblingService
evt.IsErrored = true;
evt.ErrorDetails = AccessTokenErrorMessage;
_unitOfWork.ScrobbleRepository.Update(evt);
- return progressCounter;
}
}
- catch (Exception)
+ catch (Exception ex)
{
/* Swallow as it's already been handled in PostScrobbleUpdate */
+ _logger.LogError(ex, "Error processing event {EventId}", evt.Id);
}
await SaveToDb(progressCounter);
// We can use count to determine how long to sleep based on rate gain. It might be specific to AniList, but we can model others
@@ -1061,13 +1106,10 @@ public class ScrobblingService : IScrobblingService
private async Task SaveToDb(int progressCounter, bool force = false)
{
- if (!force || progressCounter % 5 == 0)
+ if ((force || progressCounter % 5 == 0) && _unitOfWork.HasChanges())
{
- if (_unitOfWork.HasChanges())
- {
- _logger.LogDebug("Saving Progress");
- await _unitOfWork.CommitAsync();
- }
+ _logger.LogDebug("Saving Scrobbling Event Processing Progress");
+ await _unitOfWork.CommitAsync();
}
}
@@ -1105,6 +1147,7 @@ public class ScrobblingService : IScrobblingService
{
        var providers = new List<ScrobbleProvider>();
if (!string.IsNullOrEmpty(appUser.AniListAccessToken)) providers.Add(ScrobbleProvider.AniList);
+
return providers;
}
diff --git a/API/Services/ReviewService.cs b/API/Services/ReviewService.cs
index c2c876b4b..e9468ecba 100644
--- a/API/Services/ReviewService.cs
+++ b/API/Services/ReviewService.cs
@@ -76,8 +76,8 @@ public static class ReviewService
plainText = Regex.Replace(plainText, @"~~", string.Empty);
plainText = Regex.Replace(plainText, @"__", string.Empty);
- // Take the first 100 characters
- plainText = plainText.Length > 100 ? plainText.Substring(0, BodyTextLimit) : plainText;
+ // Take the first BodyTextLimit characters
+ plainText = plainText.Length > BodyTextLimit ? plainText.Substring(0, BodyTextLimit) : plainText;
return plainText + "…";
}
diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs
index 0aeb8c189..3000bbd31 100644
--- a/API/Services/TaskScheduler.cs
+++ b/API/Services/TaskScheduler.cs
@@ -236,6 +236,20 @@ public class TaskScheduler : ITaskScheduler
RecurringJobOptions);
}
+    /// <summary>
+    /// Removes any Kavita+ Recurring Jobs
+    /// </summary>
+ public static void RemoveKavitaPlusTasks()
+ {
+ RecurringJob.RemoveIfExists(CheckScrobblingTokensId);
+ RecurringJob.RemoveIfExists(LicenseCheckId);
+ RecurringJob.RemoveIfExists(ProcessScrobblingEventsId);
+ RecurringJob.RemoveIfExists(ProcessProcessedScrobblingEventsId);
+ RecurringJob.RemoveIfExists(KavitaPlusDataRefreshId);
+ RecurringJob.RemoveIfExists(KavitaPlusStackSyncId);
+ RecurringJob.RemoveIfExists(KavitaPlusWantToReadSyncId);
+ }
+
#region StatsTasks
diff --git a/API/Services/Tasks/Scanner/ParseScannedFiles.cs b/API/Services/Tasks/Scanner/ParseScannedFiles.cs
index d18d4a2f2..08abb1c07 100644
--- a/API/Services/Tasks/Scanner/ParseScannedFiles.cs
+++ b/API/Services/Tasks/Scanner/ParseScannedFiles.cs
@@ -166,8 +166,10 @@ public class ParseScannedFiles
// Don't process any folders where we've already scanned everything below
if (processedDirs.Any(d => d.StartsWith(directory + Path.AltDirectorySeparatorChar) || d.Equals(directory)))
{
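+            // Check whether this series folder changed since the last scan so its loose files are only flagged as changed when needed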
+ var hasChanged = !HasSeriesFolderNotChangedSinceLastScan(library, seriesPaths, directory, forceCheck);
// Skip this directory as we've already processed a parent unless there are loose files at that directory
- CheckSurfaceFiles(result, directory, folderPath, fileExtensions, matcher);
+ // and they have changes
+ CheckSurfaceFiles(result, directory, folderPath, fileExtensions, matcher, hasChanged);
continue;
}
@@ -290,14 +292,14 @@ public class ParseScannedFiles
    /// <summary>
    /// Performs a full scan of the directory and adds it to the result.
    /// </summary>
-    private void CheckSurfaceFiles(List<ScanResult> result, string directory, string folderPath, string fileExtensions, GlobMatcher matcher)
+    private void CheckSurfaceFiles(List<ScanResult> result, string directory, string folderPath, string fileExtensions, GlobMatcher matcher, bool hasChanged)
{
var files = _directoryService.ScanFiles(directory, fileExtensions, matcher, SearchOption.TopDirectoryOnly);
if (files.Count == 0)
{
return;
}
- result.Add(CreateScanResult(directory, folderPath, true, files));
+ result.Add(CreateScanResult(directory, folderPath, hasChanged, files));
}
    /// <summary>
diff --git a/UI/Web/src/app/_single-module/user-scrobble-history/user-scrobble-history.component.ts b/UI/Web/src/app/_single-module/user-scrobble-history/user-scrobble-history.component.ts
index e28153e2a..d237e4d7c 100644
--- a/UI/Web/src/app/_single-module/user-scrobble-history/user-scrobble-history.component.ts
+++ b/UI/Web/src/app/_single-module/user-scrobble-history/user-scrobble-history.component.ts
@@ -103,6 +103,7 @@ export class UserScrobbleHistoryComponent implements OnInit {
this.isLoading = true;
this.cdRef.markForCheck();
+ // BUG: Table should be sorted by lastModifiedUtc by default
this.scrobblingService.getScrobbleEvents({query, field, isDescending}, page, pageSize)
.pipe(take(1))
      .subscribe((result: PaginatedResult<ScrobbleEvent[]>) => {