diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 79deb8139..795832a8f 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -10,7 +10,7 @@ on:
 jobs:
   build:
     name: Build
-    runs-on: ubuntu-latest
+    runs-on: windows-latest
     steps:
       - uses: actions/checkout@v2
         with:
@@ -25,9 +25,9 @@ jobs:
         uses: actions/setup-java@v1
         with:
          java-version: 1.11
-      - uses: actions/checkout@v2
-        with:
-          fetch-depth: 0  # Shallow clones should be disabled for a better relevancy of analysis
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0  # Shallow clones should be disabled for a better relevancy of analysis
       - name: Cache SonarCloud packages
         uses: actions/cache@v1
         with:
diff --git a/API.Tests/Services/CacheServiceTests.cs b/API.Tests/Services/CacheServiceTests.cs
index 87ed5a655..642d86e0f 100644
--- a/API.Tests/Services/CacheServiceTests.cs
+++ b/API.Tests/Services/CacheServiceTests.cs
@@ -1,4 +1,13 @@
-using Xunit;
+using System.Collections.Generic;
+using System.IO;
+using API.Data;
+using API.Entities;
+using API.Interfaces;
+using API.Interfaces.Services;
+using API.Services;
+using Microsoft.Extensions.Logging;
+using NSubstitute;
+using Xunit;
 
 namespace API.Tests.Services
 {
@@ -9,10 +18,33 @@ namespace API.Tests.Services
         // private readonly IUnitOfWork _unitOfWork = Substitute.For<IUnitOfWork>();
         // private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
         // private readonly IDirectoryService _directoryService = Substitute.For<IDirectoryService>();
-
+        //
         // public CacheServiceTests()
         // {
-        //     //_cacheService = new CacheService(_logger, _unitOfWork, _archiveService, _directoryService);
+        //     _cacheService = new CacheService(_logger, _unitOfWork, _archiveService, _directoryService);
+        // }
+
+        // [Fact]
+        // public async void Ensure_ShouldExtractArchive(int chapterId)
+        // {
+        //
+        //     // CacheDirectory needs to be customized.
+        //     _unitOfWork.VolumeRepository.GetChapterAsync(chapterId).Returns(new Chapter
+        //     {
+        //         Id = 1,
+        //         Files = new List<MangaFile>()
+        //         {
+        //             new MangaFile()
+        //             {
+        //                 FilePath = ""
+        //             }
+        //         }
+        //     });
+        //
+        //     await _cacheService.Ensure(1);
+        //
+        //     var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/CacheService/Archives");
+        //
         // }

         //string GetCachedPagePath(Volume volume, int page)
@@ -50,7 +82,7 @@ namespace API.Tests.Services
            // cacheService.Configure().GetVolumeCachePath(1, volume.Files.ElementAt(0)).Returns("cache/1/");
            // _directoryService.Configure().GetFilesWithExtension("cache/1/").Returns(new string[] {"pexels-photo-6551949.jpg"});
            // Assert.Equal(expected, _cacheService.GetCachedPagePath(volume, pageNum));
-            Assert.True(true);
+            //Assert.True(true);
         }

         [Fact]
diff --git a/API.Tests/Services/Test Data/CacheService/Archives/file in folder in folder.zip b/API.Tests/Services/Test Data/CacheService/Archives/file in folder in folder.zip
new file mode 100644
index 000000000..7598e0fa3
Binary files /dev/null and b/API.Tests/Services/Test Data/CacheService/Archives/file in folder in folder.zip differ
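Note on the `CacheServiceTests` change above: the new `Ensure_ShouldExtractArchive` test stays commented out because `CacheService` cannot yet point its cache directory at a test location. As a rough sketch of where this could land once that is possible, the xUnit/NSubstitute shape below uses only members that appear in this diff (`IUnitOfWork.VolumeRepository.GetChapterAsync`, the `CacheService` constructor, `Ensure`, the new test archive). It switches to `[Theory]`/`[InlineData]`, since `[Fact]` methods cannot take parameters, and to `async Task` instead of `async void`; the logger type and the final assertion are assumptions, because neither the `_logger` field declaration nor `IArchiveService`'s members are visible in this diff.

```csharp
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using API.Entities;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit;

namespace API.Tests.Services
{
    public class CacheServiceEnsureTests
    {
        // Logger type is assumed; the other substitutes mirror the commented-out fields above.
        private readonly ILogger<CacheService> _logger = Substitute.For<ILogger<CacheService>>();
        private readonly IUnitOfWork _unitOfWork = Substitute.For<IUnitOfWork>();
        private readonly IArchiveService _archiveService = Substitute.For<IArchiveService>();
        private readonly IDirectoryService _directoryService = Substitute.For<IDirectoryService>();

        [Theory]        // xUnit needs [Theory] + [InlineData] for a parameterised test
        [InlineData(1)]
        public async Task Ensure_ShouldExtractArchive(int chapterId)
        {
            var testArchive = Path.Join(Directory.GetCurrentDirectory(),
                "../../../Services/Test Data/CacheService/Archives/file in folder in folder.zip");

            // Stub the repository call the service makes for the chapter being cached.
            _unitOfWork.VolumeRepository.GetChapterAsync(chapterId).Returns(new Chapter
            {
                Id = chapterId,
                Files = new List<MangaFile> { new MangaFile { FilePath = testArchive } }
            });

            var cacheService = new CacheService(_logger, _unitOfWork, _archiveService, _directoryService);

            await cacheService.Ensure(chapterId);

            // Weak but safe assertion: the chapter was looked up.
            // A real test would also verify the extracted files once the cache directory is injectable.
            await _unitOfWork.VolumeRepository.Received(1).GetChapterAsync(chapterId);
        }
    }
}
```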
diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs
index e1c9912be..492ff6357 100644
--- a/API/Services/TaskScheduler.cs
+++ b/API/Services/TaskScheduler.cs
@@ -1,4 +1,5 @@
 using System.IO;
+using System.Linq;
 using System.Threading.Tasks;
 using API.Entities.Enums;
 using API.Helpers.Converters;
@@ -53,33 +54,34 @@ namespace API.Services
             if (setting != null)
             {
                 _logger.LogDebug("Scheduling Scan Library Task for {Cron}", setting);
-                RecurringJob.AddOrUpdate(() => _scannerService.ScanLibraries(), () => CronConverter.ConvertToCronNotation(setting));
+                RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), () => CronConverter.ConvertToCronNotation(setting));
             }
             else
             {
-                RecurringJob.AddOrUpdate(() => _scannerService.ScanLibraries(), Cron.Daily);
+                RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), Cron.Daily);
             }

             setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Result.Value;
             if (setting != null)
             {
                 _logger.LogDebug("Scheduling Backup Task for {Cron}", setting);
-                RecurringJob.AddOrUpdate(() => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting));
+                RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting));
             }
             else
             {
-                RecurringJob.AddOrUpdate(() => _backupService.BackupDatabase(), Cron.Weekly);
+                RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), Cron.Weekly);
             }

-            RecurringJob.AddOrUpdate(() => _cleanupService.Cleanup(), Cron.Daily);
+            RecurringJob.AddOrUpdate("cleanup", () => _cleanupService.Cleanup(), Cron.Daily);
         }

         public void ScanLibrary(int libraryId, bool forceUpdate = false)
         {
+            _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
             BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate));
-            BackgroundJob.Enqueue(() => _cleanupService.Cleanup()); // When we do a scan, force cache to re-unpack in case page numbers change
-
+            //BackgroundJob.Enqueue(() => _cleanupService.Cleanup()); // When we do a scan, force cache to re-unpack in case page numbers change
+            RecurringJob.Trigger("cleanup"); // TODO: Alternate way to trigger jobs. Test this out and see if we should switch.
         }

         public void CleanupChapters(int[] chapterIds)
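Note on the `TaskScheduler` change above: giving each recurring job a stable id (`"scan-libraries"`, `"backup"`, `"cleanup"`) makes `RecurringJob.AddOrUpdate` idempotent, because repeated startups update the one registration instead of piling up duplicates, and it is what allows the `RecurringJob.Trigger("cleanup")` call flagged by the TODO to fire the already-registered cleanup on demand rather than enqueuing a separate one-off job. A minimal sketch of that pattern, assuming Hangfire storage and a server are already configured (as the API project does at startup) and using a stand-in `CleanupService` in place of the project's `ICleanupService`:

```csharp
using System;
using Hangfire;

namespace SchedulerSketch
{
    // Stand-in for the project's ICleanupService; Hangfire activates it when the job runs.
    public class CleanupService
    {
        public void Cleanup() => Console.WriteLine("Cleaning up cache and temp files...");
    }

    public static class CleanupScheduling
    {
        public static void Register(CleanupService cleanupService)
        {
            // Same id on every call -> one recurring job whose definition is updated in place.
            RecurringJob.AddOrUpdate("cleanup", () => cleanupService.Cleanup(), Cron.Daily());
        }

        public static void RunNow()
        {
            // Runs the registered "cleanup" job immediately, reusing its single definition;
            // this is the alternative to BackgroundJob.Enqueue(() => cleanupService.Cleanup()).
            RecurringJob.Trigger("cleanup");
        }
    }
}
```

One thing worth covering when testing the TODO: `Trigger` can only fire a job that has already been registered under that id, so the `"cleanup"` registration in `ScheduleTasks` must have run before `ScanLibrary` relies on it.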
diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs
index 3da2dce03..5587ad0c8 100644
--- a/API/Services/Tasks/ScannerService.cs
+++ b/API/Services/Tasks/ScannerService.cs
@@ -63,7 +63,8 @@ namespace API.Services.Tasks
             _scannedSeries = null;
         }

-        //[DisableConcurrentExecution(timeoutInSeconds: 360)]
+        [DisableConcurrentExecution(5)]
+        [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
         public void ScanLibrary(int libraryId, bool forceUpdate)
         {
             _forceUpdate = forceUpdate;
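Note on the `ScannerService` change above: `[DisableConcurrentExecution(5)]` takes a distributed lock keyed on the job method, so only one `ScanLibrary` job executes at a time and a second invocation waits at most 5 seconds for the lock; combined with `[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]`, a duplicate that times out is deleted instead of being retried or left in the failed state. The filters only apply when the method runs through Hangfire, not on a direct call. A small standalone illustration (the class and enqueue calls are hypothetical; the attributes mirror the ones added in this diff):

```csharp
using Hangfire;

public class LibraryScanJobs
{
    [DisableConcurrentExecution(5)] // wait up to 5 s for the per-method distributed lock
    [AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
    public void ScanLibrary(int libraryId, bool forceUpdate)
    {
        // long-running scan work would go here
    }
}

public static class ScanExample
{
    public static void EnqueueTwice()
    {
        BackgroundJob.Enqueue<LibraryScanJobs>(x => x.ScanLibrary(1, false));
        // If the first job still holds the lock 5 s after this one starts, it is deleted, not retried.
        BackgroundJob.Enqueue<LibraryScanJobs>(x => x.ScanLibrary(1, false));
    }
}
```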