Misc Polish (#1569)

* Introduced a lock around DB work during the scan, which should help reduce the concurrency issues (a sketch of the general pattern follows this list)

* Don't allow multiple theme scans to occur

* Fixed bulk actions missing some actions after the nested actionable menu changes

* Refactored the scan loop to process series synchronously to avoid concurrency issues. After the first loop, there is no real performance impact.

* Updated the LibraryWatcher so that, after repeated internal buffer overflows, it suspends watching for a full hour to allow any in-progress downloads to complete.

* Removed the Semaphore as it's no longer needed

* Updated the logger output template to explicitly identify messages as coming from Kavita (useful if you're pushing logs to Seq)

* Fixed a broken test

* Fixed ReleaseYear not populating, caused by a contributor's change to how ReleaseYear is calculated.

* Ensured that when Scan Folder runs, the same tasks don't get double-enqueued.

* Fixed user settings not loading the correct tab

* Changed the "Release Year" label to "Release"

* Added more refresh hooks in the reader to help ensure faster refreshes

* Reset images between chapter loads to flush the previous image faster. Don't show a broken image icon while an image is still loading.

* Fixed the prefetcher not properly loading the correct images, which caused a bit of lag between chapter loads.

* Code smells
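
For the DB-lock bullet above, here is a minimal, hedged sketch of the general pattern: a single-slot gate that serializes async database commits issued from the scan. The type `ScanDbGate` and method `RunSerializedAsync` are made up for illustration; the commit's actual lock lives inside the scanner services and may differ.

```csharp
// Illustrative sketch only, not Kavita's shipped code: a single-slot
// SemaphoreSlim that serializes DB commits issued from the scan loop so that
// only one write is in flight at a time.
using System;
using System.Threading;
using System.Threading.Tasks;

public static class ScanDbGate
{
    private static readonly SemaphoreSlim Gate = new SemaphoreSlim(1, 1);

    public static async Task RunSerializedAsync(Func<Task> dbWork)
    {
        await Gate.WaitAsync();
        try
        {
            // e.g. await unitOfWork.CommitAsync() for the series just processed
            await dbWork();
        }
        finally
        {
            Gate.Release();
        }
    }
}
```
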
Joe Milazzo 2022-10-04 19:40:34 -05:00 committed by GitHub
parent 097ec32842
commit 58bbba29cc
17 changed files with 208 additions and 45 deletions

View File

@ -410,7 +410,7 @@ public class BookmarkServiceTests
#region Misc
[Fact]
public async Task ShouldNotDeleteBookmarkOnChapterDeletion()
public async Task ShouldNotDeleteBookmark_OnChapterDeletion()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{CacheDirectory}1/0001.jpg", new MockFileData("123"));
@ -462,8 +462,6 @@ public class BookmarkServiceTests
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
var bookmarkService = Create(ds);
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Bookmarks);
var vol = await _unitOfWork.VolumeRepository.GetVolumeAsync(1);
vol.Chapters = new List<Chapter>();
@ -475,5 +473,72 @@ public class BookmarkServiceTests
Assert.NotNull(await _unitOfWork.UserRepository.GetBookmarkAsync(1));
}
[Fact]
public async Task ShouldNotDeleteBookmark_OnVolumeDeletion()
{
var filesystem = CreateFileSystem();
filesystem.AddFile($"{CacheDirectory}1/0001.jpg", new MockFileData("123"));
filesystem.AddFile($"{BookmarkDirectory}1/1/0001.jpg", new MockFileData("123"));
// Delete all Series to reset state
await ResetDB();
var series = new Series()
{
Name = "Test",
Library = new Library()
{
Name = "Test LIb",
Type = LibraryType.Manga,
},
Volumes = new List<Volume>()
{
new Volume()
{
Chapters = new List<Chapter>()
{
new Chapter()
{
}
}
}
}
};
_context.Series.Add(series);
_context.AppUser.Add(new AppUser()
{
UserName = "Joe",
Bookmarks = new List<AppUserBookmark>()
{
new AppUserBookmark()
{
Page = 1,
ChapterId = 1,
FileName = $"1/1/0001.jpg",
SeriesId = 1,
VolumeId = 1
}
}
});
await _context.SaveChangesAsync();
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(1, AppUserIncludes.Bookmarks);
Assert.NotEmpty(user.Bookmarks);
series.Volumes = new List<Volume>();
_unitOfWork.SeriesRepository.Update(series);
await _unitOfWork.CommitAsync();
var ds = new DirectoryService(Substitute.For<ILogger<DirectoryService>>(), filesystem);
Assert.Single(ds.GetFiles(BookmarkDirectory, searchOption:SearchOption.AllDirectories));
Assert.NotNull(await _unitOfWork.UserRepository.GetBookmarkAsync(1));
}
#endregion
}

View File

@ -53,7 +53,7 @@ internal class MockReadingItemService : IReadingItemService
public void Extract(string fileFilePath, string targetDirectory, MangaFormat format, int imageCount = 1)
{
throw new System.NotImplementedException();
throw new NotImplementedException();
}
public ParserInfo Parse(string path, string rootPath, LibraryType type)
@ -245,11 +245,11 @@ public class ParseScannedFilesTests
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
void TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
Task TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
{
var skippedScan = parsedInfo.Item1;
var parsedFiles = parsedInfo.Item2;
if (parsedFiles.Count == 0) return;
if (parsedFiles.Count == 0) return Task.CompletedTask;
var foundParsedSeries = new ParsedSeries()
{
@ -259,6 +259,7 @@ public class ParseScannedFilesTests
};
parsedSeries.Add(foundParsedSeries, parsedFiles);
return Task.CompletedTask;
}

View File

@ -213,9 +213,11 @@ public class LibraryController : BaseApiController
{
var userId = await _unitOfWork.UserRepository.GetUserIdByApiKeyAsync(dto.ApiKey);
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
// Validate user has Admin privileges
var isAdmin = await _unitOfWork.UserRepository.IsUserAdminAsync(user);
if (!isAdmin) return BadRequest("API key must belong to an admin");
if (dto.FolderPath.Contains("..")) return BadRequest("Invalid Path");
dto.FolderPath = Services.Tasks.Scanner.Parser.Parser.NormalizePath(dto.FolderPath);

View File

@ -11,8 +11,8 @@ public class AppUserBookmark : IEntityDate
{
public int Id { get; set; }
public int Page { get; set; }
public int VolumeId { get; set; }
public int SeriesId { get; set; }
public int VolumeId { get; set; }
public int ChapterId { get; set; }
/// <summary>

View File

@ -40,7 +40,7 @@ public static class LogLevelOptions
public static LoggerConfiguration CreateConfig(LoggerConfiguration configuration)
{
const string outputTemplate = "[{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} {CorrelationId} {ThreadId}] [{Level}] {SourceContext} {Message:lj}{NewLine}{Exception}";
const string outputTemplate = "[Kavita] [{Timestamp:yyyy-MM-dd HH:mm:ss.fff zzz} {CorrelationId} {ThreadId}] [{Level}] {SourceContext} {Message:lj}{NewLine}{Exception}";
return configuration
.MinimumLevel
.ControlledBy(LogLevelSwitch)
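
For reference, an event rendered with the new template looks roughly like the following; the timestamp, correlation id, thread id, and message are illustrative values, not real output from this commit:

```
[Kavita] [2022-10-04 19:40:34.123 -05:00 a1b2c3 14] [Information] API.Services.TaskScheduler Starting Site Theme scan
```
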

View File

@ -158,7 +158,13 @@ public class TaskScheduler : ITaskScheduler
public void ScanSiteThemes()
{
_logger.LogInformation("Starting Site Theme scan");
if (HasAlreadyEnqueuedTask("ThemeService", "Scan", Array.Empty<object>(), ScanQueue))
{
_logger.LogInformation("A Theme Scan is already running");
return;
}
_logger.LogInformation("Enqueueing Site Theme scan");
BackgroundJob.Enqueue(() => _themeService.Scan());
}
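
`HasAlreadyEnqueuedTask` is used above but its implementation is not part of this diff. Below is a simplified, hedged sketch of how such a check can be written against Hangfire's monitoring API; the real Kavita helper also inspects scheduled jobs and may differ in detail.

```csharp
// Simplified illustration of a "has this job already been enqueued" check
// using Hangfire's monitoring API. Not the exact Kavita implementation.
using System.Linq;
using Hangfire;

public static class EnqueueGuard
{
    public static bool HasAlreadyEnqueuedTask(string className, string methodName,
        object[] args, string queue = "default")
    {
        var enqueued = JobStorage.Current.GetMonitoringApi()
            .EnqueuedJobs(queue, 0, int.MaxValue);

        // Match on declaring type, method name, and the argument list.
        return enqueued.Any(j =>
            j.Value.Job != null &&
            j.Value.Job.Method.DeclaringType?.Name == className &&
            j.Value.Job.Method.Name == methodName &&
            j.Value.Job.Args.SequenceEqual(args));
    }
}
```
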

View File

@ -51,6 +51,14 @@ public class LibraryWatcher : ILibraryWatcher
/// <remarks>The Job will be enqueued instantly</remarks>
private readonly TimeSpan _queueWaitTime;
/// <summary>
/// Counts how many times the internal buffer became full within a time frame. Used to reschedule the LibraryWatcher to resume monitoring much later rather than instantly
/// </summary>
private int _bufferFullCounter = 0;
private DateTime _lastBufferOverflow = DateTime.MinValue;
public LibraryWatcher(IDirectoryService directoryService, IUnitOfWork unitOfWork, ILogger<LibraryWatcher> logger, IScannerService scannerService, IHostEnvironment environment)
{
@ -118,6 +126,9 @@ public class LibraryWatcher : ILibraryWatcher
public async Task RestartWatching()
{
_logger.LogDebug("[LibraryWatcher] Restarting watcher");
UpdateBufferOverflow();
StopWatching();
await StartWatching();
}
@ -151,6 +162,15 @@ public class LibraryWatcher : ILibraryWatcher
private void OnError(object sender, ErrorEventArgs e)
{
_logger.LogError(e.GetException(), "[LibraryWatcher] An error occurred, likely too many changes occurred at once or the folder being watched was deleted. Restarting Watchers");
_bufferFullCounter += 1;
_lastBufferOverflow = DateTime.Now;
if (_bufferFullCounter >= 3)
{
_logger.LogInformation("[LibraryWatcher] Internal buffer has been overflown multiple times in past 10 minutes. Suspending file watching for an hour");
BackgroundJob.Schedule(() => RestartWatching(), TimeSpan.FromHours(1));
return;
}
Task.Run(RestartWatching);
}
@ -162,8 +182,11 @@ public class LibraryWatcher : ILibraryWatcher
/// <remarks>This is public only because Hangfire will invoke it. Do not call external to this class.</remarks>
/// <param name="filePath">File or folder that changed</param>
/// <param name="isDirectoryChange">If the change is on a directory and not a file</param>
// ReSharper disable once MemberCanBePrivate.Global
public async Task ProcessChange(string filePath, bool isDirectoryChange = false)
{
UpdateBufferOverflow();
var sw = Stopwatch.StartNew();
_logger.LogDebug("[LibraryWatcher] Processing change of {FilePath}", filePath);
try
@ -232,4 +255,15 @@ public class LibraryWatcher : ILibraryWatcher
// Select the first folder and join with library folder, this should give us the folder to scan.
return Parser.Parser.NormalizePath(_directoryService.FileSystem.Path.Join(libraryFolder, rootFolder.First()));
}
private void UpdateBufferOverflow()
{
if (_bufferFullCounter == 0) return;
// If the last buffer overflow is over 5 mins back, we can remove a buffer count
if (_lastBufferOverflow < DateTime.Now.Subtract(TimeSpan.FromMinutes(5)))
{
_bufferFullCounter = Math.Max(0, _bufferFullCounter - 1);
_lastBufferOverflow = DateTime.Now;
}
}
}

View File

@ -223,7 +223,7 @@ public class ParseScannedFiles
/// <returns></returns>
public async Task ScanLibrariesForSeries(LibraryType libraryType,
IEnumerable<string> folders, string libraryName, bool isLibraryScan,
IDictionary<string, IList<SeriesModified>> seriesPaths, Action<Tuple<bool, IList<ParserInfo>>> processSeriesInfos, bool forceCheck = false)
IDictionary<string, IList<SeriesModified>> seriesPaths, Func<Tuple<bool, IList<ParserInfo>>, Task> processSeriesInfos, bool forceCheck = false)
{
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.FileScanProgressEvent("File Scan Starting", libraryName, ProgressEventType.Started));
@ -242,7 +242,7 @@ public class ParseScannedFiles
Series = fp.SeriesName,
Format = fp.Format,
}).ToList();
processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(true, parsedInfos));
await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(true, parsedInfos));
_logger.LogDebug("Skipped File Scan for {Folder} as it hasn't changed since last scan", folder);
return;
}
@ -280,7 +280,7 @@ public class ParseScannedFiles
{
if (scannedSeries[series].Count > 0 && processSeriesInfos != null)
{
processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(false, scannedSeries[series]));
await processSeriesInfos.Invoke(new Tuple<bool, IList<ParserInfo>>(false, scannedSeries[series]));
}
}
}, forceCheck);

View File

@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using API.Data;
using API.Data.Metadata;
@ -48,8 +49,6 @@ public class ProcessSeries : IProcessSeries
private volatile IList<Person> _people;
private volatile IList<Tag> _tags;
public ProcessSeries(IUnitOfWork unitOfWork, ILogger<ProcessSeries> logger, IEventHub eventHub,
IDirectoryService directoryService, ICacheHelper cacheHelper, IReadingItemService readingItemService,
IFileService fileService, IMetadataService metadataService, IWordCountAnalyzerService wordCountAnalyzerService)
@ -167,7 +166,9 @@ public class ProcessSeries : IProcessSeries
catch (Exception ex)
{
await _unitOfWork.RollbackAsync();
_logger.LogCritical(ex, "[ScannerService] There was an issue writing to the database for series {@SeriesName}", series.Name);
_logger.LogCritical(ex,
"[ScannerService] There was an issue writing to the database for series {@SeriesName}",
series.Name);
await _eventHub.SendMessageAsync(MessageFactory.Error,
MessageFactory.ErrorEvent($"There was an issue writing to the DB for Series {series}",
@ -234,7 +235,7 @@ public class ProcessSeries : IProcessSeries
var chapters = series.Volumes.SelectMany(volume => volume.Chapters).ToList();
// Update Metadata based on Chapter metadata
series.Metadata.ReleaseYear = chapters.Min(c => c.ReleaseDate.Year);
series.Metadata.ReleaseYear = chapters.Select(v => v.ReleaseDate.Year).Where(y => y >= 1000).Min();
if (series.Metadata.ReleaseYear < 1000)
{
@ -439,6 +440,7 @@ public class ProcessSeries : IProcessSeries
_logger.LogDebug("[ScannerService] Updating {DistinctVolumes} volumes on {SeriesName}", distinctVolumes.Count, series.Name);
foreach (var volumeNumber in distinctVolumes)
{
_logger.LogDebug("[ScannerService] Looking up volume for {volumeNumber}", volumeNumber);
var volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber);
if (volume == null)
{

View File

@ -102,6 +102,12 @@ public class ScannerService : IScannerService
var seriesId = await _unitOfWork.SeriesRepository.GetSeriesIdByFolder(folder);
if (seriesId > 0)
{
if (TaskScheduler.HasAlreadyEnqueuedTask(Name, "ScanSeries",
new object[] {seriesId, true}))
{
_logger.LogInformation("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this series. Dropping request", folder);
return;
}
BackgroundJob.Enqueue(() => ScanSeries(seriesId, true));
return;
}
@ -119,6 +125,12 @@ public class ScannerService : IScannerService
var library = libraries.FirstOrDefault(l => l.Folders.Select(Scanner.Parser.Parser.NormalizePath).Contains(libraryFolder));
if (library != null)
{
if (TaskScheduler.HasAlreadyEnqueuedTask(Name, "ScanLibrary",
new object[] {library.Id, false}))
{
_logger.LogInformation("[ScannerService] Scan folder invoked for {Folder} but a task is already queued for this library. Dropping request", folder);
return;
}
BackgroundJob.Enqueue(() => ScanLibrary(library.Id, false));
}
}
@ -175,13 +187,11 @@ public class ScannerService : IScannerService
}
var parsedSeries = new Dictionary<ParsedSeries, IList<ParserInfo>>();
var processTasks = new List<Task>();
await _eventHub.SendMessageAsync(MessageFactory.NotificationProgress, MessageFactory.LibraryScanProgressEvent(library.Name, ProgressEventType.Started, series.Name));
await _processSeries.Prime();
void TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
async Task TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
{
var parsedFiles = parsedInfo.Item2;
if (parsedFiles.Count == 0) return;
@ -198,7 +208,7 @@ public class ScannerService : IScannerService
return;
}
processTasks.Add(_processSeries.ProcessSeriesAsync(parsedFiles, library));
await _processSeries.ProcessSeriesAsync(parsedFiles, library);
parsedSeries.Add(foundParsedSeries, parsedFiles);
}
@ -424,7 +434,7 @@ public class ScannerService : IScannerService
await _processSeries.Prime();
var processTasks = new List<Task>();
void TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
async Task TrackFiles(Tuple<bool, IList<ParserInfo>> parsedInfo)
{
var skippedScan = parsedInfo.Item1;
var parsedFiles = parsedInfo.Item2;
@ -452,7 +462,7 @@ public class ScannerService : IScannerService
seenSeries.Add(foundParsedSeries);
processTasks.Add(_processSeries.ProcessSeriesAsync(parsedFiles, library));
await _processSeries.ProcessSeriesAsync(parsedFiles, library);
}
@ -512,7 +522,7 @@ public class ScannerService : IScannerService
}
private async Task<long> ScanFiles(Library library, IEnumerable<string> dirs,
bool isLibraryScan, Action<Tuple<bool, IList<ParserInfo>>> processSeriesInfos = null, bool forceChecks = false)
bool isLibraryScan, Func<Tuple<bool, IList<ParserInfo>>, Task> processSeriesInfos = null, bool forceChecks = false)
{
var scanner = new ParseScannedFiles(_logger, _directoryService, _readingItemService, _eventHub);
var scanWatch = Stopwatch.StartNew();

View File

@ -543,12 +543,28 @@ export class ActionFactoryService {
});
}
private applyCallbackToList(list: Array<ActionItem<any>>, callback: (action: ActionItem<any>, data: any) => void): Array<ActionItem<any>> {
public applyCallbackToList(list: Array<ActionItem<any>>, callback: (action: ActionItem<any>, data: any) => void): Array<ActionItem<any>> {
const actions = list.map((a) => {
return { ...a };
});
actions.forEach((action) => this.applyCallback(action, callback));
return actions;
}
// Checks the whole tree for the action and returns true if it exists
public hasAction(actions: Array<ActionItem<any>>, action: Action) {
var actionFound = false;
if (actions.length === 0) return actionFound;
for (let i = 0; i < actions.length; i++)
{
if (actions[i].action === action) return true;
if (this.hasAction(actions[i].children, action)) return true;
}
return actionFound;
}
}

View File

@ -1,6 +1,6 @@
import { ChangeDetectionStrategy, ChangeDetectorRef, Component, Input, OnDestroy, OnInit } from '@angular/core';
import { Subject, takeUntil } from 'rxjs';
import { Action, ActionItem } from 'src/app/_services/action-factory.service';
import { Action, ActionFactoryService, ActionItem } from 'src/app/_services/action-factory.service';
import { BulkSelectionService } from '../bulk-selection.service';
@Component({
@ -24,14 +24,15 @@ export class BulkOperationsComponent implements OnInit, OnDestroy {
return Action;
}
constructor(public bulkSelectionService: BulkSelectionService, private readonly cdRef: ChangeDetectorRef) { }
constructor(public bulkSelectionService: BulkSelectionService, private readonly cdRef: ChangeDetectorRef,
private actionFactoryService: ActionFactoryService) { }
ngOnInit(): void {
this.bulkSelectionService.actions$.pipe(takeUntil(this.onDestory)).subscribe(actions => {
actions.forEach(a => a.callback = this.actionCallback.bind(this));
this.actions = actions;
this.hasMarkAsRead = this.actions.filter(act => act.action === Action.MarkAsRead).length > 0;
this.hasMarkAsUnread = this.actions.filter(act => act.action === Action.MarkAsUnread).length > 0;
// We need to do a recursive callback apply
this.actions = this.actionFactoryService.applyCallbackToList(actions, this.actionCallback.bind(this));
this.hasMarkAsRead = this.actionFactoryService.hasAction(this.actions, Action.MarkAsRead);
this.hasMarkAsUnread = this.actionFactoryService.hasAction(this.actions, Action.MarkAsUnread);
this.cdRef.markForCheck();
});
}
@ -46,9 +47,7 @@ export class BulkOperationsComponent implements OnInit, OnDestroy {
}
performAction(action: ActionItem<any>) {
if (typeof action.callback === 'function') {
action.callback(action, null);
}
this.actionCallback(action, null);
}
executeAction(action: Action) {

View File

@ -142,16 +142,17 @@ export class BulkSelectionService {
getActions(callback: (action: ActionItem<any>, data: any) => void) {
// checks if series is present. If so, returns only series actions
// else returns volume/chapter items
const allowedActions = [Action.AddToReadingList, Action.MarkAsRead, Action.MarkAsUnread, Action.AddToCollection, Action.Delete, Action.AddToWantToReadList, Action.RemoveFromWantToReadList];
const allowedActions = [Action.AddToReadingList, Action.MarkAsRead, Action.MarkAsUnread, Action.AddToCollection,
Action.Delete, Action.AddToWantToReadList, Action.RemoveFromWantToReadList];
if (Object.keys(this.selectedCards).filter(item => item === 'series').length > 0) {
return this.actionFactory.getSeriesActions(callback).filter(item => allowedActions.includes(item.action));
return this.applyFilterToList(this.actionFactory.getSeriesActions(callback), allowedActions);
}
if (Object.keys(this.selectedCards).filter(item => item === 'bookmark').length > 0) {
return this.actionFactory.getBookmarkActions(callback);
}
return this.actionFactory.getVolumeActions(callback).filter(item => allowedActions.includes(item.action));
return this.applyFilterToList(this.actionFactory.getVolumeActions(callback), allowedActions);
}
private debugLog(message: string, extraData?: any) {
@ -163,4 +164,29 @@ export class BulkSelectionService {
console.log(message);
}
}
private applyFilter(action: ActionItem<any>, allowedActions: Array<Action>) {
var ret = false;
if (action.action === Action.Submenu || allowedActions.includes(action.action)) {
// Keep submenus and explicitly allowed actions
ret = true;
}
if (action.children === null || action.children?.length === 0) return ret;
action.children = action.children.filter((childAction) => this.applyFilter(childAction, allowedActions));
return ret;
}
private applyFilterToList(list: Array<ActionItem<any>>, allowedActions: Array<Action>): Array<ActionItem<any>> {
const actions = list.map((a) => {
return { ...a };
});
return actions.filter(action => this.applyFilter(action, allowedActions));
}
}

View File

@ -1,7 +1,7 @@
<div class="row g-0 mb-4 mt-3">
<ng-container *ngIf="seriesMetadata.releaseYear > 0">
<div class="col-lg-1 col-md-4 col-sm-4 col-4 mb-3">
<app-icon-and-title label="Release Year" [clickable]="false" fontClasses="fa-regular fa-calendar" title="Release Year">
<app-icon-and-title label="Release" [clickable]="false" fontClasses="fa-regular fa-calendar" title="Release Year">
{{seriesMetadata.releaseYear}}
</app-icon-and-title>
</div>

View File

@ -66,11 +66,11 @@
'fit-to-height-double-offset': FittingOption === FITTING_OPTION.HEIGHT && ShouldRenderDoublePage,
'original-double-offset' : FittingOption === FITTING_OPTION.ORIGINAL && ShouldRenderDoublePage}"
[style.filter]="'brightness(' + generalSettingsForm.get('darkness')?.value + '%)' | safeStyle" (dblclick)="bookmarkPage($event)">
<img #image [src]="canvasImage.src" id="image-1"
<img alt=" " #image [src]="canvasImage.src" id="image-1"
class="{{getFittingOptionClass()}} {{readerMode === ReaderMode.LeftRight || readerMode === ReaderMode.UpDown ? '' : 'd-none'}} {{showClickOverlay ? 'blur' : ''}}">
<ng-container *ngIf="(this.canvasImage2.src !== '') && (readerService.imageUrlToPageNum(canvasImage2.src) <= maxPages - 1 && !isCoverImage())">
<img [src]="canvasImage2.src" id="image-2" class="image-2 {{getFittingOptionClass()}} {{readerMode === ReaderMode.LeftRight || readerMode === ReaderMode.UpDown ? '' : 'd-none'}} {{showClickOverlay ? 'blur' : ''}}"> <!-- {{ShouldRenderReverseDouble ? 'reverse' : ''}} -->
<img alt=" " [src]="canvasImage2.src" id="image-2" class="image-2 {{getFittingOptionClass()}} {{readerMode === ReaderMode.LeftRight || readerMode === ReaderMode.UpDown ? '' : 'd-none'}} {{showClickOverlay ? 'blur' : ''}}"> <!-- {{ShouldRenderReverseDouble ? 'reverse' : ''}} -->
</ng-container>
</div>

View File

@ -661,6 +661,8 @@ export class MangaReaderComponent implements OnInit, AfterViewInit, OnDestroy {
this.pageNum = 0;
this.pagingDirection = PAGING_DIRECTION.FORWARD;
this.inSetup = true;
this.canvasImage.src = '';
this.canvasImage2.src = '';
this.cdRef.markForCheck();
if (this.goToPageEvent) {
@ -1042,8 +1044,7 @@ export class MangaReaderComponent implements OnInit, AfterViewInit, OnDestroy {
this.isCoverImage()
|| this.isWideImage(this.canvasImagePrev)
) ? 2 : 1;
}
if (this.layoutMode === LayoutMode.DoubleReversed) {
} else if (this.layoutMode === LayoutMode.DoubleReversed) {
pageAmount = !(
this.isCoverImage()
|| this.isCoverImage(this.pageNum - 1)
@ -1300,13 +1301,14 @@ export class MangaReaderComponent implements OnInit, AfterViewInit, OnDestroy {
* and also maintains page info (wide image, etc) due to onload event.
*/
prefetch() {
for(let i = 1; i <= PREFETCH_PAGES - 3; i++) {
for(let i = 0; i <= PREFETCH_PAGES - 3; i++) {
const numOffset = this.pageNum + i;
if (numOffset > this.maxPages - 1) continue;
const index = numOffset % this.cachedImages.length;
const index = (numOffset % this.cachedImages.length + this.cachedImages.length) % this.cachedImages.length;
if (this.readerService.imageUrlToPageNum(this.cachedImages[index].src) !== numOffset) {
this.cachedImages[index].src = this.getPageUrl(numOffset);
this.cachedImages[index].onload = () => this.cdRef.markForCheck();
}
}

View File

@ -61,7 +61,7 @@ export class UserPreferencesComponent implements OnInit, OnDestroy {
{title: 'Theme', fragment: FragmentID.Theme},
{title: 'Devices', fragment: FragmentID.Devices},
];
active = this.tabs[0];
active = this.tabs[1];
opdsEnabled: boolean = false;
makeUrl: (val: string) => string = (val: string) => {return this.transformKeyToOpdsUrl(val)};
@ -87,7 +87,7 @@ export class UserPreferencesComponent implements OnInit, OnDestroy {
if (tab.length > 0) {
this.active = tab[0];
} else {
this.active = this.tabs[0]; // Default to first tab
this.active = this.tabs[1]; // Default to preferences
}
this.cdRef.markForCheck();
});