Merge pull request #58 from Kareadita/feature/search

Search
Joseph Milazzo 2021-02-15 13:15:46 -06:00 committed by GitHub
commit 90318e8e78
19 changed files with 235 additions and 26 deletions

View File

@@ -2,14 +2,18 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using API.Data;
using API.DTOs;
using API.Entities;
using API.Extensions;
using API.Helpers;
using API.Interfaces;
using AutoMapper;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;

namespace API.Controllers
@@ -22,16 +26,18 @@ namespace API.Controllers
private readonly IMapper _mapper;
private readonly ITaskScheduler _taskScheduler;
private readonly IUnitOfWork _unitOfWork;
private readonly DataContext _dataContext; // TODO: Remove, only for FTS prototyping

public LibraryController(IDirectoryService directoryService,
ILogger<LibraryController> logger, IMapper mapper, ITaskScheduler taskScheduler,
- IUnitOfWork unitOfWork)
+ IUnitOfWork unitOfWork, DataContext dataContext)
{
_directoryService = directoryService;
_logger = logger;
_mapper = mapper;
_taskScheduler = taskScheduler;
_unitOfWork = unitOfWork;
_dataContext = dataContext;
}
/// <summary>
@@ -213,5 +219,24 @@ namespace API.Controllers
return Ok();
}
[HttpGet("search")]
public async Task<ActionResult<IEnumerable<SearchResultDto>>> Search(string queryString)
{
//NOTE: What about normalizing search query and only searching against normalizedname in Series?
// So One Punch would match One-Punch
// This also means fewer indexes are needed.
queryString = queryString.Replace(@"%", "");
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
// Get libraries user has access to
var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id)).ToList();
if (!libraries.Any()) return BadRequest("User does not have access to any libraries");
var series = await _unitOfWork.SeriesRepository.SearchSeries(libraries.Select(l => l.Id).ToArray(), queryString);
return Ok(series);
}
}
}
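The NOTE above floats matching against Series.NormalizedName so that "One Punch" also finds "One-Punch". A minimal sketch of that variant at the controller level, reusing the Parser.Parser.Normalize helper referenced later in ScannerService; the SearchSeriesByNormalizedName repository method is hypothetical, not part of this PR:

    [HttpGet("search")]
    public async Task<ActionResult<IEnumerable<SearchResultDto>>> Search(string queryString)
    {
        // Normalize once here so the repository only has to compare against NormalizedName.
        var normalized = Parser.Parser.Normalize(queryString ?? string.Empty);
        var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
        var libraries = (await _unitOfWork.LibraryRepository.GetLibrariesForUserIdAsync(user.Id)).ToList();
        if (!libraries.Any()) return BadRequest("User does not have access to any libraries");

        // Hypothetical method: would run EF.Functions.Like(s.NormalizedName, $"%{normalized}%")
        // instead of matching against Name and OriginalName.
        var series = await _unitOfWork.SeriesRepository.SearchSeriesByNormalizedName(
            libraries.Select(l => l.Id).ToArray(), normalized);
        return Ok(series);
    }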

View File

@@ -33,13 +33,23 @@ namespace API.Controllers
// Temp let's iterate the directory each call to get next image
var chapter = await _cacheService.Ensure(chapterId);
- if (chapter == null) return BadRequest("There was an issue finding image file for reading.");
+ if (chapter == null) return BadRequest("There was an issue finding image file for reading");
// TODO: This code works, but might need bounds checking. UI can send bad data
// if (page >= chapter.Pages)
// {
// page = chapter.Pages - 1;
// } else if (page < 0)
// {
// page = 0;
// }
var (path, mangaFile) = await _cacheService.GetCachedPagePath(chapter, page);
if (string.IsNullOrEmpty(path)) return BadRequest($"No such image for page {page}");
var file = await _directoryService.ReadImageAsync(path);
file.Page = page;
file.MangaFileName = mangaFile.FilePath;
file.NeedsSplitting = file.Width > file.Height;
return Ok(file);
}
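The commented-out block above hints at clamping out-of-range pages rather than rejecting them. A minimal sketch of that idea as a small helper, assuming chapter.Pages is the authoritative page count; the exact bounds may still need verification against how pages are numbered:

    // Hypothetical helper: clamp a requested page into [0, chapter.Pages - 1].
    private static int ClampPage(int page, int totalPages)
    {
        if (totalPages <= 0) return 0;
        if (page < 0) return 0;
        return page >= totalPages ? totalPages - 1 : page;
    }

    // Usage inside GetImage, before resolving the cached path:
    // page = ClampPage(page, chapter.Pages);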
@@ -51,25 +61,33 @@ namespace API.Controllers
if (user.Progresses == null) return Ok(0);
var progress = user.Progresses.SingleOrDefault(x => x.AppUserId == user.Id && x.ChapterId == chapterId);
- if (progress != null) return Ok(progress.PagesRead);
- return Ok(0);
+ return Ok(progress?.PagesRead ?? 0);
}

[HttpPost("bookmark")]
public async Task<ActionResult> Bookmark(BookmarkDto bookmarkDto)
{
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
- _logger.LogInformation($"Saving {user.UserName} progress for Chapter {bookmarkDto.ChapterId} to page {bookmarkDto.PageNum}");
+ _logger.LogInformation("Saving {UserName} progress for Chapter {ChapterId} to page {PageNum}", user.UserName, bookmarkDto.ChapterId, bookmarkDto.PageNum);
// Don't let user bookmark past total pages.
var chapter = await _unitOfWork.VolumeRepository.GetChapterAsync(bookmarkDto.ChapterId);
if (bookmarkDto.PageNum > chapter.Pages)
{
return BadRequest("Can't bookmark past max pages");
}
if (bookmarkDto.PageNum < 0)
{
return BadRequest("Can't bookmark less than 0");
}
- // TODO: Don't let user bookmark past total pages.
user.Progresses ??= new List<AppUserProgress>();
var userProgress = user.Progresses.SingleOrDefault(x => x.ChapterId == bookmarkDto.ChapterId && x.AppUserId == user.Id);
if (userProgress == null)
{
user.Progresses.Add(new AppUserProgress
{
PagesRead = bookmarkDto.PageNum,

View File

@@ -9,7 +9,8 @@
public int Height { get; init; }
public string Format { get; init; }
public byte[] Content { get; init; }
- public int Chapter { get; set; }
+ //public int Chapter { get; set; }
public string MangaFileName { get; set; }
public bool NeedsSplitting { get; set; }
}
}

View File

@@ -0,0 +1,7 @@
namespace API.DTOs
{
public class SearchQueryDto
{
public string QueryString { get; init; }
}
}

View File

@@ -0,0 +1,16 @@
namespace API.DTOs
{
public class SearchResultDto
{
public int SeriesId { get; init; }
public string Name { get; init; }
public string OriginalName { get; init; }
public string SortName { get; init; }
public byte[] CoverImage { get; init; } // This should be optional or a thumbImage (much smaller)
// Grouping information
public string LibraryName { get; set; }
public int LibraryId { get; set; }
}
}

View File

@@ -60,6 +60,15 @@ namespace API.Data
return await _context.SaveChangesAsync() > 0;
}
public async Task<IEnumerable<Library>> GetLibrariesForUserIdAsync(int userId)
{
return await _context.Library
.Include(l => l.AppUsers)
.Where(l => l.AppUsers.Select(ap => ap.Id).Contains(userId))
.AsNoTracking()
.ToListAsync();
}
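The Include here loads every library's AppUsers collection, yet the caller in LibraryController only reads library Ids. If no caller consumes the navigation, a leaner variant (a sketch under that assumption, worth checking against actual callers) lets EF Core translate the membership test without materializing the users:

    public async Task<IEnumerable<Library>> GetLibrariesForUserIdAsync(int userId)
    {
        // Filtering over the navigation does not require Include; EF Core
        // translates this to a join/EXISTS without loading AppUsers.
        return await _context.Library
            .Where(l => l.AppUsers.Any(user => user.Id == userId))
            .AsNoTracking()
            .ToListAsync();
    }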
public async Task<IEnumerable<LibraryDto>> GetLibraryDtosAsync()
{
return await _context.Library

View File

@@ -9,6 +9,7 @@ using API.Interfaces;
using AutoMapper;
using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;

namespace API.Data
{
@@ -16,11 +17,13 @@ namespace API.Data
{
private readonly DataContext _context;
private readonly IMapper _mapper;
private readonly ILogger _logger;

- public SeriesRepository(DataContext context, IMapper mapper)
+ public SeriesRepository(DataContext context, IMapper mapper, ILogger logger)
{
_context = context;
_mapper = mapper;
_logger = logger;
}

public void Add(Series series)
@@ -74,7 +77,25 @@ namespace API.Data
await AddSeriesModifiers(userId, series);
- Console.WriteLine("Processed GetSeriesDtoForLibraryIdAsync in {0} milliseconds", sw.ElapsedMilliseconds);
+ _logger.LogDebug("Processed GetSeriesDtoForLibraryIdAsync in {ElapsedMilliseconds} milliseconds", sw.ElapsedMilliseconds);
return series;
}
public async Task<IEnumerable<SearchResultDto>> SearchSeries(int[] libraryIds, string searchQuery)
{
var sw = Stopwatch.StartNew();
var series = await _context.Series
.Where(s => libraryIds.Contains(s.LibraryId))
.Where(s => EF.Functions.Like(s.Name, $"%{searchQuery}%")
|| EF.Functions.Like(s.OriginalName, $"%{searchQuery}%"))
.Include(s => s.Library) // NOTE: Is there a way to do this faster?
.OrderBy(s => s.SortName)
.AsNoTracking()
.ProjectTo<SearchResultDto>(_mapper.ConfigurationProvider)
.ToListAsync();
_logger.LogDebug("Processed SearchSeries in {ElapsedMilliseconds} milliseconds", sw.ElapsedMilliseconds);
return series;
}
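On the NOTE in SearchSeries: when a query ends in ProjectTo, EF Core builds the join for src.Library.Name from the projection itself, so the Include has no effect on the generated SQL. A sketch of the trimmed query (same results assumed; worth confirming against the SQL log):

    var series = await _context.Series
        .Where(s => libraryIds.Contains(s.LibraryId))
        .Where(s => EF.Functions.Like(s.Name, $"%{searchQuery}%")
                    || EF.Functions.Like(s.OriginalName, $"%{searchQuery}%"))
        .OrderBy(s => s.SortName)
        .AsNoTracking()
        .ProjectTo<SearchResultDto>(_mapper.ConfigurationProvider) // projection pulls Library.Name via the join
        .ToListAsync();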

View File

@@ -3,6 +3,7 @@ using API.Entities;
using API.Interfaces;
using AutoMapper;
using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.Logging;

namespace API.Data
{
@@ -11,15 +12,17 @@ namespace API.Data
private readonly DataContext _context;
private readonly IMapper _mapper;
private readonly UserManager<AppUser> _userManager;
private readonly ILogger<UnitOfWork> _seriesLogger;

- public UnitOfWork(DataContext context, IMapper mapper, UserManager<AppUser> userManager)
+ public UnitOfWork(DataContext context, IMapper mapper, UserManager<AppUser> userManager, ILogger<UnitOfWork> seriesLogger)
{
_context = context;
_mapper = mapper;
_userManager = userManager;
_seriesLogger = seriesLogger;
}

- public ISeriesRepository SeriesRepository => new SeriesRepository(_context, _mapper);
+ public ISeriesRepository SeriesRepository => new SeriesRepository(_context, _mapper, _seriesLogger);
public IUserRepository UserRepository => new UserRepository(_context, _userManager);
public ILibraryRepository LibraryRepository => new LibraryRepository(_context, _mapper);
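Passing ILogger&lt;UnitOfWork&gt; into SeriesRepository means its entries log under the UnitOfWork category. A sketch of an alternative, assuming an ILoggerFactory dependency is acceptable here, that gives each repository its own category while keeping the repository's ILogger constructor parameter:

    private readonly ILoggerFactory _loggerFactory;

    public UnitOfWork(DataContext context, IMapper mapper, UserManager<AppUser> userManager, ILoggerFactory loggerFactory)
    {
        _context = context;
        _mapper = mapper;
        _userManager = userManager;
        _loggerFactory = loggerFactory;
    }

    // Logs under "API.Data.SeriesRepository" instead of "API.Data.UnitOfWork".
    public ISeriesRepository SeriesRepository =>
        new SeriesRepository(_context, _mapper, _loggerFactory.CreateLogger<SeriesRepository>());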

View File

@@ -30,6 +30,7 @@ namespace API.Entities
public DateTime Created { get; set; }
public DateTime LastModified { get; set; }
public byte[] CoverImage { get; set; }
// NOTE: Do I want to store a thumbImage for search results?
/// <summary>
/// Sum of all Volume page counts
/// </summary>

View File

@@ -0,0 +1,17 @@
using System.Text.Json;
using API.Helpers;
using Microsoft.AspNetCore.Http;
namespace API.Extensions
{
public static class HttpExtensions
{
public static void AddPaginationHeader(this HttpResponse response, int currentPage,
int itemsPerPage, int totalItems, int totalPages)
{
var paginationHeader = new PaginationHeader(currentPage, itemsPerPage, totalItems, totalPages);
response.Headers.Add("Pagination", JsonSerializer.Serialize(paginationHeader));
response.Headers.Add("Access-Control-Expose-Headers", "Pagination");
}
}
}
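With no JsonSerializerOptions supplied, System.Text.Json keeps the PascalCase property names, so the client sees a header roughly like Pagination: {"CurrentPage":1,"ItemsPerPage":10,"TotalItems":57,"TotalPages":6} (values illustrative). If camelCase keys are preferred on the client, the Serialize call above could opt in; this is a sketch of an option, not something this PR does:

    var options = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase };
    response.Headers.Add("Pagination", JsonSerializer.Serialize(paginationHeader, options));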

View File

@@ -23,6 +23,13 @@ namespace API.Helpers
CreateMap<AppUserPreferences, UserPreferencesDto>();
CreateMap<Series, SearchResultDto>()
.ForMember(dest => dest.SeriesId,
opt => opt.MapFrom(src => src.Id))
.ForMember(dest => dest.LibraryName,
opt => opt.MapFrom(src => src.Library.Name));
CreateMap<Library, LibraryDto>()
.ForMember(dest => dest.Folders,
opt =>

API/Helpers/PagedList.cs Normal file (+32)
View File

@@ -0,0 +1,32 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
namespace API.Helpers
{
public class PagedList<T> : List<T>
{
public PagedList(IEnumerable<T> items, int count, int pageNumber, int pageSize)
{
CurrentPage = pageNumber;
TotalPages = (int) Math.Ceiling(count / (double) pageSize);
PageSize = pageSize;
TotalCount = count;
AddRange(items);
}
public int CurrentPage { get; set; }
public int TotalPages { get; set; }
public int PageSize { get; set; }
public int TotalCount { get; set; }
public static async Task<PagedList<T>> CreateAsync(IQueryable<T> source, int pageNumber, int pageSize)
{
var count = await source.CountAsync();
var items = await source.Skip((pageNumber - 1) * pageSize).Take(pageSize).ToListAsync();
return new PagedList<T>(items, count, pageNumber, pageSize);
}
}
}

View File

@@ -0,0 +1,18 @@
namespace API.Helpers
{
public class PaginationHeader
{
public PaginationHeader(int currentPage, int itemsPerPage, int totalItems, int totalPages)
{
CurrentPage = currentPage;
ItemsPerPage = itemsPerPage;
TotalItems = totalItems;
TotalPages = totalPages;
}
public int CurrentPage { get; set; }
public int ItemsPerPage { get; set; }
public int TotalItems { get; set; }
public int TotalPages { get; set; }
}
}

API/Helpers/UserParams.cs Normal file (+15)
View File

@@ -0,0 +1,15 @@
namespace API.Helpers
{
public class UserParams
{
private const int MaxPageSize = 50;
public int PageNumber { get; set; } = 1;
private int _pageSize = 10;
public int PageSize
{
get => _pageSize;
set => _pageSize = (value > MaxPageSize) ? MaxPageSize : value;
}
}
}
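No controller wires these pagination helpers up yet in this PR. A sketch of how UserParams, PagedList, and AddPaginationHeader typically fit together; the GetSeriesPage action and the queryable repository method are hypothetical:

    [HttpGet("series")]
    public async Task<ActionResult<IEnumerable<SeriesDto>>> GetSeriesPage(int libraryId, [FromQuery] UserParams userParams)
    {
        // Repository would expose an IQueryable so paging happens in SQL, not in memory.
        IQueryable<SeriesDto> query = _unitOfWork.SeriesRepository.GetSeriesDtoForLibraryIdQueryable(libraryId);

        var page = await PagedList<SeriesDto>.CreateAsync(query, userParams.PageNumber, userParams.PageSize);

        // Surface the paging metadata to the client via the Pagination header.
        Response.AddPaginationHeader(page.CurrentPage, page.PageSize, page.TotalCount, page.TotalPages);
        return Ok(page);
    }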

View File

@@ -16,5 +16,6 @@ namespace API.Interfaces
Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName);
Task<IEnumerable<Library>> GetLibrariesAsync();
Task<bool> DeleteLibrary(int libraryId);
Task<IEnumerable<Library>> GetLibrariesForUserIdAsync(int userId);
}
}

View File

@@ -11,7 +11,19 @@ namespace API.Interfaces
void Update(Series series);
Task<Series> GetSeriesByNameAsync(string name);
Series GetSeriesByName(string name);
/// <summary>
/// Adds user information like progress, ratings, etc
/// </summary>
/// <param name="libraryId"></param>
/// <param name="userId"></param>
/// <returns></returns>
Task<IEnumerable<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId);
/// <summary>
/// Does not add user information like progress, ratings, etc.
/// </summary>
/// <param name="libraryIds"></param>
/// <returns></returns>
Task<IEnumerable<SearchResultDto>> SearchSeries(int[] libraryIds, string searchQuery);
Task<IEnumerable<Series>> GetSeriesForLibraryIdAsync(int libraryId);
Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId);
IEnumerable<Volume> GetVolumes(int seriesId);

View File

@@ -106,12 +106,18 @@ namespace API.Services
var chapterFiles = chapter.Files ?? await _unitOfWork.VolumeRepository.GetFilesForChapter(chapter.Id);
foreach (var mangaFile in chapterFiles)
{
- if (page < (mangaFile.NumberOfPages + pagesSoFar))
+ if (page <= (mangaFile.NumberOfPages + pagesSoFar))
{
var path = GetCachePath(chapter.Id);
var files = _directoryService.GetFiles(path, Parser.Parser.ImageFileExtensions);
Array.Sort(files, _numericComparer);
// Since the array is 0-based, we need to take that into account (only affects the last image)
if (page == files.Length)
{
return (files.ElementAt(page - 1 - pagesSoFar), mangaFile);
}
return (files.ElementAt(page - pagesSoFar), mangaFile);
}
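A worked example of the boundary change: for a single 10-page file with pagesSoFar = 0, a request for page 10 now passes the new <= check, hits the page == files.Length branch, and resolves to index 9, the last image; the old < check skipped the file entirely. A small illustrative snippet with assumed values, not project code:

    // Mirrors the index math above for a 10-page, single-file chapter (needs System.Linq).
    const int numberOfPages = 10;
    const int pagesSoFar = 0;
    var files = Enumerable.Range(1, numberOfPages).Select(i => $"{i:000}.jpg").ToArray();

    var page = 10;                                       // last page requested by the UI
    var inThisFile = page <= numberOfPages + pagesSoFar; // true only with the new <= check
    var chosen = page == files.Length
        ? files[page - 1 - pagesSoFar]                   // "010.jpg", the last image
        : files[page - pagesSoFar];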

View File

@@ -52,6 +52,8 @@ namespace API.Services
public void UpdateMetadata(Series series, bool forceUpdate)
{
// TODO: This doesn't actually invoke finding a new cover. Also, ideally all of these should be grouped
// so that we limit disk I/O to one method.
if (series == null) return;
if (ShouldFindCoverImage(series.CoverImage, forceUpdate))
{

View File

@@ -162,36 +162,34 @@ namespace API.Services
_logger.LogInformation("Removed {RemoveCount} series that are no longer on disk", removeCount);

// Add new series that have parsedInfos
- foreach (var info in parsedSeries)
+ foreach (var (key, _) in parsedSeries)
{
- var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(info.Key));
+ var existingSeries = library.Series.SingleOrDefault(s => s.NormalizedName == Parser.Parser.Normalize(key));
if (existingSeries == null)
{
existingSeries = new Series()
{
- Name = info.Key,
+ Name = key,
- OriginalName = info.Key,
+ OriginalName = key,
- NormalizedName = Parser.Parser.Normalize(info.Key),
+ NormalizedName = Parser.Parser.Normalize(key),
- SortName = info.Key,
+ SortName = key,
Summary = "",
Volumes = new List<Volume>()
};
library.Series.Add(existingSeries);
}
- existingSeries.NormalizedName = Parser.Parser.Normalize(info.Key);
+ existingSeries.NormalizedName = Parser.Parser.Normalize(key);
}

- int total = 0;
// Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series
var librarySeries = library.Series.ToList();
- Parallel.ForEach<Series, int>(librarySeries, () => 0, (series, state, subtotal) =>
+ Parallel.ForEach(librarySeries, (series) =>
{
_logger.LogInformation("Processing series {SeriesName}", series.Name);
UpdateVolumes(series, parsedSeries[series.Name].ToArray());
series.Pages = series.Volumes.Sum(v => v.Pages);
_metadataService.UpdateMetadata(series, _forceUpdate);
- return 0;
- }, finalResult => Interlocked.Add(ref total, finalResult));
+ });

foreach (var folder in library.Folders) folder.LastScanned = DateTime.Now;
}