diff --git a/API.Benchmark/API.Benchmark.csproj b/API.Benchmark/API.Benchmark.csproj
index 529f3cc93..d63d24ddc 100644
--- a/API.Benchmark/API.Benchmark.csproj
+++ b/API.Benchmark/API.Benchmark.csproj
@@ -15,4 +15,10 @@
+  <ItemGroup>
+    <None Update="Data/SeriesNamesForNormalization.txt">
+      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
+    </None>
+  </ItemGroup>
+
diff --git a/API.Benchmark/Data/SeriesNamesForNormalization.txt b/API.Benchmark/Data/SeriesNamesForNormalization.txt
new file mode 100644
index 000000000..99ae529fd
--- /dev/null
+++ b/API.Benchmark/Data/SeriesNamesForNormalization.txt
@@ -0,0 +1,573 @@
+Liar-Game
+Your Lie in April
+Love Hina
+Love Hina
+A Chronicle of the Last Pagans
+Otherworldly Munchkin - Let's Speedrun the Dungeon with Only 1 HP!
+Love Hina
+Rokka - Braves of the Six Flowers
+Real Account
+Bakekano
+Yancha Gal no Anjou-san
+Moshi Fanren
+The Devil Is a Part-Timer!
+My Home Hero
+Itoshi no Karin
+Claymore
+Dolls Fall
+Dragons Rioting
+Tokyo Ghoul - re
+Hajime no Ippo
+Mahoromatic
+DEATHTOPIA
+Negima! Neo - Magister Negi Magi
+Ichinensei ni Nacchattara
+How NOT to Summon a Demon Lord
+U12
+"Don't Toy With Me, Miss Nagatoro"
+Karakai Jouzu no Takagi-san
+UQ Holder!
+"Ore no Nounai Sentakushi ga, Gakuen Rabukome o Zenryoku de Jama Shite Iru"
+Do Chokkyuu Kareshi x Kanojo
+Ana Satsujin
+Deus Ex Machina
+Hidan no Aria
+Bokura wa Minna Kawaisou
+Epigraph of the Closed Curve
+Ibitsu
+Rave Master
+Lunar Legend Tsukihime
+Starving Anonymous
+High-Rise Invasion
+Fuuka
+Dai Dark
+Zero no Tsukaima Chevalier
+Cells at Work! CODE BLACK
+004 Cut Hero
+Renjoh Desperado
+Himegoto - Juukyuusai No Seifuku
+Shark Skin Man and Peach Hip Girl
+Tokyo Revengers
+Fire Punch
+Boarding School Juliet
+Mushihime
+Sankarea - Undying Love
+Hanako and the Terror of Allegory
+Mad Chimera World
+Kono Subarashii Sekai ni Bakuen wo!
+21st Century Boys
+Kono Subarashii Sekai ni Shukufuku wo! Megumin Anthology
+Konosuba
+Iinari
+Shimoneta - Manmaru Hen
+Ichiban Ushiro No Daimaou
+Yamada-kun and the Seven Witches
+Busou Shoujo Machiavellism
+Negative Happy Chainsaw Edge
+Stravaganza - Isai No Hime
+Seraph of the End - Vampire Reign 095
+Seraph of the End - Vampire Reign 098
+Kokkoku - Moment by Moment
+Magico
+Samurai Harem - Asu no Yoichi
+Change123
+Shomin Sample
+Eureka SeveN
+Kekkaishi
+Goblin Slayer Side Story - Year One
+Yomeiro Choice
+Okusama wa Shougakusei
+Monster No Goshujin-Sama
+Ase To Sekken
+How Do We Relationship
+Hantsu x Torasshu
+Magical Girl Apocalypse
+I Am a Hero
+Air Gear
+Dolly Kill Kill
+Blue Exorcist
+Kingdom of Z
+The Fable
+Mairimashita! Iruma-kun
+Spy x Family
+Goblin Slayer - Brand New Day
+Yesterday wo Utatte
+Mujaki No Rakuen
+Summer Time Rendering
+Eureka Seven Gravity Boys and Lifting Girl
+06
+Domestic Girlfriend
+Imperfect Girl
+Chrno Crusade
+Higurashi no Naku Koro ni Kai - Tsumihoroboshihen
+Nande koko ni sensei ga!
+Fukukaichou Ganbaru.
+Fraction
+Kono Subarashii Sekai ni Shukufuku wo! Megumin Anthology Aka
+Mouryou no Yurikago
+Ral Ω Grad
+Shomin Sample I Was Abducted by an Elite All-Girls School as a Sample Commoner
+City of Love Prison
+Tsugumomo
+Highschool of the Dead - Edition
+Cynthia The Mission
+Amano Megumi wa Suki Darake!
+Aria The Scarlet Ammo
+Noblesse
+Outlanders
+Bleach
+Kimi ni Todoke
+Corpse Party - Another Child
+The Heroic Legend of Arslan
+Fujiyama-San Wa Shishunki
+Let's Go Play
+Astra Lost in Space
+Mirai Nikki
+Doubt
+Again!!
+Gesellschaft Blume
+Momo The Blood Taker
+World's End Harem - Fantasia
+Tengoku Daimakyou
+Amaenaideyo MS
+Cage of Eden
+Arifureta - From Commonplace to World's Strongest
+"The 100 Girlfriends Who Really, Really, Really, Really, Really Love You"
+Frogman
+Chaika - The Coffin Princess
+Pandora Hearts
+I'm Not a Lolicon!
+Criminale!
+Drifting Net Cafe
+Kono Subarashii Sekai ni Nichijou wo!
+Tomodachi Game
+Accel World
+Sun-Ken Rock
+Parallel Paradise
+Otherworldly Munchkin - Let's Speedrun the Dungeon with Only 1 HP!
+Hentai Ouji to Warawanai Neko. Nya!
+Gokukoku no Brynhildr
+Rosario+Vampire Season 2
+Higurashi no Naku Koro ni - Tatarigoroshihen
+BEASTARS
+Grenadier
+The Duke of Death and His Black Maid
+Helck
+Ijousha no Ai
+Beelzebub
+Infection
+"Ota Tomo ga Kareshi ni Nattara, Saikou, Kamo Shirenai"
+Battle Vixens
+Kimi ha midara na Boku no Joou
+Immortal Hounds
+Battle Angel Alita
+My Monster Secret
+Blood Rain
+Kakegurui - Compulsive Gambler
+Combatants Will Be Dispatched!
+Tenjo Tenge - Digital Colored Comics
+Dorohedoro
+Tower Of God
+Toradora!
+Spice and Wolf
+Loose Relation Between Wizard and Apprentice
+Kaguya-sama - Love Is War - Digital Colored Comics
+RaW Hero
+Aiki
+Jagaaaaaan
+Gleipnir
+Darwin's Game
+I'm Standing on a Million Lives
+Battle Club
+School Rumble Z
+Wotakoi - Love Is Hard for Otaku
+Majimoji Rurumo
+Suisei no Gargantia
+Madan No Ou To Vanadis
+Full Metal Panic - Sigma
+Konosuba - An Explosion on This Wonderful World!
+Seraph of the End - Vampire Reign 096
+Higurashi no Naku Koro ni - Onikakushihen
+Corpse Party Cemetery 0 - Kaibyaku No Ars Moriendi
+World's End Harem
+Jack Frost
+The Men Who Created The Prison School Anime
+My Hero Academia
+Elfen Lied
+Berserk
+Witchcraft Works
+Chobits 20th Anniversary Edition
+Mx0
+Youkai Kyoushitsu
+Horimiya
+Mieruko-chan
+Drifters
+Suzuka
+The Iceblade Magician Rules Over the World
+Kaiju No. 8
+Yu-Gi-Oh!
+"A Story About Treating a Female Knight, Who Has Never Been Treated as a Woman, as a Woman"
+Mahoutsukai to Deshi no Futekisetsu na Kankei
+Battle Royale
+Mato Seihei no Slave
+One-Punch Man
+Boku No Kokoro No Yabai Yatsu
+Doku Mushi
+Kuzu no Honkai
+Hoshihimemura No Naishobanashi
+Knights of Sidonia
+Amaenaideyo
+Kono Subarashii Sekai ni Shukufuku wo! Spin-off Kono Kamen no Akuma ni Soudan wo!
+Killing Bites
+Fly Me to the Moon
+Tenjo Tenge
+D-Princess
+7thGARDEN
+Sumomomo Momomo
+Accel World Dural - Magisa Garden
+History's Strongest Disciple Kenichi
+Future Diary - Mosaic
+DEAD Tube
+Kaworu Watashiya - Kodomo no Jikan
+Undead Unluck
+Black Bullet
+Fureru To Kikoeru
+Konchuki
+Akuma no Riddle - Riddle Story of Devil
+Great Teacher Onizuka
+Scumbag Loser
+Jisatsutou
+Boku wa Mari no Naka
+Cherry x Cherry
+Seraph of the End - Vampire Reign 093
+Yumekui Merry - 4-Koma Anthology
+Love and Lies
+Nisekoi - False Love
+Another
+My Balls
+Akame ga KILL!
+Corpse Princess
+Needless 0
+My Charms Are Wasted On Kuroiwa Medaka
+Made in Abyss
+Hanako to Guuwa no Tera
+Yumekui Merry
+Miman Renai
+Sundome
+Gantz
+Accomplishments of the Duke's Daughter
+Grimgar of Fantasy and Ash
+Dansei Kyoufushou Datta Watashi Ga Av Jouyu Ni Naru Made No Hanashi
+Hour of the Zombie
+NOiSE
+Onani Master Kurosawa
+Sekirei
+Full Metal Panic
+Zero no Tsukaima
+Solo Leveling
+B Gata H Kei
+Shurabara!
+DEATH NOTE
+Terra Formars
+Goblin Slayer
+March Story
+Nozoki Ana
+Youkai Shoujo - Monsuga
+Maji de Watashi ni Koi Shinasai!!
+"Ore no Nounai Sentakushi ga, Gakuen Rabukome o Zenryoku de Jama Shite Iru H"
+Destruction Princess
+Mob Psycho 100
+Negima!
+Zero - The Illust collection of The Familiar of Zero
+20th Century Boys
+Girls of the Wild's
+Bleach - Digital Colored Comics
+Taboo Tattoo
+Let's Buy The Land And Cultivate In Different World
+Oroka na Tenshi wa Akuma to Odoru
+Future Diary
+Negima! Party Book!
+Buso Renkin
+Offal Island
+Mysterious Girlfriend X
+Getsurin ni Kiri Saku
+Magi
+Uzaki-chan Wants to Hang Out!
+A Town Where You Live
+WITCH WATCH
+Lord Marksman and Vanadis
+Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo
+Tonari No Furi-San Ga Tonikaku Kowai
+Hinowa ga CRUSH!
+Tsuredure Children
+Dance in the Vampire Bund
+Sperman
+The Rising Of The Shield Hero
+Triage X
+Kiruru Kill Me
+Hidan no Aria AA
+Origin
+Senran Kagura - Skirting Shadows
+Higurashi no Naku Koro ni - Himatsubushihen
+APOSIMZ
+Franken Fran
+Is This a Zombie
+School Rumble
+Darker than Black - Shikkoku no Hana
+Sweet X Trouble
+Close As Neighbors
+7SEEDS
+Dungeon Seeker
+Necromance
+Code Breaker
+Rokka Braves of the Six Flowers
+Prison School
+COPPELION
+Grand Blue Dreaming
+Libidors
+Skill of Lure
+Pluto - Urasawa x Tezuka
+Chibi Vampire
+Omamori Himari
+"Zoku, Kono Subarashii Sekai ni Bakuen wo!"
+"Please Go Home, Akutsu-San!"
+Mahoutsukai to Teishi no Futekisetsu na Kankei
+Chobits
+The Seven Deadly Sins
+Black Clover
+We Never Learn
+Tomogui Kyoushitsu
+Tokyo Ghoul
+Sweat and Soap
+Seraph of the End - Vampire Reign 097
+Higurashi no Naku Koro ni Kai - Meakashihen
+Children
+"Can You Just Die, My Darling"
+"Haganai, I Don't Have Many Friends"
+Heion Sedai no Idaten-tachi
+Baketeriya
+Magical Sempai
+Ajin - Demi-Human
+Kimi wa Midara na Boku no Joou
+DearS
+Pluto
+Lotte no Omocha!
+Love Hina
+Shoujo Kaitai
+El Cazador de la Bruja
+Akame ga KILL! ZERO
+"Beauty, Sage And The Devil's Sword"
+Higurashi no Naku Koro ni - Watanagashihen
+Corpse Party - Musume
+Getsuyoubi no Tawawa
+Trinity Seven
+"No Game, No Life"
+KanoKari Mythology
+Seraph of the End - Vampire Reign 094
+Uzumaki
+Darling in the FranXX
+The Blade Of Evolution-Walking Alone In The Dungeon
+BLAME! Master Edition
+Fire Force
+Toukyou Akazukin
+Darker than Black
+Karin
+Higurashi no Naku Koro ni Kai - Matsuribayashihen
+Akazukin
+Velvet Kiss
+"Kanojo, Okarishimasu"
+Teasing Master Takagi-san
+The Hentai Prince and the Stony Cat
+Corpse Party - Book of Shadows
+.hackxxxx
+Hachigatsu Kokonoka Boku wa Kimi ni Kuwareru.
+Corpse Party - Blood Covered
+King Of Thorn
+BTOOOM!
+Chimamire Sukeban Chainsaw
+Seraph of the End - Vampire Reign
+Juni Taisen Zodiac War
+Masamune-kun's Revenge
+How Many Light-Years to Babylon
+Midori no Hibi
+A Girl on the Shore
+Plunderer
+School Rumble - Pleasure File
+Green WorldZ
+Golden Boy
+Yuusha ga Shinda!
+Kodomo no Jikan
+unOrdinary
+My Wife is Wagatsuma-san
+VanDread
+Rosario+Vampire
+Kyochuu Rettou
+Deadman Wonderland
+KILL la KILL
+Mushoku Tensei - Jobless Reincarnation
+404 Case Manual 30 Seconds Till Apocalypse
+Iris Zero
+All You Need is Kill
+Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen
+High School DxD
+Needless
+Ichiban no Daimaou
+My Girlfriend Is A Zombie
+Hare-Kon
+Minamoto-kun Monogatari
+Batman Beyond 02
+Spawn
+iZombie
+Invincible 070.5 - Invincible Returns
+Invincible Presents - Atom Eve
+Invincible 033.5 - Marvel Team-Up
+Invincible 031.5 - Image - Future Shock
+Batman Wayne Family Adventures
+Batman Beyond 04
+Batman Beyond 2.0
+Batman Beyond 03
+Batman Beyond 05
+Chew
+Zombie Tramp vs. Vampblade TPB
+Free Scott Pilgrim
+Invincible Presents - Atom Eve & Rex Splode
+Scott Pilgrim 03 - Scott Pilgrim & The Infinite Sadness
+I Hate Fairyland
+Scott Pilgrim 06 - Scott Pilgrim's Finest Hour
+Scott Pilgrim 04 - Scott Pilgrim Gets It Together
+Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life
+Spawn - 25th Anniversary Director's Cut
+Zombie Tramp
+Invincible Universe
+The Official Handbook of the Invincible Universe
+Batman Beyond
+Saga
+Scott Pilgrim 05 - Scott Pilgrim vs. the Universe
+Batman Beyond 06
+Batman - Detective Comics - Rebirth Deluxe Edition Book
+Batman Beyond 01
+Batman - Catwoman
+Invincible 022.5 - Invincible
+Teen Titans - Raven
+Invincible 052
+Invincible 014.5 - Image Comics Summer
+Zombie Tramp v3 TPB
+Scott Pilgrim 02 - Scott Pilgrim vs. The World
+Invincible
+Spawn 220
+Y - The Last Man
+Kick-Ass - The Dave Lizewski Years
+Teen Titans
+Fables
+Book of Enoch
+To Love-Ru Darkness - Digital Colored Comics
+Medaka Box - Digital Colored Comics
+Magical Pâtissière Kosaki-chan!!
+Pandora in the Crimson Shell - Ghost Urn
+Yuragi-sou no Yuuna-san - Digital Colored Comics
+Ziggurat
+Tsugumomo - Digital Colored Comics
+The War Poems Of Siegfried Sassoon
+Rokka - Braves of the Six Flowers
+Demon King Daimaou
+Blockade Billy
+Cujo
+The Magicians
+The Gunslinger
+Danse Macabre
+Christine
+Fool moon
+On Writing
+Roadwork
+Deep Learning with Python - A Hands-on Introduction
+If It Bleeds
+Night Shift
+Bag of Bones
+Dreamcatcher
+Desperation
+Duma Key
+Four Past Midnight
+Elevation
+The Colorado Kid
+The Eyes of the Dragon
+Consulting With This Masked Devil!
+Gifting the Wonderful World with Blessings!
+The Golden Harpoon / Lost Among the Floes
+Invaders of the Rokujouma
+Cell
+Uncollected Stories 2003
+Faithful
+"Full Dark, No Stars"
+Dolores Claiborne
+It
+Antonio's Tale
+Joyland
+konosuba
+CSHP19
+By the Grace of the Gods - LN
+EPUB 3 Collection
+Talisman
+Sword Art Online
+The Mist
+Insomnia
+Hearts In Atlantis
+11/22/63
+Kono Subarashii Sekai ni Bakuen wo!
+In the Tall Grass
+Nightmares and Dreamscapes
+Eloquent JavaScript
+The Bell Jar
+Six Stories
+Rose Madder
+The Stand
+The Devil Is a Part-Timer!
+Grimgar of Fantasy and Ash
+A Chronicle of the Last Pagans
+Cycle of the Werewolf
+Gifting this Wonderful World With Blessings!
+Unit 1. Operations on Numbers.
+Firestarter
+The Dark Half
+Accel World
+Love Hina - Volume
+Skeleton Crew
+Needful Things
+Kono Subarashii Sekai ni Syukufuku wo!
+Carrie
+Thinner
+Hentai Ouji to Warawanai Neko
+Blaze
+Saturn Run
+Throttle
+Just After Sunset
+Gerald's Game
+The Regulators
+Different Seasons
+The Dark Tower
+Pet Sematary
+The Girl Who Loved Tom Gordon
+Ano Orokamono ni mo Kyakkou wo!
+From A Buick 8
+The Green Mile
+"Celebration of Discipline, Special Anniversary Edition"
+Combatants Will Be Dispatched!
+Kore Wa Zombie Desu Ka
+The Shining
+The Tatami Galaxy
+Salem's Lot
+The Tommyknockers
+A Face in the Crowd
+UR
+この素晴らしい世界に祝福を! 9 紅の宿命 【電子特別版】
+Outsider
+Lisey's Story
+Everything's Eventual
+Dune
+The Dead Zone
+Mile 81
+Under the Dome
+The Long Walk
+The Running Man
+EPUB3 UNLEASHED 2012
+Gifting The Wonderful World With Explosions!
+Rage
diff --git a/API.Benchmark/ParserBenchmarks.cs b/API.Benchmark/ParserBenchmarks.cs
new file mode 100644
index 000000000..8eaa70a28
--- /dev/null
+++ b/API.Benchmark/ParserBenchmarks.cs
@@ -0,0 +1,89 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Text.RegularExpressions;
+using BenchmarkDotNet.Attributes;
+using BenchmarkDotNet.Order;
+
+namespace API.Benchmark
+{
+ [MemoryDiagnoser]
+ [Orderer(SummaryOrderPolicy.FastestToSlowest)]
+ [RankColumn]
+ public class ParserBenchmarks
+ {
+ private readonly IList<string> _names;
+
+ private static readonly Regex NormalizeRegex = new Regex(@"[^a-zA-Z0-9]",
+ RegexOptions.IgnoreCase | RegexOptions.Compiled,
+ TimeSpan.FromMilliseconds(300));
+
+ private static readonly Regex IsEpub = new Regex(@"\.epub",
+ RegexOptions.IgnoreCase | RegexOptions.Compiled,
+ TimeSpan.FromMilliseconds(300));
+
+ public ParserBenchmarks()
+ {
+ // Read all series from SeriesNamesForNormalization.txt
+ _names = File.ReadAllLines("Data/SeriesNamesForNormalization.txt");
+ Console.WriteLine($"Performing benchmark on {_names.Count} series");
+ }
+
+ private static void NormalizeOriginal(string name)
+ {
+ Regex.Replace(name.ToLower(), "[^a-zA-Z0-9]", string.Empty);
+ }
+
+ private static void NormalizeNew(string name)
+ {
+ NormalizeRegex.Replace(name, string.Empty).ToLower();
+ }
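+
+ // Both implementations are expected to produce identical output; e.g. (illustrative)
+ // "Darker than Black - Shikkoku no Hana" normalizes to "darkerthanblackshikkokunohana".
+ // NormalizeOriginal lowercases before stripping and NormalizeNew after, but the
+ // character class [^a-zA-Z0-9] matches the same characters either way, so order does not matter.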
+
+
+ [Benchmark]
+ public void TestNormalizeName()
+ {
+ foreach (var name in _names)
+ {
+ NormalizeOriginal(name);
+ }
+ }
+
+
+ [Benchmark]
+ public void TestNormalizeName_New()
+ {
+ foreach (var name in _names)
+ {
+ NormalizeNew(name);
+ }
+ }
+
+ [Benchmark]
+ public void TestIsEpub()
+ {
+ foreach (var name in _names)
+ {
+ if ((name + ".epub").ToLower() == ".epub")
+ {
+ /* No Operation */
+ }
+ }
+ }
+
+ [Benchmark]
+ public void TestIsEpub_New()
+ {
+ foreach (var name in _names)
+ {
+
+ if (IsEpub.IsMatch((name + ".epub")))
+ {
+ /* No Operation */
+ }
+ }
+ }
+
+
+ }
+}
diff --git a/API.Benchmark/Program.cs b/API.Benchmark/Program.cs
index b308a07b7..f12146a7f 100644
--- a/API.Benchmark/Program.cs
+++ b/API.Benchmark/Program.cs
@@ -12,8 +12,10 @@ namespace API.Benchmark
{
static void Main(string[] args)
{
- BenchmarkRunner.Run();
+ //BenchmarkRunner.Run();
//BenchmarkRunner.Run();
+ BenchmarkRunner.Run<ParserBenchmarks>();
+
}
}
}
diff --git a/API.Tests/Parser/ParserTest.cs b/API.Tests/Parser/ParserTest.cs
index 6830cde0d..ac1e506bc 100644
--- a/API.Tests/Parser/ParserTest.cs
+++ b/API.Tests/Parser/ParserTest.cs
@@ -54,7 +54,7 @@ namespace API.Tests.Parser
// public void ReplaceStyleUrlTest(string input, string expected)
// {
// var replacementStr = "PaytoneOne.ttf";
- // // TODO: Use Match to validate since replace is weird
+ // // Use Match to validate since replace is weird
// //Assert.Equal(expected, FontSrcUrlRegex.Replace(input, "$1" + replacementStr + "$2" + "$3"));
// var match = FontSrcUrlRegex.Match(input);
// Assert.Equal(!string.IsNullOrEmpty(expected), FontSrcUrlRegex.Match(input).Success);
diff --git a/API.Tests/Services/ScannerServiceTests.cs b/API.Tests/Services/ScannerServiceTests.cs
index 93b254c8e..0253ccef6 100644
--- a/API.Tests/Services/ScannerServiceTests.cs
+++ b/API.Tests/Services/ScannerServiceTests.cs
@@ -111,7 +111,7 @@ namespace API.Tests.Services
- Assert.Empty(_scannerService.FindSeriesNotOnDisk(existingSeries, infos));
+ Assert.Empty(ScannerService.FindSeriesNotOnDisk(existingSeries, infos));
}
diff --git a/API/Controllers/DownloadController.cs b/API/Controllers/DownloadController.cs
index 3000e1f22..d5080846a 100644
--- a/API/Controllers/DownloadController.cs
+++ b/API/Controllers/DownloadController.cs
@@ -63,7 +63,7 @@ namespace API.Controllers
public async Task<ActionResult> DownloadVolume(int volumeId)
{
var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId);
- var volume = await _unitOfWork.SeriesRepository.GetVolumeByIdAsync(volumeId);
+ var volume = await _unitOfWork.VolumeRepository.GetVolumeByIdAsync(volumeId);
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId);
try
{
@@ -92,7 +92,7 @@ namespace API.Controllers
{
var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId);
var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId);
- var volume = await _unitOfWork.SeriesRepository.GetVolumeByIdAsync(chapter.VolumeId);
+ var volume = await _unitOfWork.VolumeRepository.GetVolumeByIdAsync(chapter.VolumeId);
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId);
try
{
diff --git a/API/Controllers/LibraryController.cs b/API/Controllers/LibraryController.cs
index 25f224a28..07a4a3f97 100644
--- a/API/Controllers/LibraryController.cs
+++ b/API/Controllers/LibraryController.cs
@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
+using API.Data.Repositories;
using API.DTOs;
using API.Entities;
using API.Entities.Enums;
@@ -179,7 +180,7 @@ namespace API.Controllers
try
{
- var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId);
+ var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);
_unitOfWork.LibraryRepository.Delete(library);
await _unitOfWork.CommitAsync();
@@ -203,7 +204,7 @@ namespace API.Controllers
[HttpPost("update")]
public async Task UpdateLibrary(UpdateLibraryDto libraryForUserDto)
{
- var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryForUserDto.Id);
+ var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryForUserDto.Id, LibraryIncludes.Folders);
var originalFolders = library.Folders.Select(x => x.Path).ToList();
diff --git a/API/Controllers/OPDSController.cs b/API/Controllers/OPDSController.cs
index ef83c2a69..d1359a9e5 100644
--- a/API/Controllers/OPDSController.cs
+++ b/API/Controllers/OPDSController.cs
@@ -467,7 +467,7 @@ namespace API.Controllers
return BadRequest("OPDS is not enabled on this server");
var userId = await GetUser(apiKey);
var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId);
- var volumes = await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, userId);
+ var volumes = await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId);
var feed = CreateFeed(series.Name + " - Volumes", $"{apiKey}/series/{series.Id}", apiKey);
feed.Links.Add(CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/series-cover?seriesId={seriesId}"));
foreach (var volumeDto in volumes)
@@ -486,7 +486,7 @@ namespace API.Controllers
return BadRequest("OPDS is not enabled on this server");
var userId = await GetUser(apiKey);
var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId);
- var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId);
+ var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId);
var chapters =
(await _unitOfWork.ChapterRepository.GetChaptersAsync(volumeId)).OrderBy(x => double.Parse(x.Number),
_chapterSortComparer);
@@ -517,7 +517,7 @@ namespace API.Controllers
return BadRequest("OPDS is not enabled on this server");
var userId = await GetUser(apiKey);
var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId);
- var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId);
+ var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId);
var chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapterId);
var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId);
diff --git a/API/Controllers/ReaderController.cs b/API/Controllers/ReaderController.cs
index d1314674e..5b18cfb98 100644
--- a/API/Controllers/ReaderController.cs
+++ b/API/Controllers/ReaderController.cs
@@ -97,7 +97,7 @@ namespace API.Controllers
public async Task<ActionResult> MarkRead(MarkReadDto markReadDto)
{
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress);
- var volumes = await _unitOfWork.SeriesRepository.GetVolumes(markReadDto.SeriesId);
+ var volumes = await _unitOfWork.VolumeRepository.GetVolumes(markReadDto.SeriesId);
user.Progresses ??= new List<AppUserProgress>();
foreach (var volume in volumes)
{
@@ -125,7 +125,7 @@ namespace API.Controllers
public async Task<ActionResult> MarkUnread(MarkReadDto markReadDto)
{
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress);
- var volumes = await _unitOfWork.SeriesRepository.GetVolumes(markReadDto.SeriesId);
+ var volumes = await _unitOfWork.VolumeRepository.GetVolumes(markReadDto.SeriesId);
user.Progresses ??= new List<AppUserProgress>();
foreach (var volume in volumes)
{
@@ -267,7 +267,7 @@ namespace API.Controllers
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress);
user.Progresses ??= new List<AppUserProgress>();
- var volumes = await _unitOfWork.SeriesRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true);
+ var volumes = await _unitOfWork.VolumeRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true);
foreach (var volume in volumes)
{
_readerService.MarkChaptersAsRead(user, volume.SeriesId, volume.Chapters);
@@ -294,7 +294,7 @@ namespace API.Controllers
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress);
user.Progresses ??= new List<AppUserProgress>();
- var volumes = await _unitOfWork.SeriesRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true);
+ var volumes = await _unitOfWork.VolumeRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true);
foreach (var volume in volumes)
{
_readerService.MarkChaptersAsUnread(user, volume.SeriesId, volume.Chapters);
diff --git a/API/Controllers/SeriesController.cs b/API/Controllers/SeriesController.cs
index ff0fa7587..81ea6a6ff 100644
--- a/API/Controllers/SeriesController.cs
+++ b/API/Controllers/SeriesController.cs
@@ -97,14 +97,14 @@ namespace API.Controllers
public async Task<ActionResult<IEnumerable<VolumeDto>>> GetVolumes(int seriesId)
{
var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername());
- return Ok(await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, userId));
+ return Ok(await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId));
}
[HttpGet("volume")]
public async Task<ActionResult<VolumeDto>> GetVolume(int volumeId)
{
var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername());
- return Ok(await _unitOfWork.SeriesRepository.GetVolumeDtoAsync(volumeId, userId));
+ return Ok(await _unitOfWork.VolumeRepository.GetVolumeDtoAsync(volumeId, userId));
}
[HttpGet("chapter")]
@@ -217,7 +217,7 @@ namespace API.Controllers
[HttpPost("refresh-metadata")]
public ActionResult RefreshSeriesMetadata(RefreshSeriesDto refreshSeriesDto)
{
- _taskScheduler.RefreshSeriesMetadata(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId);
+ _taskScheduler.RefreshSeriesMetadata(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId, true);
return Ok();
}
diff --git a/API/Controllers/UploadController.cs b/API/Controllers/UploadController.cs
index 4241a8bc6..43c9b8d09 100644
--- a/API/Controllers/UploadController.cs
+++ b/API/Controllers/UploadController.cs
@@ -148,7 +148,7 @@ namespace API.Controllers
chapter.CoverImage = filePath;
chapter.CoverImageLocked = true;
_unitOfWork.ChapterRepository.Update(chapter);
- var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(chapter.VolumeId);
+ var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(chapter.VolumeId);
volume.CoverImage = chapter.CoverImage;
_unitOfWork.VolumeRepository.Update(volume);
}
@@ -185,7 +185,7 @@ namespace API.Controllers
chapter.CoverImage = string.Empty;
chapter.CoverImageLocked = false;
_unitOfWork.ChapterRepository.Update(chapter);
- var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(chapter.VolumeId);
+ var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(chapter.VolumeId);
volume.CoverImage = chapter.CoverImage;
_unitOfWork.VolumeRepository.Update(volume);
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId);
diff --git a/API/Controllers/UsersController.cs b/API/Controllers/UsersController.cs
index c35e368cc..2c83a1267 100644
--- a/API/Controllers/UsersController.cs
+++ b/API/Controllers/UsersController.cs
@@ -1,6 +1,7 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
+using API.Data.Repositories;
using API.DTOs;
using API.Extensions;
using API.Interfaces;
@@ -41,8 +42,8 @@ namespace API.Controllers
[HttpGet("has-reading-progress")]
public async Task<ActionResult<bool>> HasReadingProgress(int libraryId)
{
- var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId);
var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername());
+ var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);
return Ok(await _unitOfWork.AppUserProgressRepository.UserHasProgress(library.Type, userId));
}
diff --git a/API/Data/Repositories/LibraryRepository.cs b/API/Data/Repositories/LibraryRepository.cs
index 7f3544aee..caae93dd6 100644
--- a/API/Data/Repositories/LibraryRepository.cs
+++ b/API/Data/Repositories/LibraryRepository.cs
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.DTOs;
@@ -11,6 +12,17 @@ using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories
{
+
+ [Flags]
+ public enum LibraryIncludes
+ {
+ None = 1,
+ Series = 2,
+ AppUser = 4,
+ Folders = 8,
+ // Ratings = 16
+ }
+
public class LibraryRepository : ILibraryRepository
{
private readonly DataContext _context;
@@ -58,7 +70,7 @@ namespace API.Data.Repositories
public async Task<bool> DeleteLibrary(int libraryId)
{
- var library = await GetLibraryForIdAsync(libraryId);
+ var library = await GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders | LibraryIncludes.Series);
_context.Library.Remove(library);
return await _context.SaveChangesAsync() > 0;
}
@@ -91,14 +103,37 @@ namespace API.Data.Repositories
.ToListAsync();
}
- public async Task<Library> GetLibraryForIdAsync(int libraryId)
+ public async Task<Library> GetLibraryForIdAsync(int libraryId, LibraryIncludes includes)
{
- return await _context.Library
- .Where(x => x.Id == libraryId)
- .Include(f => f.Folders)
- .Include(l => l.Series)
- .SingleAsync();
+
+ var query = _context.Library
+ .Where(x => x.Id == libraryId);
+
+ query = AddIncludesToQuery(query, includes);
+ return await query.SingleAsync();
}
+
+ private static IQueryable<Library> AddIncludesToQuery(IQueryable<Library> query, LibraryIncludes includeFlags)
+ {
+ if (includeFlags.HasFlag(LibraryIncludes.Folders))
+ {
+ query = query.Include(l => l.Folders);
+ }
+
+ if (includeFlags.HasFlag(LibraryIncludes.Series))
+ {
+ query = query.Include(l => l.Series);
+ }
+
+ if (includeFlags.HasFlag(LibraryIncludes.AppUser))
+ {
+ query = query.Include(l => l.AppUsers);
+ }
+
+ return query;
+ }
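+
+ // Callers compose the [Flags] values to eager-load several relations in one query,
+ // e.g. GetLibraryForIdAsync(id, LibraryIncludes.Folders | LibraryIncludes.Series)
+ // applies both Include(l => l.Folders) and Include(l => l.Series), as DeleteLibrary does above.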
+
+
/// <summary>
/// This returns a Library with all its Series -> Volumes -> Chapters. This is expensive. Should only be called when needed.
/// </summary>
@@ -106,7 +141,6 @@ namespace API.Data.Repositories
/// <returns></returns>
public async Task<Library> GetFullLibraryForIdAsync(int libraryId)
{
-
return await _context.Library
.Where(x => x.Id == libraryId)
.Include(f => f.Folders)
diff --git a/API/Data/Repositories/SeriesRepository.cs b/API/Data/Repositories/SeriesRepository.cs
index 3ed415859..842b0767e 100644
--- a/API/Data/Repositories/SeriesRepository.cs
+++ b/API/Data/Repositories/SeriesRepository.cs
@@ -1,15 +1,16 @@
using System;
using System.Collections.Generic;
-using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
+using API.Data.Scanner;
using API.DTOs;
using API.DTOs.Filtering;
using API.Entities;
using API.Extensions;
using API.Helpers;
using API.Interfaces.Repositories;
+using API.Services.Tasks;
using AutoMapper;
using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore;
@@ -26,9 +27,9 @@ namespace API.Data.Repositories
_mapper = mapper;
}
- public void Add(Series series)
+ public void Attach(Series series)
{
- _context.Series.Add(series);
+ _context.Series.Attach(series);
}
public void Update(Series series)
@@ -36,19 +37,9 @@ namespace API.Data.Repositories
_context.Entry(series).State = EntityState.Modified;
}
- public async Task SaveAllAsync()
+ public void Remove(Series series)
{
- return await _context.SaveChangesAsync() > 0;
- }
-
- public bool SaveAll()
- {
- return _context.SaveChanges() > 0;
- }
-
- public async Task<Series> GetSeriesByNameAsync(string name)
- {
- return await _context.Series.SingleOrDefaultAsync(x => x.Name == name);
+ _context.Series.Remove(series);
}
public async Task DoesSeriesNameExistInLibrary(string name)
@@ -64,11 +55,6 @@ namespace API.Data.Repositories
.CountAsync() > 1;
}
- public Series GetSeriesByName(string name)
- {
- return _context.Series.SingleOrDefault(x => x.Name == name);
- }
-
public async Task<IEnumerable<Series>> GetSeriesForLibraryIdAsync(int libraryId)
{
return await _context.Series
@@ -77,6 +63,43 @@ namespace API.Data.Repositories
.ToListAsync();
}
+ /// <summary>
+ /// Used for <see cref="ScannerService"/> to fetch a library's series with Metadata, Volumes, Chapters, and Files included
+ /// </summary>
+ /// <param name="libraryId"></param>
+ /// <returns></returns>
+ public async Task<PagedList<Series>> GetFullSeriesForLibraryIdAsync(int libraryId, UserParams userParams)
+ {
+ var query = _context.Series
+ .Where(s => s.LibraryId == libraryId)
+ .Include(s => s.Metadata)
+ .Include(s => s.Volumes)
+ .ThenInclude(v => v.Chapters)
+ .ThenInclude(c => c.Files)
+ .AsSplitQuery()
+ .OrderBy(s => s.SortName);
+
+ return await PagedList<Series>.CreateAsync(query, userParams.PageNumber, userParams.PageSize);
+ }
+
+ /// <summary>
+ /// This is a heavy call. Returns all entities down to Files and Library and Series Metadata.
+ /// </summary>
+ /// <param name="seriesId"></param>
+ /// <returns></returns>
+ public async Task<Series> GetFullSeriesForSeriesIdAsync(int seriesId)
+ {
+ return await _context.Series
+ .Where(s => s.Id == seriesId)
+ .Include(s => s.Metadata)
+ .Include(s => s.Library)
+ .Include(s => s.Volumes)
+ .ThenInclude(v => v.Chapters)
+ .ThenInclude(c => c.Files)
+ .AsSplitQuery()
+ .SingleOrDefaultAsync();
+ }
+
public async Task<PagedList<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams, FilterDto filter)
{
var formats = filter.GetSqlFilter();
@@ -103,41 +126,12 @@ namespace API.Data.Repositories
.ToListAsync();
}
- public async Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId)
- {
- var volumes = await _context.Volume
- .Where(vol => vol.SeriesId == seriesId)
- .Include(vol => vol.Chapters)
- .OrderBy(volume => volume.Number)
- .ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
- .AsNoTracking()
- .ToListAsync();
-
- await AddVolumeModifiers(userId, volumes);
- SortSpecialChapters(volumes);
-
- return volumes;
- }
-
- private static void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
- {
- var sorter = new NaturalSortComparer();
- foreach (var v in volumes.Where(vDto => vDto.Number == 0))
- {
- v.Chapters = v.Chapters.OrderBy(x => x.Range, sorter).ToList();
- }
- }
- public async Task<IEnumerable<Volume>> GetVolumes(int seriesId)
- {
- return await _context.Volume
- .Where(vol => vol.SeriesId == seriesId)
- .Include(vol => vol.Chapters)
- .ThenInclude(c => c.Files)
- .OrderBy(vol => vol.Number)
- .ToListAsync();
- }
+
+
+
+
public async Task<SeriesDto> GetSeriesDtoByIdAsync(int seriesId, int userId)
{
@@ -151,55 +145,8 @@ namespace API.Data.Repositories
return seriesList[0];
}
- public async Task<Volume> GetVolumeAsync(int volumeId)
- {
- return await _context.Volume
- .Include(vol => vol.Chapters)
- .ThenInclude(c => c.Files)
- .SingleOrDefaultAsync(vol => vol.Id == volumeId);
- }
- public async Task<VolumeDto> GetVolumeDtoAsync(int volumeId)
- {
- return await _context.Volume
- .Where(vol => vol.Id == volumeId)
- .AsNoTracking()
- .ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
- .SingleAsync();
- }
-
- public async Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId)
- {
- var volume = await _context.Volume
- .Where(vol => vol.Id == volumeId)
- .Include(vol => vol.Chapters)
- .ThenInclude(c => c.Files)
- .ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
- .SingleAsync(vol => vol.Id == volumeId);
-
- var volumeList = new List<VolumeDto>() {volume};
- await AddVolumeModifiers(userId, volumeList);
-
- return volumeList[0];
- }
-
- /// <summary>
- /// Returns all volumes that contain a seriesId in passed array.
- /// </summary>
- /// <param name="seriesIds"></param>
- /// <returns></returns>
- public async Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false)
- {
- var query = _context.Volume
- .Where(v => seriesIds.Contains(v.SeriesId));
-
- if (includeChapters)
- {
- query = query.Include(v => v.Chapters);
- }
- return await query.ToListAsync();
- }
public async Task<bool> DeleteSeriesAsync(int seriesId)
{
@@ -209,11 +156,12 @@ namespace API.Data.Repositories
return await _context.SaveChangesAsync() > 0;
}
- public async Task<Volume> GetVolumeByIdAsync(int volumeId)
- {
- return await _context.Volume.SingleOrDefaultAsync(x => x.Id == volumeId);
- }
+ /// <summary>
+ /// Returns Volumes, Metadata, and Collection Tags
+ /// </summary>
+ /// <param name="seriesId"></param>
+ /// <returns></returns>
+ public async Task<Series> GetSeriesByIdAsync(int seriesId)
{
return await _context.Series
@@ -244,7 +192,7 @@ namespace API.Data.Repositories
}
/// <summary>
- /// This returns a list of tuples back for each series id passed
+ /// This returns a dictionary mapping seriesId -> list of chapters back for each series id passed
/// </summary>
/// <param name="seriesIds"></param>
/// <returns></returns>
@@ -301,24 +249,7 @@ namespace API.Data.Repositories
.SingleOrDefaultAsync();
}
- private async Task AddVolumeModifiers(int userId, IReadOnlyCollection<VolumeDto> volumes)
- {
- var volIds = volumes.Select(s => s.Id);
- var userProgress = await _context.AppUserProgresses
- .Where(p => p.AppUserId == userId && volIds.Contains(p.VolumeId))
- .AsNoTracking()
- .ToListAsync();
- foreach (var v in volumes)
- {
- foreach (var c in v.Chapters)
- {
- c.PagesRead = userProgress.Where(p => p.ChapterId == c.Id).Sum(p => p.PagesRead);
- }
-
- v.PagesRead = userProgress.Where(p => p.VolumeId == v.Id).Sum(p => p.PagesRead);
- }
- }
///
/// Returns a list of Series that were added, ordered by Created desc
@@ -497,5 +428,63 @@ namespace API.Data.Repositories
.AsNoTracking()
.ToListAsync();
}
+
+ /// <summary>
+ /// Returns the number of series for a given library (or all libraries if libraryId is 0)
+ /// </summary>
+ /// <param name="libraryId">Defaults to 0, library to restrict count to</param>
+ /// <returns></returns>
+ private async Task<int> GetSeriesCount(int libraryId = 0)
+ {
+ if (libraryId > 0)
+ {
+ return await _context.Series
+ .Where(s => s.LibraryId == libraryId)
+ .CountAsync();
+ }
+ return await _context.Series.CountAsync();
+ }
+
+ /// <summary>
+ /// Returns the number of series that should be processed in parallel to optimize speed and memory. Minimum of 50
+ /// </summary>
+ /// <param name="libraryId">Defaults to 0 meaning no library</param>
+ /// <returns></returns>
+ private async Task<Tuple<int, int>> GetChunkSize(int libraryId = 0)
+ {
+ // TODO: Think about making this bigger depending on number of files a user has in said library
+ // and number of cores and amount of memory. We can then make an optimal choice
+ var totalSeries = await GetSeriesCount(libraryId);
+ var procCount = Math.Max(Environment.ProcessorCount - 1, 1);
+
+ if (totalSeries < procCount * 2 || totalSeries < 50)
+ {
+ return new Tuple<int, int>(totalSeries, totalSeries);
+ }
+
+
+ return new Tuple<int, int>(totalSeries, Math.Max(totalSeries / procCount, 50));
+ }
+
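+ // Worked example (illustrative numbers): 1000 series on an 8-core machine gives
+ // procCount = 7 and a chunk size of Math.Max(1000 / 7, 50) = 142, so GetChunkInfo
+ // reports Math.Ceiling(1000 / 142.0) = 8 total chunks.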
+ public async Task<Chunk> GetChunkInfo(int libraryId = 0)
+ {
+ var (totalSeries, chunkSize) = await GetChunkSize(libraryId);
+
+ if (totalSeries == 0) return new Chunk()
+ {
+ TotalChunks = 0,
+ TotalSize = 0,
+ ChunkSize = 0
+ };
+
+ var totalChunks = Math.Max((int) Math.Ceiling((totalSeries * 1.0) / chunkSize), 1);
+
+ return new Chunk()
+ {
+ TotalSize = totalSeries,
+ ChunkSize = chunkSize,
+ TotalChunks = totalChunks
+ };
+ }
}
}
diff --git a/API/Data/Repositories/VolumeRepository.cs b/API/Data/Repositories/VolumeRepository.cs
index d991a928c..339da798d 100644
--- a/API/Data/Repositories/VolumeRepository.cs
+++ b/API/Data/Repositories/VolumeRepository.cs
@@ -1,9 +1,8 @@
using System.Collections.Generic;
-using System.IO;
using System.Linq;
using System.Threading.Tasks;
+using API.Comparators;
using API.DTOs;
-using API.DTOs.Reader;
using API.Entities;
using API.Interfaces.Repositories;
using AutoMapper;
@@ -15,10 +14,17 @@ namespace API.Data.Repositories
public class VolumeRepository : IVolumeRepository
{
private readonly DataContext _context;
+ private readonly IMapper _mapper;
- public VolumeRepository(DataContext context)
+ public VolumeRepository(DataContext context, IMapper mapper)
{
_context = context;
+ _mapper = mapper;
+ }
+
+ public void Add(Volume volume)
+ {
+ _context.Volume.Add(volume);
}
public void Update(Volume volume)
@@ -26,6 +32,16 @@ namespace API.Data.Repositories
_context.Entry(volume).State = EntityState.Modified;
}
+ public void Remove(Volume volume)
+ {
+ _context.Volume.Remove(volume);
+ }
+
+ /// <summary>
+ /// Returns a list of non-tracked files for a given volume.
+ /// </summary>
+ /// <param name="volumeId"></param>
+ /// <returns></returns>
public async Task<IList<MangaFile>> GetFilesForVolume(int volumeId)
{
return await _context.Chapter
@@ -36,6 +52,11 @@ namespace API.Data.Repositories
.ToListAsync();
}
+ /// <summary>
+ /// Returns the cover image file for the given volume
+ /// </summary>
+ /// <param name="volumeId"></param>
+ /// <returns></returns>
public async Task<string> GetVolumeCoverImageAsync(int volumeId)
{
return await _context.Volume
@@ -45,6 +66,11 @@ namespace API.Data.Repositories
.SingleOrDefaultAsync();
}
+ /// <summary>
+ /// Returns all chapter Ids belonging to a list of Volume Ids
+ /// </summary>
+ /// <param name="volumeIds"></param>
+ /// <returns></returns>
public async Task<IList<int>> GetChapterIdsByVolumeIds(IReadOnlyList<int> volumeIds)
{
return await _context.Chapter
@@ -52,5 +78,131 @@ namespace API.Data.Repositories
.Select(c => c.Id)
.ToListAsync();
}
+
+ /// <summary>
+ /// Returns all volumes that contain a seriesId in passed array.
+ /// </summary>
+ /// <param name="seriesIds"></param>
+ /// <returns></returns>
+ public async Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false)
+ {
+ var query = _context.Volume
+ .Where(v => seriesIds.Contains(v.SeriesId));
+
+ if (includeChapters)
+ {
+ query = query.Include(v => v.Chapters);
+ }
+ return await query.ToListAsync();
+ }
+
+ /// <summary>
+ /// Returns an individual Volume including Chapters and Files and Reading Progress for a given volumeId
+ /// </summary>
+ /// <param name="volumeId"></param>
+ /// <param name="userId"></param>
+ /// <returns></returns>
+ public async Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId)
+ {
+ var volume = await _context.Volume
+ .Where(vol => vol.Id == volumeId)
+ .Include(vol => vol.Chapters)
+ .ThenInclude(c => c.Files)
+ .ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
+ .SingleAsync(vol => vol.Id == volumeId);
+
+ var volumeList = new List<VolumeDto>() {volume};
+ await AddVolumeModifiers(userId, volumeList);
+
+ return volumeList[0];
+ }
+
+ /// <summary>
+ /// Returns the full Volumes including Chapters and Files for a given series
+ /// </summary>
+ /// <param name="seriesId"></param>
+ /// <returns></returns>
+ public async Task<IEnumerable<Volume>> GetVolumes(int seriesId)
+ {
+ return await _context.Volume
+ .Where(vol => vol.SeriesId == seriesId)
+ .Include(vol => vol.Chapters)
+ .ThenInclude(c => c.Files)
+ .OrderBy(vol => vol.Number)
+ .ToListAsync();
+ }
+
+ /// <summary>
+ /// Returns a single volume with Chapter and Files
+ /// </summary>
+ /// <param name="volumeId"></param>
+ /// <returns></returns>
+ public async Task<Volume> GetVolumeAsync(int volumeId)
+ {
+ return await _context.Volume
+ .Include(vol => vol.Chapters)
+ .ThenInclude(c => c.Files)
+ .SingleOrDefaultAsync(vol => vol.Id == volumeId);
+ }
+
+
+ /// <summary>
+ /// Returns all volumes for a given series with progress information attached. Includes all Chapters as well.
+ /// </summary>
+ /// <param name="seriesId"></param>
+ /// <param name="userId"></param>
+ /// <returns></returns>
+ public async Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId)
+ {
+ var volumes = await _context.Volume
+ .Where(vol => vol.SeriesId == seriesId)
+ .Include(vol => vol.Chapters)
+ .OrderBy(volume => volume.Number)
+ .ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
+ .AsNoTracking()
+ .ToListAsync();
+
+ await AddVolumeModifiers(userId, volumes);
+ SortSpecialChapters(volumes);
+
+ return volumes;
+ }
+
+ public async Task<Volume> GetVolumeByIdAsync(int volumeId)
+ {
+ return await _context.Volume.SingleOrDefaultAsync(x => x.Id == volumeId);
+ }
+
+
+ private static void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
+ {
+ var sorter = new NaturalSortComparer();
+ foreach (var v in volumes.Where(vDto => vDto.Number == 0))
+ {
+ v.Chapters = v.Chapters.OrderBy(x => x.Range, sorter).ToList();
+ }
+ }
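+
+ // Volume 0 acts as the bucket for specials; NaturalSortComparer keeps their Ranges in
+ // human order (e.g. "Special 2" before "Special 10"), which plain string ordering would not.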
+
+
+ private async Task AddVolumeModifiers(int userId, IReadOnlyCollection<VolumeDto> volumes)
+ {
+ var volIds = volumes.Select(s => s.Id);
+ var userProgress = await _context.AppUserProgresses
+ .Where(p => p.AppUserId == userId && volIds.Contains(p.VolumeId))
+ .AsNoTracking()
+ .ToListAsync();
+
+ foreach (var v in volumes)
+ {
+ foreach (var c in v.Chapters)
+ {
+ c.PagesRead = userProgress.Where(p => p.ChapterId == c.Id).Sum(p => p.PagesRead);
+ }
+
+ v.PagesRead = userProgress.Where(p => p.VolumeId == v.Id).Sum(p => p.PagesRead);
+ }
+ }
+
+
}
}
diff --git a/API/Data/Scanner/Chunk.cs b/API/Data/Scanner/Chunk.cs
new file mode 100644
index 000000000..9a9e04f5c
--- /dev/null
+++ b/API/Data/Scanner/Chunk.cs
@@ -0,0 +1,21 @@
+namespace API.Data.Scanner
+{
+ /// <summary>
+ /// Represents a set of Entities which is broken up and iterated on
+ /// </summary>
+ public class Chunk
+ {
+ /// <summary>
+ /// Total number of entities
+ /// </summary>
+ public int TotalSize { get; set; }
+ /// <summary>
+ /// Size of each chunk to iterate over
+ /// </summary>
+ public int ChunkSize { get; set; }
+ /// <summary>
+ /// Total chunks to iterate over
+ /// </summary>
+ public int TotalChunks { get; set; }
+ }
+}
diff --git a/API/Data/UnitOfWork.cs b/API/Data/UnitOfWork.cs
index 017293be0..64f9c4fe0 100644
--- a/API/Data/UnitOfWork.cs
+++ b/API/Data/UnitOfWork.cs
@@ -25,7 +25,7 @@ namespace API.Data
public IUserRepository UserRepository => new UserRepository(_context, _userManager, _mapper);
public ILibraryRepository LibraryRepository => new LibraryRepository(_context, _mapper);
- public IVolumeRepository VolumeRepository => new VolumeRepository(_context);
+ public IVolumeRepository VolumeRepository => new VolumeRepository(_context, _mapper);
public ISettingsRepository SettingsRepository => new SettingsRepository(_context, _mapper);
diff --git a/API/Entities/FolderPath.cs b/API/Entities/FolderPath.cs
index dab3d86cd..267564fe8 100644
--- a/API/Entities/FolderPath.cs
+++ b/API/Entities/FolderPath.cs
@@ -8,12 +8,12 @@ namespace API.Entities
public int Id { get; set; }
public string Path { get; set; }
/// <summary>
- /// Used when scanning to see if we can skip if nothing has changed.
+ /// Used when scanning to see if we can skip if nothing has changed. (not implemented)
/// </summary>
public DateTime LastScanned { get; set; }
-
+
// Relationship
public Library Library { get; set; }
public int LibraryId { get; set; }
}
-}
\ No newline at end of file
+}
diff --git a/API/Entities/MangaFile.cs b/API/Entities/MangaFile.cs
index 72c620ce9..2865178c7 100644
--- a/API/Entities/MangaFile.cs
+++ b/API/Entities/MangaFile.cs
@@ -38,5 +38,13 @@ namespace API.Entities
{
return File.GetLastWriteTime(FilePath) > LastModified;
}
+
+ /// <summary>
+ /// Updates the Last Modified time of the underlying file
+ /// </summary>
+ public void UpdateLastModified()
+ {
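+ // Syncing LastModified with the file on disk means HasFileBeenModified() will
+ // report false until the underlying file changes again.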
+ LastModified = File.GetLastWriteTime(FilePath);
+ }
}
}
diff --git a/API/Entities/Series.cs b/API/Entities/Series.cs
index 899e52bfd..5b7bc86bd 100644
--- a/API/Entities/Series.cs
+++ b/API/Entities/Series.cs
@@ -33,7 +33,7 @@ namespace API.Entities
/// <summary>
/// Summary information related to the Series
/// </summary>
- public string Summary { get; set; } // TODO: Migrate into SeriesMetdata (with Metadata update)
+ public string Summary { get; set; } // NOTE: Migrate into SeriesMetadata (with Metadata update)
public DateTime Created { get; set; }
public DateTime LastModified { get; set; }
///
diff --git a/API/Interfaces/Repositories/ILibraryRepository.cs b/API/Interfaces/Repositories/ILibraryRepository.cs
index 4d9b03fe4..1ba6ac910 100644
--- a/API/Interfaces/Repositories/ILibraryRepository.cs
+++ b/API/Interfaces/Repositories/ILibraryRepository.cs
@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.Threading.Tasks;
+using API.Data.Repositories;
using API.DTOs;
using API.Entities;
using API.Entities.Enums;
@@ -13,7 +14,7 @@ namespace API.Interfaces.Repositories
void Delete(Library library);
Task<IEnumerable<LibraryDto>> GetLibraryDtosAsync();
Task<bool> LibraryExists(string libraryName);
- Task<Library> GetLibraryForIdAsync(int libraryId);
+ Task<Library> GetLibraryForIdAsync(int libraryId, LibraryIncludes includes);
Task<Library> GetFullLibraryForIdAsync(int libraryId);
Task<Library> GetFullLibraryForIdAsync(int libraryId, int seriesId);
Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName);
diff --git a/API/Interfaces/Repositories/ISeriesRepository.cs b/API/Interfaces/Repositories/ISeriesRepository.cs
index 05fe937eb..c7ac41c53 100644
--- a/API/Interfaces/Repositories/ISeriesRepository.cs
+++ b/API/Interfaces/Repositories/ISeriesRepository.cs
@@ -2,6 +2,7 @@
using System.Collections;
using System.Collections.Generic;
using System.Threading.Tasks;
+using API.Data.Scanner;
using API.DTOs;
using API.DTOs.Filtering;
using API.Entities;
@@ -11,12 +12,10 @@ namespace API.Interfaces.Repositories
{
public interface ISeriesRepository
{
- void Add(Series series);
+ void Attach(Series series);
void Update(Series series);
- Task<Series> GetSeriesByNameAsync(string name);
+ void Remove(Series series);
Task<bool> DoesSeriesNameExistInLibrary(string name);
- Series GetSeriesByName(string name);
-
/// <summary>
/// Adds user information like progress, ratings, etc
/// </summary>
/// <param name="libraryId"></param>
/// <param name="userId"></param>
/// <returns></returns>
Task<PagedList<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams, FilterDto filter);
-
/// <summary>
/// Does not add user information like progress, ratings, etc.
/// </summary>
/// <param name="libraryIds"></param>
/// <returns></returns>
Task<IEnumerable<SearchResultDto>> SearchSeries(int[] libraryIds, string searchQuery);
Task<IEnumerable<Series>> GetSeriesForLibraryIdAsync(int libraryId);
- Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId);
- Task<IEnumerable<Volume>> GetVolumes(int seriesId);
Task<SeriesDto> GetSeriesDtoByIdAsync(int seriesId, int userId);
- Task<Volume> GetVolumeAsync(int volumeId);
- Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId);
- /// <summary>
- /// A fast lookup of just the volume information with no tracking.
- /// </summary>
- /// <param name="volumeId"></param>
- /// <returns></returns>
- Task<VolumeDto> GetVolumeDtoAsync(int volumeId);
- Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false);
Task<bool> DeleteSeriesAsync(int seriesId);
- Task<Volume> GetVolumeByIdAsync(int volumeId);
Task<Series> GetSeriesByIdAsync(int seriesId);
Task<int[]> GetChapterIdsForSeriesAsync(int[] seriesIds);
Task<IDictionary<int, IList<int>>> GetChapterIdWithSeriesIdForSeriesAsync(int[] seriesIds);
@@ -62,12 +48,15 @@ namespace API.Interfaces.Repositories
Task<string> GetSeriesCoverImageAsync(int seriesId);
Task<IEnumerable<SeriesDto>> GetInProgress(int userId, int libraryId, UserParams userParams, FilterDto filter);
- Task<PagedList<SeriesDto>> GetRecentlyAdded(int libraryId, int userId, UserParams userParams, FilterDto filter);
+ Task<PagedList<SeriesDto>> GetRecentlyAdded(int libraryId, int userId, UserParams userParams, FilterDto filter); // NOTE: Probably put this in LibraryRepo
Task<SeriesMetadataDto> GetSeriesMetadata(int seriesId);
Task<IEnumerable<SeriesDto>> GetSeriesDtoForCollectionAsync(int collectionId, int userId, UserParams userParams);
Task<IList<MangaFile>> GetFilesForSeries(int seriesId);
Task<IEnumerable<SeriesDto>> GetSeriesDtoForIdsAsync(IEnumerable<int> seriesIds, int userId);
Task<IEnumerable<string>> GetAllCoverImagesAsync();
Task<IEnumerable<string>> GetLockedCoverImagesAsync();
+ Task<PagedList<Series>> GetFullSeriesForLibraryIdAsync(int libraryId, UserParams userParams);
+ Task<Series> GetFullSeriesForSeriesIdAsync(int seriesId);
+ Task<Chunk> GetChunkInfo(int libraryId = 0);
}
}
diff --git a/API/Interfaces/Repositories/IVolumeRepository.cs b/API/Interfaces/Repositories/IVolumeRepository.cs
index 62ec0ef9a..63045a38d 100644
--- a/API/Interfaces/Repositories/IVolumeRepository.cs
+++ b/API/Interfaces/Repositories/IVolumeRepository.cs
@@ -7,9 +7,19 @@ namespace API.Interfaces.Repositories
{
public interface IVolumeRepository
{
+ void Add(Volume volume);
void Update(Volume volume);
+ void Remove(Volume volume);
Task<IList<MangaFile>> GetFilesForVolume(int volumeId);
Task<string> GetVolumeCoverImageAsync(int volumeId);
Task<IList<int>> GetChapterIdsByVolumeIds(IReadOnlyList<int> volumeIds);
+
+ // From Series Repo
+ Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId);
+ Task<Volume> GetVolumeAsync(int volumeId);
+ Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId);
+ Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false);
+ Task<IEnumerable<Volume>> GetVolumes(int seriesId);
+ Task<Volume> GetVolumeByIdAsync(int volumeId);
}
}
diff --git a/API/Interfaces/Services/IScannerService.cs b/API/Interfaces/Services/IScannerService.cs
index b67290bfc..bab0ca588 100644
--- a/API/Interfaces/Services/IScannerService.cs
+++ b/API/Interfaces/Services/IScannerService.cs
@@ -11,9 +11,8 @@ namespace API.Interfaces.Services
/// cover images if forceUpdate is true.
/// </summary>
/// <param name="libraryId">Library to scan against</param>
- /// <param name="forceUpdate">Force overwriting for cover images</param>
- Task ScanLibrary(int libraryId, bool forceUpdate);
+ Task ScanLibrary(int libraryId);
Task ScanLibraries();
- Task ScanSeries(int libraryId, int seriesId, bool forceUpdate, CancellationToken token);
+ Task ScanSeries(int libraryId, int seriesId, CancellationToken token);
}
}
diff --git a/API/Interfaces/Services/ReaderService.cs b/API/Interfaces/Services/ReaderService.cs
index eaa3b96d7..f46ccd7d1 100644
--- a/API/Interfaces/Services/ReaderService.cs
+++ b/API/Interfaces/Services/ReaderService.cs
@@ -210,7 +210,7 @@ namespace API.Interfaces.Services
/// <returns>-1 if nothing can be found</returns>
public async Task<int> GetNextChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId)
{
- var volumes = (await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, userId)).ToList();
+ var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).ToList();
var currentVolume = volumes.Single(v => v.Id == volumeId);
var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId);
@@ -262,7 +262,7 @@ namespace API.Interfaces.Services
/// <returns>-1 if nothing can be found</returns>
public async Task<int> GetPrevChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId)
{
- var volumes = (await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, userId)).Reverse().ToList();
+ var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).Reverse().ToList();
var currentVolume = volumes.Single(v => v.Id == volumeId);
var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId);
diff --git a/API/Parser/Parser.cs b/API/Parser/Parser.cs
index 0650faf4a..8e2751135 100644
--- a/API/Parser/Parser.cs
+++ b/API/Parser/Parser.cs
@@ -45,6 +45,10 @@ namespace API.Parser
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout);
+ private static readonly Regex NormalizeRegex = new Regex(@"[^a-zA-Z0-9]",
+ RegexOptions.IgnoreCase | RegexOptions.Compiled,
+ RegexTimeout);
+
private static readonly Regex[] MangaVolumeRegex = new[]
{
@@ -1064,7 +1068,7 @@ namespace API.Parser
public static string Normalize(string name)
{
- return Regex.Replace(name.ToLower(), "[^a-zA-Z0-9]", string.Empty);
+ return NormalizeRegex.Replace(name, string.Empty).ToLower();
}
diff --git a/API/Services/ArchiveService.cs b/API/Services/ArchiveService.cs
index bfa36595c..1b68956e5 100644
--- a/API/Services/ArchiveService.cs
+++ b/API/Services/ArchiveService.cs
@@ -13,10 +13,8 @@ using API.Interfaces.Services;
using API.Services.Tasks;
using Kavita.Common;
using Microsoft.Extensions.Logging;
-using Microsoft.IO;
using SharpCompress.Archives;
using SharpCompress.Common;
-using Image = NetVips.Image;
namespace API.Services
{
@@ -28,14 +26,12 @@ namespace API.Services
{
private readonly ILogger<ArchiveService> _logger;
private readonly IDirectoryService _directoryService;
- private readonly NaturalSortComparer _comparer;
private const string ComicInfoFilename = "comicinfo";
public ArchiveService(ILogger<ArchiveService> logger, IDirectoryService directoryService)
{
_logger = logger;
_directoryService = directoryService;
- _comparer = new NaturalSortComparer();
}
///
@@ -81,13 +77,11 @@ namespace API.Services
{
case ArchiveLibrary.Default:
{
- _logger.LogDebug("Using default compression handling");
- using ZipArchive archive = ZipFile.OpenRead(archivePath);
+ using var archive = ZipFile.OpenRead(archivePath);
return archive.Entries.Count(e => !Parser.Parser.HasBlacklistedFolderInPath(e.FullName) && Parser.Parser.IsImage(e.FullName));
}
case ArchiveLibrary.SharpCompress:
{
- _logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
return archive.Entries.Count(entry => !entry.IsDirectory &&
!Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
@@ -130,7 +124,7 @@ namespace API.Services
/// <returns>Entry name of match, null if no match</returns>
public string FirstFileEntry(IEnumerable<string> entryFullNames)
{
- var result = entryFullNames.OrderBy(Path.GetFileName, _comparer)
+ var result = entryFullNames.OrderBy(Path.GetFileName, new NaturalSortComparer())
.FirstOrDefault(x => !Parser.Parser.HasBlacklistedFolderInPath(x)
&& Parser.Parser.IsImage(x)
&& !x.StartsWith(Parser.Parser.MacOsMetadataFileStartsWith));
@@ -160,7 +154,6 @@ namespace API.Services
{
case ArchiveLibrary.Default:
{
- _logger.LogDebug("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath);
var entryNames = archive.Entries.Select(e => e.FullName).ToArray();
@@ -172,7 +165,6 @@ namespace API.Services
}
case ArchiveLibrary.SharpCompress:
{
- _logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
var entryNames = archive.Entries.Where(archiveEntry => !archiveEntry.IsDirectory).Select(e => e.Key).ToList();
@@ -316,7 +308,6 @@ namespace API.Services
{
case ArchiveLibrary.Default:
{
- _logger.LogTrace("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath);
var entry = archive.Entries.SingleOrDefault(x => !Parser.Parser.HasBlacklistedFolderInPath(x.FullName)
&& Path.GetFileNameWithoutExtension(x.Name)?.ToLower() == ComicInfoFilename
@@ -332,7 +323,6 @@ namespace API.Services
}
case ArchiveLibrary.SharpCompress:
{
- _logger.LogTrace("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory
&& !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
@@ -410,14 +400,12 @@ namespace API.Services
{
case ArchiveLibrary.Default:
{
- _logger.LogDebug("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath);
ExtractArchiveEntries(archive, extractPath);
break;
}
case ArchiveLibrary.SharpCompress:
{
- _logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath);
ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory
&& !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
diff --git a/API/Services/BookService.cs b/API/Services/BookService.cs
index 6231de20a..b63a0253e 100644
--- a/API/Services/BookService.cs
+++ b/API/Services/BookService.cs
@@ -393,7 +393,7 @@ namespace API.Services
/// <returns></returns>
public string GetCoverImage(string fileFilePath, string fileName)
{
- if (!IsValidFile(fileFilePath)) return String.Empty;
+ if (!IsValidFile(fileFilePath)) return string.Empty;
if (Parser.Parser.IsPdf(fileFilePath))
{
@@ -411,8 +411,8 @@ namespace API.Services
?? epubBook.Content.Images.Values.FirstOrDefault();
if (coverImageContent == null) return string.Empty;
+ using var stream = coverImageContent.GetContentStream();
- using var stream = StreamManager.GetStream("BookService.GetCoverImage", coverImageContent.ReadContent());
return ImageService.WriteCoverThumbnail(stream, fileName);
}
catch (Exception ex)
diff --git a/API/Services/ImageService.cs b/API/Services/ImageService.cs
index 0f0f3aa16..fddb3fffe 100644
--- a/API/Services/ImageService.cs
+++ b/API/Services/ImageService.cs
@@ -47,6 +47,8 @@ namespace API.Services
var firstImage = _directoryService.GetFilesWithExtension(directory, Parser.Parser.ImageFileExtensions)
.OrderBy(f => f, new NaturalSortComparer()).FirstOrDefault();
+
+
return firstImage;
}
@@ -73,7 +75,7 @@ namespace API.Services
{
using var thumbnail = Image.Thumbnail(path, ThumbnailWidth);
var filename = fileName + ".png";
- thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, fileName + ".png"));
+ thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, filename));
return filename;
}
catch (Exception e)
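
The ImageService fix writes the thumbnail under the same filename variable it returns instead of rebuilding the expression inline. A minimal sketch of the corrected shape, assuming NetVips, which matches the Image.Thumbnail and WriteToFile calls in the hunk:

    using NetVips;

    public static class ThumbnailSketch
    {
        public static string WriteCover(string sourcePath, string coverDirectory, int thumbnailWidth)
        {
            using var thumbnail = Image.Thumbnail(sourcePath, thumbnailWidth);
            // Derive the name once and reuse it for both the write and the return value.
            var filename = System.IO.Path.GetFileNameWithoutExtension(sourcePath) + ".png";
            thumbnail.WriteToFile(System.IO.Path.Join(coverDirectory, filename));
            return filename;
        }
    }
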
diff --git a/API/Services/MetadataService.cs b/API/Services/MetadataService.cs
index d443f9f23..88220df7d 100644
--- a/API/Services/MetadataService.cs
+++ b/API/Services/MetadataService.cs
@@ -5,9 +5,11 @@ using System.IO;
using System.Linq;
using System.Threading.Tasks;
using API.Comparators;
+using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
+using API.Helpers;
using API.Interfaces;
using API.Interfaces.Services;
using API.SignalR;
@@ -74,7 +76,7 @@ namespace API.Services
private string GetCoverImage(MangaFile file, int volumeId, int chapterId)
{
- file.LastModified = DateTime.Now;
+ file.UpdateLastModified();
switch (file.Format)
{
case MangaFormat.Pdf:
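
Swapping the ad-hoc file.LastModified = DateTime.Now assignment for file.UpdateLastModified() moves the timestamp policy onto the entity itself. A minimal sketch of that encapsulation (an assumed shape; the real MangaFile carries more members):

    using System;
    using System.IO;

    public class MangaFileSketch
    {
        public string FilePath { get; set; }
        public DateTime LastModified { get; private set; }

        // One place decides what "touched" means for a file entity.
        public void UpdateLastModified() => LastModified = DateTime.Now;

        // Compare against the filesystem to detect files changed behind our back.
        public bool HasFileBeenModified() => LastModified < File.GetLastWriteTime(FilePath);
    }
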
@@ -102,6 +104,7 @@ namespace API.Services
if (ShouldUpdateCoverImage(chapter.CoverImage, firstFile, forceUpdate, chapter.CoverImageLocked))
{
+ _logger.LogDebug("[MetadataService] Generating cover image for {File}", firstFile?.FilePath);
chapter.CoverImage = GetCoverImage(firstFile, chapter.VolumeId, chapter.Id);
return true;
}
@@ -117,8 +120,7 @@ namespace API.Services
public bool UpdateMetadata(Volume volume, bool forceUpdate)
{
// We need to check if Volume coverImage matches first chapters if forceUpdate is false
- if (volume == null || !ShouldUpdateCoverImage(volume.CoverImage, null, forceUpdate
- , false)) return false;
+ if (volume == null || !ShouldUpdateCoverImage(volume.CoverImage, null, forceUpdate)) return false;
volume.Chapters ??= new List<Chapter>();
var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).FirstOrDefault();
@@ -183,11 +185,9 @@ namespace API.Services
if (!string.IsNullOrEmpty(series.Summary))
{
series.Summary = summary;
- firstFile.LastModified = DateTime.Now;
return true;
}
}
- firstFile.LastModified = DateTime.Now; // NOTE: Should I put this here as well since it might not have actually been parsed?
return false;
}
@@ -200,34 +200,57 @@ namespace API.Services
/// Force updating cover image even if underlying file has not been modified or chapter already has a cover image
public async Task RefreshMetadata(int libraryId, bool forceUpdate = false)
{
- var sw = Stopwatch.StartNew();
- var library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId);
+ var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);
+ _logger.LogInformation("[MetadataService] Beginning metadata refresh of {LibraryName}", library.Name);
- // PERF: See if we can break this up into multiple threads that process 20 series at a time then save so we can reduce amount of memory used
- _logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name);
- foreach (var series in library.Series)
+ var chunkInfo = await _unitOfWork.SeriesRepository.GetChunkInfo(library.Id);
+ var stopwatch = Stopwatch.StartNew();
+ var totalTime = 0L;
+
+ for (var chunk = 0; chunk <= chunkInfo.TotalChunks; chunk++)
{
- var volumeUpdated = false;
- foreach (var volume in series.Volumes)
- {
- var chapterUpdated = false;
- foreach (var chapter in volume.Chapters)
+ totalTime += stopwatch.ElapsedMilliseconds;
+ stopwatch.Restart();
+ _logger.LogDebug($"[MetadataService] Processing chunk {chunk} / {chunkInfo.TotalChunks} with size {chunkInfo.ChunkSize} Series ({chunk * chunkInfo.ChunkSize} - {(chunk + 1) * chunkInfo.ChunkSize}");
+ var nonLibrarySeries = await _unitOfWork.SeriesRepository.GetFullSeriesForLibraryIdAsync(library.Id,
+ new UserParams()
{
- chapterUpdated = UpdateMetadata(chapter, forceUpdate);
+ PageNumber = chunk,
+ PageSize = chunkInfo.ChunkSize
+ });
+
+ Parallel.ForEach(nonLibrarySeries, series =>
+ {
+ _logger.LogDebug("[MetadataService] Processing series {SeriesName}", series.OriginalName);
+ var volumeUpdated = false;
+ foreach (var volume in series.Volumes)
+ {
+ var chapterUpdated = false;
+ foreach (var chapter in volume.Chapters)
+ {
+ chapterUpdated = UpdateMetadata(chapter, forceUpdate);
+ }
+
+ volumeUpdated = UpdateMetadata(volume, chapterUpdated || forceUpdate);
}
- volumeUpdated = UpdateMetadata(volume, chapterUpdated || forceUpdate);
+ UpdateMetadata(series, volumeUpdated || forceUpdate);
+ });
+
+ if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
+ {
+ _logger.LogInformation(
+ "[MetadataService] Processed {SeriesStart} - {SeriesEnd} series in {ElapsedScanTime} milliseconds for {LibraryName}",
+ chunk * chunkInfo.ChunkSize, (chunk + 1) * chunkInfo.ChunkSize, stopwatch.ElapsedMilliseconds, library.Name);
+
+ foreach (var series in nonLibrarySeries)
+ {
+ await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadata, MessageFactory.RefreshMetadataEvent(library.Id, series.Id));
+ }
}
-
- UpdateMetadata(series, volumeUpdated || forceUpdate);
- _unitOfWork.SeriesRepository.Update(series);
}
-
- if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
- {
- _logger.LogInformation("Updated metadata for {LibraryName} in {ElapsedMilliseconds} milliseconds", library.Name, sw.ElapsedMilliseconds);
- }
+ _logger.LogInformation("[MetadataService] Updated metadata for {SeriesNumber} series in library {LibraryName} in {ElapsedMilliseconds} milliseconds total", chunkInfo.TotalSize, library.Name, totalTime);
}
@@ -239,15 +262,13 @@ namespace API.Services
public async Task RefreshMetadataForSeries(int libraryId, int seriesId, bool forceUpdate = false)
{
var sw = Stopwatch.StartNew();
- var library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId);
-
- var series = library.Series.SingleOrDefault(s => s.Id == seriesId);
+ var series = await _unitOfWork.SeriesRepository.GetFullSeriesForSeriesIdAsync(seriesId);
if (series == null)
{
- _logger.LogError("Series {SeriesId} was not found on Library {LibraryName}", seriesId, libraryId);
+ _logger.LogError("[MetadataService] Series {SeriesId} was not found on Library {LibraryId}", seriesId, libraryId);
return;
}
- _logger.LogInformation("Beginning metadata refresh of {SeriesName}", series.Name);
+ _logger.LogInformation("[MetadataService] Beginning metadata refresh of {SeriesName}", series.Name);
var volumeUpdated = false;
foreach (var volume in series.Volumes)
{
@@ -261,14 +282,14 @@ namespace API.Services
}
UpdateMetadata(series, volumeUpdated || forceUpdate);
- _unitOfWork.SeriesRepository.Update(series);
if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
{
- _logger.LogInformation("Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds);
- await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.RefreshMetadataEvent(libraryId, seriesId));
+ await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadata, MessageFactory.RefreshMetadataEvent(series.LibraryId, series.Id));
}
+
+ _logger.LogInformation("[MetadataService] Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds);
}
}
}
diff --git a/API/Services/TaskScheduler.cs b/API/Services/TaskScheduler.cs
index 2d1b25a7d..6400dd79e 100644
--- a/API/Services/TaskScheduler.cs
+++ b/API/Services/TaskScheduler.cs
@@ -119,7 +119,7 @@ namespace API.Services
public void ScanLibrary(int libraryId, bool forceUpdate = false)
{
_logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
- BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate));
+ BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId));
// When we do a scan, force cache to re-unpack in case page numbers change
BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheDirectory());
}
@@ -141,7 +141,7 @@ namespace API.Services
BackgroundJob.Enqueue(() => DirectoryService.ClearDirectory(tempDirectory));
}
- public void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false)
+ public void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = true)
{
_logger.LogInformation("Enqueuing series metadata refresh for: {SeriesId}", seriesId);
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate));
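
The forceUpdate default on RefreshSeriesMetadata flips from false to true. One subtlety worth keeping in mind: C# bakes optional-argument defaults into each call site at compile time, so only callers that omit the argument pick up the new default, and only once they are recompiled. A small demonstration:

    using System;

    public static class DefaultArgSketch
    {
        public static void Refresh(int seriesId, bool forceUpdate = true) =>
            Console.WriteLine($"{seriesId}: force={forceUpdate}");

        public static void Main()
        {
            Refresh(1);        // force=true  (picks up the new default)
            Refresh(1, false); // force=false (explicit callers are unaffected)
        }
    }
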
@@ -150,7 +150,7 @@ namespace API.Services
public void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false)
{
_logger.LogInformation("Enqueuing series scan for: {SeriesId}", seriesId);
- BackgroundJob.Enqueue(() => _scannerService.ScanSeries(libraryId, seriesId, forceUpdate, CancellationToken.None));
+ BackgroundJob.Enqueue(() => _scannerService.ScanSeries(libraryId, seriesId, CancellationToken.None));
}
public void BackupDatabase()
diff --git a/API/Services/Tasks/ScannerService.cs b/API/Services/Tasks/ScannerService.cs
index 15bb715c7..e80955b29 100644
--- a/API/Services/Tasks/ScannerService.cs
+++ b/API/Services/Tasks/ScannerService.cs
@@ -7,9 +7,11 @@ using System.Threading;
using System.Threading.Tasks;
using API.Comparators;
using API.Data;
+using API.Data.Repositories;
using API.Entities;
using API.Entities.Enums;
using API.Extensions;
+using API.Helpers;
using API.Interfaces;
using API.Interfaces.Services;
using API.Parser;
@@ -46,81 +48,103 @@ namespace API.Services.Tasks
[DisableConcurrentExecution(timeoutInSeconds: 360)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
- public async Task ScanSeries(int libraryId, int seriesId, bool forceUpdate, CancellationToken token)
+ public async Task ScanSeries(int libraryId, int seriesId, CancellationToken token)
{
+ var sw = new Stopwatch();
var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId);
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId);
- var library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId, seriesId);
- var dirs = DirectoryService.FindHighestDirectoriesFromFiles(library.Folders.Select(f => f.Path), files.Select(f => f.FilePath).ToList());
- var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new []{ seriesId });
+ var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new[] {seriesId});
+ var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders);
+ var folderPaths = library.Folders.Select(f => f.Path).ToList();
+ var dirs = DirectoryService.FindHighestDirectoriesFromFiles(folderPaths, files.Select(f => f.FilePath).ToList());
_logger.LogInformation("Beginning file scan on {SeriesName}", series.Name);
var scanner = new ParseScannedFiles(_bookService, _logger);
var parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles, out var scanElapsedTime);
- // If a root level folder scan occurs, then multiple series gets passed in and thus we get a unique constraint issue
- // Hence we clear out anything but what we selected for
- var firstSeries = library.Series.FirstOrDefault();
+ // Remove any parsedSeries keys that don't belong to our series. This can occur when users store 2 series in the same folder
+ RemoveParsedInfosNotForSeries(parsedSeries, series);
+
+ // If nothing was found, first validate that any of the files still exist. If they don't, we have a deletion and can skip the rest of the logic flow
+ if (parsedSeries.Count == 0)
+ {
+ var anyFilesExist =
+ (await _unitOfWork.SeriesRepository.GetFilesForSeries(series.Id)).Any(m => File.Exists(m.FilePath));
+
+ if (!anyFilesExist)
+ {
+ _unitOfWork.SeriesRepository.Remove(series);
+ await CommitAndSend(libraryId, seriesId, totalFiles, parsedSeries, sw, scanElapsedTime, series, chapterIds, token);
+ }
+ else
+ {
+ // We need to do an additional check for an edge case: if the scan ran and the files do not match the existing Series name, it is very likely
+ // the files have poor naming and, if we don't correct for it, the series will get deleted because the parser cannot fall back onto folder parsing when the root
+ // is the series folder.
+ var existingFolder = dirs.Keys.FirstOrDefault(key => key.Contains(series.OriginalName));
+ if (dirs.Keys.Count == 1 && !string.IsNullOrEmpty(existingFolder))
+ {
+ dirs = new Dictionary<string, string>();
+ var path = Directory.GetParent(existingFolder)?.FullName;
+ if (!folderPaths.Contains(path) || !folderPaths.Any(p => p.Contains(path ?? string.Empty)))
+ {
+ _logger.LogInformation("[ScanService] Aborted: {SeriesName} has bad naming convention and sits at root of library. Cannot scan series without deletion occuring. Correct file names to have Series Name within it or perform Scan Library", series.OriginalName);
+ return;
+ }
+ if (!string.IsNullOrEmpty(path))
+ {
+ dirs[path] = string.Empty;
+ }
+ }
+
+ _logger.LogInformation("{SeriesName} has bad naming convention, forcing rescan at a higher directory.", series.OriginalName);
+ scanner = new ParseScannedFiles(_bookService, _logger);
+ parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles2, out var scanElapsedTime2);
+ totalFiles += totalFiles2;
+ scanElapsedTime += scanElapsedTime2;
+ RemoveParsedInfosNotForSeries(parsedSeries, series);
+ }
+ }
+
+ // At this point, parsedSeries will have at least one key and we can perform the update. If it still doesn't, just return without doing anything
+ if (parsedSeries.Count == 0) return;
+
+ UpdateSeries(series, parsedSeries);
+ await CommitAndSend(libraryId, seriesId, totalFiles, parsedSeries, sw, scanElapsedTime, series, chapterIds, token);
+ }
+
+ private static void RemoveParsedInfosNotForSeries(Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries, Series series)
+ {
var keys = parsedSeries.Keys;
- foreach (var key in keys.Where(key => !firstSeries.NameInParserInfo(parsedSeries[key].FirstOrDefault()) || firstSeries?.Format != key.Format))
+ foreach (var key in keys.Where(key =>
+ !series.NameInParserInfo(parsedSeries[key].FirstOrDefault()) || series.Format != key.Format))
{
parsedSeries.Remove(key);
}
+ }
- if (parsedSeries.Count == 0)
+ private async Task CommitAndSend(int libraryId, int seriesId, int totalFiles,
+ Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries, Stopwatch sw, long scanElapsedTime, Series series, int[] chapterIds, CancellationToken token)
+ {
+ if (await _unitOfWork.CommitAsync())
{
- // We need to do an additional check for an edge case: If the scan ran and the files do not match the existing Series name, then it is very likely,
- // the files have crap naming and if we don't correct, the series will get deleted due to the parser not being able to fallback onto folder parsing as the root
- // is the series folder.
- var existingFolder = dirs.Keys.FirstOrDefault(key => key.Contains(series.OriginalName));
- if (dirs.Keys.Count == 1 && !string.IsNullOrEmpty(existingFolder))
- {
- dirs = new Dictionary<string, string>();
- var path = Path.GetPathRoot(existingFolder);
- if (!string.IsNullOrEmpty(path))
- {
- dirs[path] = string.Empty;
- }
- }
- _logger.LogDebug("{SeriesName} has bad naming convention, forcing rescan at a higher directory.", series.OriginalName);
- scanner = new ParseScannedFiles(_bookService, _logger);
- parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles2, out var scanElapsedTime2);
- totalFiles += totalFiles2;
- scanElapsedTime += scanElapsedTime2;
+ _logger.LogInformation(
+ "Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {SeriesName}",
+ totalFiles, parsedSeries.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, series.Name);
- // If a root level folder scan occurs, then multiple series gets passed in and thus we get a unique constraint issue
- // Hence we clear out anything but what we selected for
- firstSeries = library.Series.FirstOrDefault();
- keys = parsedSeries.Keys;
- foreach (var key in keys.Where(key => !firstSeries.NameInParserInfo(parsedSeries[key].FirstOrDefault()) || firstSeries?.Format != key.Format))
- {
- parsedSeries.Remove(key);
- }
+ await CleanupDbEntities();
+ BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, false));
+ BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds));
+ // Tell UI that this series is done
+ await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.ScanSeriesEvent(seriesId, series.Name),
+ cancellationToken: token);
+ }
+ else
+ {
+ _logger.LogCritical(
+ "There was a critical error that resulted in a failed scan. Please check logs and rescan");
+ await _unitOfWork.RollbackAsync();
}
-
- var sw = new Stopwatch();
- UpdateLibrary(library, parsedSeries);
-
- _unitOfWork.LibraryRepository.Update(library);
- if (await _unitOfWork.CommitAsync())
- {
- _logger.LogInformation(
- "Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {SeriesName}",
- totalFiles, parsedSeries.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, series.Name);
-
- await CleanupDbEntities();
- BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate));
- BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds));
- // Tell UI that this series is done
- await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.ScanSeriesEvent(seriesId), cancellationToken: token);
- }
- else
- {
- _logger.LogCritical(
- "There was a critical error that resulted in a failed scan. Please check logs and rescan");
- await _unitOfWork.RollbackAsync();
- }
-
}
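
RemoveParsedInfosNotForSeries filters the dictionary by removing keys while enumerating parsedSeries.Keys. On .NET Core 3.0 and later, Dictionary<TKey, TValue>.Remove no longer invalidates enumerators, so the pattern is safe; a minimal sketch:

    using System;
    using System.Collections.Generic;
    using System.Linq;

    public static class PruneSketch
    {
        public static void Main()
        {
            var parsed = new Dictionary<string, int> { ["keep"] = 1, ["drop-a"] = 2, ["drop-b"] = 3 };
            // Safe on .NET Core 3.0+; on older runtimes, snapshot the keys with .ToList() first.
            foreach (var key in parsed.Keys.Where(k => k.StartsWith("drop")))
            {
                parsed.Remove(key);
            }
            Console.WriteLine(string.Join(", ", parsed.Keys)); // keep
        }
    }
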
@@ -132,7 +156,7 @@ namespace API.Services.Tasks
var libraries = await _unitOfWork.LibraryRepository.GetLibrariesAsync();
foreach (var lib in libraries)
{
- await ScanLibrary(lib.Id, false);
+ await ScanLibrary(lib.Id);
}
_logger.LogInformation("Scan of All Libraries Finished");
}
@@ -144,24 +168,23 @@ namespace API.Services.Tasks
/// ie) all entities will be rechecked for new cover images and comicInfo.xml changes
///
///
- ///
[DisableConcurrentExecution(360)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
- public async Task ScanLibrary(int libraryId, bool forceUpdate)
+ public async Task ScanLibrary(int libraryId)
{
Library library;
try
{
- library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId);
+ library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders);
}
catch (Exception ex)
{
// This usually only fails if user is not authenticated.
- _logger.LogError(ex, "There was an issue fetching Library {LibraryId}", libraryId);
+ _logger.LogError(ex, "[ScannerService] There was an issue fetching Library {LibraryId}", libraryId);
return;
}
- _logger.LogInformation("Beginning file scan on {LibraryName}", library.Name);
+ _logger.LogInformation("[ScannerService] Beginning file scan on {LibraryName}", library.Name);
var scanner = new ParseScannedFiles(_bookService, _logger);
var series = scanner.ScanLibrariesForSeries(library.Type, library.Folders.Select(fp => fp.Path), out var totalFiles, out var scanElapsedTime);
@@ -171,24 +194,24 @@ namespace API.Services.Tasks
}
var sw = Stopwatch.StartNew();
- UpdateLibrary(library, series);
+ await UpdateLibrary(library, series);
_unitOfWork.LibraryRepository.Update(library);
if (await _unitOfWork.CommitAsync())
{
_logger.LogInformation(
- "Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}",
+ "[ScannerService] Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}",
totalFiles, series.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, library.Name);
}
else
{
_logger.LogCritical(
- "There was a critical error that resulted in a failed scan. Please check logs and rescan");
+ "[ScannerService] There was a critical error that resulted in a failed scan. Please check logs and rescan");
}
await CleanupAbandonedChapters();
- BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate));
+ BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, false));
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibrary, MessageFactory.ScanLibraryEvent(libraryId, "complete"));
}
@@ -212,78 +235,153 @@ namespace API.Services.Tasks
_logger.LogInformation("Removed {Count} abandoned collection tags", cleanedUp);
}
- private void UpdateLibrary(Library library, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
+ private async Task UpdateLibrary(Library library, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
{
- if (parsedSeries == null) throw new ArgumentNullException(nameof(parsedSeries));
+ if (parsedSeries == null) return;
- // First, remove any series that are not in parsedSeries list
- var missingSeries = FindSeriesNotOnDisk(library.Series, parsedSeries).ToList();
- library.Series = RemoveMissingSeries(library.Series, missingSeries, out var removeCount);
- if (removeCount > 0)
+ // The Library was loaded without its Series, so we need to fetch series in groups of ChunkSize
+ var chunkInfo = await _unitOfWork.SeriesRepository.GetChunkInfo(library.Id);
+ var stopwatch = Stopwatch.StartNew();
+ var totalTime = 0L;
+
+ // Update existing series
+ _logger.LogDebug("[ScannerService] Updating existing series");
+ for (var chunk = 0; chunk <= chunkInfo.TotalChunks; chunk++)
{
- _logger.LogInformation("Removed {RemoveMissingSeries} series that are no longer on disk:", removeCount);
- foreach (var s in missingSeries)
- {
- _logger.LogDebug("Removed {SeriesName} ({Format})", s.Name, s.Format);
- }
+ totalTime += stopwatch.ElapsedMilliseconds;
+ stopwatch.Restart();
+ _logger.LogDebug($"[ScannerService] Processing chunk {chunk} / {chunkInfo.TotalChunks} with size {chunkInfo.ChunkSize} Series ({chunk * chunkInfo.ChunkSize} - {(chunk + 1) * chunkInfo.ChunkSize}");
+ var nonLibrarySeries = await _unitOfWork.SeriesRepository.GetFullSeriesForLibraryIdAsync(library.Id, new UserParams()
+ {
+ PageNumber = chunk,
+ PageSize = chunkInfo.ChunkSize
+ });
+
+ // First, remove any series that are not in parsedSeries list
+ var missingSeries = FindSeriesNotOnDisk(nonLibrarySeries, parsedSeries).ToList();
+
+ foreach (var missing in missingSeries)
+ {
+ _unitOfWork.SeriesRepository.Remove(missing);
+ }
+
+ var cleanedSeries = RemoveMissingSeries(nonLibrarySeries, missingSeries, out var removeCount);
+ if (removeCount > 0)
+ {
+ _logger.LogInformation("[ScannerService] Removed {RemoveMissingSeries} series that are no longer on disk:", removeCount);
+ foreach (var s in missingSeries)
+ {
+ _logger.LogDebug("[ScannerService] Removed {SeriesName} ({Format})", s.Name, s.Format);
+ }
+ }
+
+ // Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series
+ var librarySeries = cleanedSeries.ToList();
+ Parallel.ForEach(librarySeries, (series) => { UpdateSeries(series, parsedSeries); });
+
+ await _unitOfWork.CommitAsync();
+ _logger.LogInformation(
+ "[ScannerService] Processed {SeriesStart} - {SeriesEnd} series in {ElapsedScanTime} milliseconds for {LibraryName}",
+ chunk * chunkInfo.ChunkSize, (chunk + 1) * chunkInfo.ChunkSize, totalTime, library.Name);
+
+ // Emit any series removed
+ foreach (var missing in missingSeries)
+ {
+ await _messageHub.Clients.All.SendAsync(SignalREvents.SeriesRemoved, MessageFactory.SeriesRemovedEvent(missing.Id, missing.Name, library.Id));
+ }
}
+
// Add new series that have parsedInfos
+ _logger.LogDebug("[ScannerService] Adding new series");
+ var newSeries = new List<Series>();
+ var allSeries = (await _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(library.Id)).ToList();
foreach (var (key, infos) in parsedSeries)
{
// Key is normalized already
- Series existingSeries;
- try
- {
- existingSeries = library.Series.SingleOrDefault(s =>
- (s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName)
- && (s.Format == key.Format || s.Format == MangaFormat.Unknown));
- }
- catch (Exception e)
- {
- _logger.LogCritical(e, "There are multiple series that map to normalized key {Key}. You can manually delete the entity via UI and rescan to fix it", key.NormalizedName);
- var duplicateSeries = library.Series.Where(s => s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName).ToList();
- foreach (var series in duplicateSeries)
- {
- _logger.LogCritical("{Key} maps with {Series}", key.Name, series.OriginalName);
- }
+ Series existingSeries;
+ try
+ {
+ existingSeries = allSeries.SingleOrDefault(s =>
+ (s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName)
+ && (s.Format == key.Format || s.Format == MangaFormat.Unknown));
+ }
+ catch (Exception e)
+ {
+ _logger.LogCritical(e, "[ScannerService] There are multiple series that map to normalized key {Key}. You can manually delete the entity via UI and rescan to fix it. This will be skipped", key.NormalizedName);
+ var duplicateSeries = allSeries.Where(s => s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName).ToList();
+ foreach (var series in duplicateSeries)
+ {
+ _logger.LogCritical("[ScannerService] Duplicate Series Found: {Key} maps with {Series}", key.Name, series.OriginalName);
+ }
- continue;
- }
- if (existingSeries == null)
- {
- existingSeries = DbFactory.Series(infos[0].Series);
- existingSeries.Format = key.Format;
- library.Series.Add(existingSeries);
- }
+ continue;
+ }
- existingSeries.NormalizedName = Parser.Parser.Normalize(existingSeries.Name);
- existingSeries.OriginalName ??= infos[0].Series;
- existingSeries.Metadata ??= DbFactory.SeriesMetadata(new List<CollectionTag>());
- existingSeries.Format = key.Format;
+ if (existingSeries != null) continue;
+
+ existingSeries = DbFactory.Series(infos[0].Series);
+ existingSeries.Format = key.Format;
+ newSeries.Add(existingSeries);
}
- // Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series
- var librarySeries = library.Series.ToList();
- Parallel.ForEach(librarySeries, (series) =>
+ foreach (var series in newSeries)
{
- try
- {
- _logger.LogInformation("Processing series {SeriesName}", series.OriginalName);
- UpdateVolumes(series, ParseScannedFiles.GetInfosByName(parsedSeries, series).ToArray());
- series.Pages = series.Volumes.Sum(v => v.Pages);
- }
- catch (Exception ex)
- {
- _logger.LogError(ex, "There was an exception updating volumes for {SeriesName}", series.Name);
- }
- });
+ try
+ {
+ _logger.LogDebug("[ScannerService] Processing series {SeriesName}", series.OriginalName);
+ UpdateVolumes(series, ParseScannedFiles.GetInfosByName(parsedSeries, series).ToArray());
+ series.Pages = series.Volumes.Sum(v => v.Pages);
+ series.LibraryId = library.Id; // We have to manually set this since we aren't adding the series to the Library's series.
+ _unitOfWork.SeriesRepository.Attach(series);
+ if (await _unitOfWork.CommitAsync())
+ {
+ _logger.LogInformation(
+ "[ScannerService] Added {NewSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
+ newSeries.Count, stopwatch.ElapsedMilliseconds, library.Name);
- // Last step, remove any series that have no pages
- library.Series = library.Series.Where(s => s.Pages > 0).ToList();
+ // Inform UI of new series added
+ await _messageHub.Clients.All.SendAsync(SignalREvents.SeriesAdded, MessageFactory.SeriesAddedEvent(series.Id, series.Name, library.Id));
+ }
+ else
+ {
+ // This is probably not needed. Better to catch the exception.
+ _logger.LogCritical(
+ "[ScannerService] There was a critical error that resulted in a failed scan. Please check logs and rescan");
+ }
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "[ScannerService] There was an exception updating volumes for {SeriesName}", series.Name);
+ }
+ }
}
- public IEnumerable<Series> FindSeriesNotOnDisk(ICollection<Series> existingSeries, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
+ private void UpdateSeries(Series series, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
+ {
+ try
+ {
+ _logger.LogInformation("[ScannerService] Processing series {SeriesName}", series.OriginalName);
+
+ var parsedInfos = ParseScannedFiles.GetInfosByName(parsedSeries, series).ToArray();
+ UpdateVolumes(series, parsedInfos);
+ series.Pages = series.Volumes.Sum(v => v.Pages);
+
+ series.NormalizedName = Parser.Parser.Normalize(series.Name);
+ series.Metadata ??= DbFactory.SeriesMetadata(new List<CollectionTag>());
+ if (series.Format == MangaFormat.Unknown)
+ {
+ series.Format = parsedInfos[0].Format;
+ }
+ series.OriginalName ??= parsedInfos[0].Series;
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "[ScannerService] There was an exception updating volumes for {SeriesName}", series.Name);
+ }
+ }
+
+ public static IEnumerable<Series> FindSeriesNotOnDisk(IEnumerable<Series> existingSeries, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
{
var foundSeries = parsedSeries.Select(s => s.Key.Name).ToList();
return existingSeries.Where(es => !es.NameInList(foundSeries) && !SeriesHasMatchingParserInfoFormat(es, parsedSeries));
@@ -332,7 +430,7 @@ namespace API.Services.Tasks
/// Series not found on disk or can't be parsed
///
/// the updated existingSeries
- public static ICollection<Series> RemoveMissingSeries(ICollection<Series> existingSeries, IEnumerable<Series> missingSeries, out int removeCount)
+ public static IList<Series> RemoveMissingSeries(IList<Series> existingSeries, IEnumerable<Series> missingSeries, out int removeCount)
{
var existingCount = existingSeries.Count;
var missingList = missingSeries.ToList();
@@ -351,7 +449,7 @@ namespace API.Services.Tasks
var startingVolumeCount = series.Volumes.Count;
// Add new volumes and update chapters per volume
var distinctVolumes = parsedInfos.DistinctVolumes();
- _logger.LogDebug("Updating {DistinctVolumes} volumes on {SeriesName}", distinctVolumes.Count, series.Name);
+ _logger.LogDebug("[ScannerService] Updating {DistinctVolumes} volumes on {SeriesName}", distinctVolumes.Count, series.Name);
foreach (var volumeNumber in distinctVolumes)
{
var volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber);
@@ -359,9 +457,10 @@ namespace API.Services.Tasks
{
volume = DbFactory.Volume(volumeNumber);
series.Volumes.Add(volume);
+ _unitOfWork.VolumeRepository.Add(volume);
}
- _logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
+ _logger.LogDebug("[ScannerService] Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray();
UpdateChapters(volume, infos);
volume.Pages = volume.Chapters.Sum(c => c.Pages);
@@ -371,23 +470,26 @@ namespace API.Services.Tasks
var nonDeletedVolumes = series.Volumes.Where(v => parsedInfos.Select(p => p.Volumes).Contains(v.Name)).ToList();
if (series.Volumes.Count != nonDeletedVolumes.Count)
{
- _logger.LogDebug("Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name",
+ _logger.LogDebug("[ScannerService] Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name",
(series.Volumes.Count - nonDeletedVolumes.Count), series.Name);
var deletedVolumes = series.Volumes.Except(nonDeletedVolumes);
foreach (var volume in deletedVolumes)
{
- var file = volume.Chapters.FirstOrDefault()?.Files.FirstOrDefault()?.FilePath ?? "no files";
- if (new FileInfo(file).Exists)
- {
- _logger.LogError("Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}", file);
- }
- _logger.LogDebug("Removed {SeriesName} - Volume {Volume}: {File}", series.Name, volume.Name, file);
+ var file = volume.Chapters.FirstOrDefault()?.Files?.FirstOrDefault()?.FilePath ?? "";
+ if (!string.IsNullOrEmpty(file) && File.Exists(file))
+ {
+ _logger.LogError(
+ "[ScannerService] Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}",
+ file);
+ }
+
+ _logger.LogDebug("[ScannerService] Removed {SeriesName} - Volume {Volume}: {File}", series.Name, volume.Name, file);
}
series.Volumes = nonDeletedVolumes;
}
- _logger.LogDebug("Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}",
+ _logger.LogDebug("[ScannerService] Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}",
series.Name, startingVolumeCount, series.Volumes.Count);
}
@@ -417,7 +519,7 @@ namespace API.Services.Tasks
if (chapter == null)
{
_logger.LogDebug(
- "Adding new chapter, {Series} - Vol {Volume} Ch {Chapter}", info.Series, info.Volumes, info.Chapters);
+ "[ScannerService] Adding new chapter, {Series} - Vol {Volume} Ch {Chapter}", info.Series, info.Volumes, info.Chapters);
volume.Chapters.Add(DbFactory.Chapter(info));
}
else
@@ -454,7 +556,7 @@ namespace API.Services.Tasks
{
if (existingChapter.Files.Count == 0 || !parsedInfos.HasInfo(existingChapter))
{
- _logger.LogDebug("Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series);
+ _logger.LogDebug("[ScannerService] Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series);
volume.Chapters.Remove(existingChapter);
}
else
@@ -470,42 +572,47 @@ namespace API.Services.Tasks
private MangaFile CreateMangaFile(ParserInfo info)
{
- switch (info.Format)
+ MangaFile mangaFile = null;
+ switch (info.Format)
{
case MangaFormat.Archive:
{
- return new MangaFile()
+ mangaFile = new MangaFile()
{
FilePath = info.FullFilePath,
Format = info.Format,
Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath)
};
+ break;
}
case MangaFormat.Pdf:
case MangaFormat.Epub:
{
- return new MangaFile()
+ mangaFile = new MangaFile()
{
FilePath = info.FullFilePath,
Format = info.Format,
Pages = _bookService.GetNumberOfPages(info.FullFilePath)
};
+ break;
}
case MangaFormat.Image:
{
- return new MangaFile()
- {
- FilePath = info.FullFilePath,
- Format = info.Format,
- Pages = 1
- };
+ mangaFile = new MangaFile()
+ {
+ FilePath = info.FullFilePath,
+ Format = info.Format,
+ Pages = 1
+ };
+ break;
}
default:
_logger.LogWarning("[Scanner] Ignoring {Filename}. File type is not supported", info.Filename);
break;
}
- return null;
+ mangaFile?.UpdateLastModified();
+ return mangaFile;
}
private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info)
@@ -515,20 +622,31 @@ namespace API.Services.Tasks
if (existingFile != null)
{
existingFile.Format = info.Format;
- if (existingFile.HasFileBeenModified() || existingFile.Pages == 0)
+ if (!existingFile.HasFileBeenModified() && existingFile.Pages != 0) return;
+ switch (existingFile.Format)
{
- existingFile.Pages = (existingFile.Format == MangaFormat.Epub || existingFile.Format == MangaFormat.Pdf)
- ? _bookService.GetNumberOfPages(info.FullFilePath)
- : _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath);
+ case MangaFormat.Epub:
+ case MangaFormat.Pdf:
+ existingFile.Pages = _bookService.GetNumberOfPages(info.FullFilePath);
+ break;
+ case MangaFormat.Image:
+ existingFile.Pages = 1;
+ break;
+ case MangaFormat.Unknown:
+ existingFile.Pages = 0;
+ break;
+ case MangaFormat.Archive:
+ existingFile.Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath);
+ break;
}
+ existingFile.LastModified = File.GetLastWriteTime(info.FullFilePath);
}
else
{
var file = CreateMangaFile(info);
- if (file != null)
- {
- chapter.Files.Add(file);
- }
+ if (file == null) return;
+
+ chapter.Files.Add(file);
}
}
}
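
The reworked AddOrUpdateFileForChapter bails out early when the file is unmodified and already has a page count, then dispatches the recount by format. A compact sketch of that guard-plus-dispatch, with the page counters stubbed out:

    public enum MangaFormatSketch { Archive, Epub, Pdf, Image, Unknown }

    public static class PageCountSketch
    {
        public static int Recount(MangaFormatSketch format, bool modified, int currentPages)
        {
            // Skip expensive counting when nothing changed and a count already exists.
            if (!modified && currentPages != 0) return currentPages;

            return format switch
            {
                MangaFormatSketch.Epub or MangaFormatSketch.Pdf => CountBookPages(),
                MangaFormatSketch.Archive => CountArchivePages(),
                MangaFormatSketch.Image => 1,
                _ => 0, // Unknown
            };
        }

        private static int CountBookPages() => 42;    // stands in for _bookService.GetNumberOfPages
        private static int CountArchivePages() => 24; // stands in for _archiveService.GetNumberOfPagesFromArchive
    }
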
diff --git a/API/SignalR/MessageFactory.cs b/API/SignalR/MessageFactory.cs
index ad6eed5c9..8e107cd9a 100644
--- a/API/SignalR/MessageFactory.cs
+++ b/API/SignalR/MessageFactory.cs
@@ -5,14 +5,43 @@ namespace API.SignalR
{
public static class MessageFactory
{
- public static SignalRMessage ScanSeriesEvent(int seriesId)
+ public static SignalRMessage ScanSeriesEvent(int seriesId, string seriesName)
{
return new SignalRMessage()
{
Name = SignalREvents.ScanSeries,
Body = new
{
- SeriesId = seriesId
+ SeriesId = seriesId,
+ SeriesName = seriesName
+ }
+ };
+ }
+
+ public static SignalRMessage SeriesAddedEvent(int seriesId, string seriesName, int libraryId)
+ {
+ return new SignalRMessage()
+ {
+ Name = SignalREvents.SeriesAdded,
+ Body = new
+ {
+ SeriesId = seriesId,
+ SeriesName = seriesName,
+ LibraryId = libraryId
+ }
+ };
+ }
+
+ public static SignalRMessage SeriesRemovedEvent(int seriesId, string seriesName, int libraryId)
+ {
+ return new SignalRMessage()
+ {
+ Name = SignalREvents.SeriesRemoved,
+ Body = new
+ {
+ SeriesId = seriesId,
+ SeriesName = seriesName,
+ LibraryId = libraryId
}
};
}
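
All three factory methods build the same envelope: a Name matching one of the SignalREvents constants and an anonymous Body. A minimal sketch of the assumed SignalRMessage shape and a hub-side send, mirroring the _messageHub.Clients.All.SendAsync calls made from the services (the hub type is a stand-in):

    using Microsoft.AspNetCore.SignalR;
    using System.Threading.Tasks;

    public class MessageHubSketch : Hub { }

    public class SignalRMessageSketch
    {
        public string Name { get; set; }
        public object Body { get; set; }
    }

    public static class HubSendSketch
    {
        public static Task Notify(IHubContext<MessageHubSketch> hub, SignalRMessageSketch message) =>
            // Mirrors _messageHub.Clients.All.SendAsync(SignalREvents.X, MessageFactory.XEvent(...)).
            hub.Clients.All.SendAsync(message.Name, message);
    }
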
diff --git a/API/SignalR/SignalREvents.cs b/API/SignalR/SignalREvents.cs
index fcd077146..6799780ff 100644
--- a/API/SignalR/SignalREvents.cs
+++ b/API/SignalR/SignalREvents.cs
@@ -6,6 +6,8 @@
public const string ScanSeries = "ScanSeries";
public const string RefreshMetadata = "RefreshMetadata";
public const string ScanLibrary = "ScanLibrary";
+ public const string SeriesAdded = "SeriesAdded";
+ public const string SeriesRemoved = "SeriesRemoved";
}
}
diff --git a/UI/Web/src/app/_models/events/refresh-metadata-event.ts b/UI/Web/src/app/_models/events/refresh-metadata-event.ts
new file mode 100644
index 000000000..51fda3301
--- /dev/null
+++ b/UI/Web/src/app/_models/events/refresh-metadata-event.ts
@@ -0,0 +1,4 @@
+export interface RefreshMetadataEvent {
+ libraryId: number;
+ seriesId: number;
+}
\ No newline at end of file
diff --git a/UI/Web/src/app/_models/events/scan-series-event.ts b/UI/Web/src/app/_models/events/scan-series-event.ts
index 45f7a07bc..f60d82e17 100644
--- a/UI/Web/src/app/_models/events/scan-series-event.ts
+++ b/UI/Web/src/app/_models/events/scan-series-event.ts
@@ -1,3 +1,4 @@
export interface ScanSeriesEvent {
seriesId: number;
+ seriesName: string;
}
\ No newline at end of file
diff --git a/UI/Web/src/app/_models/events/series-added-event.ts b/UI/Web/src/app/_models/events/series-added-event.ts
new file mode 100644
index 000000000..3e9c5af47
--- /dev/null
+++ b/UI/Web/src/app/_models/events/series-added-event.ts
@@ -0,0 +1,5 @@
+export interface SeriesAddedEvent {
+ libraryId: number;
+ seriesId: number;
+ seriesName: string;
+}
\ No newline at end of file
diff --git a/UI/Web/src/app/_services/message-hub.service.ts b/UI/Web/src/app/_services/message-hub.service.ts
index f5d193f6a..b01b45754 100644
--- a/UI/Web/src/app/_services/message-hub.service.ts
+++ b/UI/Web/src/app/_services/message-hub.service.ts
@@ -2,17 +2,23 @@ import { EventEmitter, Injectable } from '@angular/core';
import { HubConnection, HubConnectionBuilder } from '@microsoft/signalr';
import { NgbModal, NgbModalRef } from '@ng-bootstrap/ng-bootstrap';
import { User } from '@sentry/angular';
-import { BehaviorSubject, ReplaySubject } from 'rxjs';
+import { ToastrService } from 'ngx-toastr';
+import { ReplaySubject } from 'rxjs';
+import { take } from 'rxjs/operators';
import { environment } from 'src/environments/environment';
import { UpdateNotificationModalComponent } from '../shared/update-notification/update-notification-modal.component';
+import { RefreshMetadataEvent } from '../_models/events/refresh-metadata-event';
import { ScanLibraryEvent } from '../_models/events/scan-library-event';
import { ScanSeriesEvent } from '../_models/events/scan-series-event';
+import { SeriesAddedEvent } from '../_models/events/series-added-event';
+import { AccountService } from './account.service';
export enum EVENTS {
UpdateAvailable = 'UpdateAvailable',
ScanSeries = 'ScanSeries',
ScanLibrary = 'ScanLibrary',
RefreshMetadata = 'RefreshMetadata',
+ SeriesAdded = 'SeriesAdded'
}
export interface Message {
@@ -33,8 +39,18 @@ export class MessageHubService {
public scanSeries: EventEmitter<ScanSeriesEvent> = new EventEmitter<ScanSeriesEvent>();
public scanLibrary: EventEmitter<ScanLibraryEvent> = new EventEmitter<ScanLibraryEvent>();
+ public seriesAdded: EventEmitter<SeriesAddedEvent> = new EventEmitter<SeriesAddedEvent>();
+ public refreshMetadata: EventEmitter<RefreshMetadataEvent> = new EventEmitter<RefreshMetadataEvent>();
- constructor(private modalService: NgbModal) { }
+ isAdmin: boolean = false;
+
+ constructor(private modalService: NgbModal, private toastr: ToastrService, private accountService: AccountService) {
+ this.accountService.currentUser$.pipe(take(1)).subscribe(user => {
+ if (user) {
+ this.isAdmin = this.accountService.hasAdminRole(user);
+ }
+ });
+ }
createHubConnection(user: User) {
this.hubConnection = new HubConnectionBuilder()
@@ -71,6 +87,25 @@ export class MessageHubService {
// }
});
+ this.hubConnection.on(EVENTS.SeriesAdded, resp => {
+ this.messagesSource.next({
+ event: EVENTS.SeriesAdded,
+ payload: resp.body
+ });
+ this.seriesAdded.emit(resp.body);
+ if (this.isAdmin) {
+ this.toastr.info('Series ' + (resp.body as SeriesAddedEvent).seriesName + ' added');
+ }
+ });
+
+ this.hubConnection.on(EVENTS.RefreshMetadata, resp => {
+ this.messagesSource.next({
+ event: EVENTS.RefreshMetadata,
+ payload: resp.body
+ });
+ this.refreshMetadata.emit(resp.body);
+ });
+
this.hubConnection.on(EVENTS.UpdateAvailable, resp => {
this.messagesSource.next({
event: EVENTS.UpdateAvailable,
diff --git a/UI/Web/src/app/cards/series-card/series-card.component.ts b/UI/Web/src/app/cards/series-card/series-card.component.ts
index 57c2af992..47258649a 100644
--- a/UI/Web/src/app/cards/series-card/series-card.component.ts
+++ b/UI/Web/src/app/cards/series-card/series-card.component.ts
@@ -2,7 +2,7 @@ import { Component, EventEmitter, Input, OnChanges, OnInit, Output } from '@angu
import { Router } from '@angular/router';
import { NgbModal } from '@ng-bootstrap/ng-bootstrap';
import { ToastrService } from 'ngx-toastr';
-import { take } from 'rxjs/operators';
+import { take, takeWhile } from 'rxjs/operators';
import { Series } from 'src/app/_models/series';
import { AccountService } from 'src/app/_services/account.service';
import { ImageService } from 'src/app/_services/image.service';
@@ -11,6 +11,8 @@ import { SeriesService } from 'src/app/_services/series.service';
import { ConfirmService } from 'src/app/shared/confirm.service';
import { ActionService } from 'src/app/_services/action.service';
import { EditSeriesModalComponent } from '../_modals/edit-series-modal/edit-series-modal.component';
+import { RefreshMetadataEvent } from 'src/app/_models/events/refresh-metadata-event';
+import { MessageHubService } from 'src/app/_services/message-hub.service';
@Component({
selector: 'app-series-card',
@@ -46,7 +48,7 @@ export class SeriesCardComponent implements OnInit, OnChanges {
private seriesService: SeriesService, private toastr: ToastrService,
private modalService: NgbModal, private confirmService: ConfirmService,
public imageService: ImageService, private actionFactoryService: ActionFactoryService,
- private actionService: ActionService) {
+ private actionService: ActionService, private hubService: MessageHubService) {
this.accountService.currentUser$.pipe(take(1)).subscribe(user => {
if (user) {
this.isAdmin = this.accountService.hasAdminRole(user);
@@ -58,6 +60,14 @@ export class SeriesCardComponent implements OnInit, OnChanges {
ngOnInit(): void {
if (this.data) {
this.imageUrl = this.imageService.randomize(this.imageService.getSeriesCoverImage(this.data.id));
+
+ this.hubService.refreshMetadata.pipe(takeWhile(event => event.libraryId === this.libraryId)).subscribe((event: RefreshMetadataEvent) => {
+ if (this.data.id === event.seriesId) {
+ this.imageUrl = this.imageService.randomize(this.imageService.getSeriesCoverImage(this.data.id));
+ console.log('Refresh event came through, updating cover image');
+ }
+
+ });
}
}
diff --git a/UI/Web/src/app/library-detail/library-detail.component.ts b/UI/Web/src/app/library-detail/library-detail.component.ts
index fdd58fb33..409ea9267 100644
--- a/UI/Web/src/app/library-detail/library-detail.component.ts
+++ b/UI/Web/src/app/library-detail/library-detail.component.ts
@@ -1,10 +1,12 @@
import { Component, HostListener, OnInit } from '@angular/core';
import { Title } from '@angular/platform-browser';
import { ActivatedRoute, Router } from '@angular/router';
-import { take } from 'rxjs/operators';
+import { take, takeWhile } from 'rxjs/operators';
import { BulkSelectionService } from '../cards/bulk-selection.service';
import { UpdateFilterEvent } from '../cards/card-detail-layout/card-detail-layout.component';
import { KEY_CODES } from '../shared/_services/utility.service';
+import { RefreshMetadataEvent } from '../_models/events/refresh-metadata-event';
+import { SeriesAddedEvent } from '../_models/events/series-added-event';
import { Library } from '../_models/library';
import { Pagination } from '../_models/pagination';
import { Series } from '../_models/series';
@@ -12,6 +14,7 @@ import { FilterItem, mangaFormatFilters, SeriesFilter } from '../_models/series-
import { Action, ActionFactoryService, ActionItem } from '../_services/action-factory.service';
import { ActionService } from '../_services/action.service';
import { LibraryService } from '../_services/library.service';
+import { MessageHubService } from '../_services/message-hub.service';
import { SeriesService } from '../_services/series.service';
@Component({
@@ -60,7 +63,7 @@ export class LibraryDetailComponent implements OnInit {
constructor(private route: ActivatedRoute, private router: Router, private seriesService: SeriesService,
private libraryService: LibraryService, private titleService: Title, private actionFactoryService: ActionFactoryService,
- private actionService: ActionService, public bulkSelectionService: BulkSelectionService) {
+ private actionService: ActionService, public bulkSelectionService: BulkSelectionService, private hubService: MessageHubService) {
const routeId = this.route.snapshot.paramMap.get('id');
if (routeId === null) {
this.router.navigateByUrl('/libraries');
@@ -78,7 +81,10 @@ export class LibraryDetailComponent implements OnInit {
}
ngOnInit(): void {
-
+
+ this.hubService.seriesAdded.pipe(takeWhile(event => event.libraryId === this.libraryId)).subscribe((event: SeriesAddedEvent) => {
+ this.loadPage();
+ });
}
@HostListener('document:keydown.shift', ['$event'])
diff --git a/UI/Web/src/app/person-badge/person-badge.component.html b/UI/Web/src/app/person-badge/person-badge.component.html
index fd12496d4..31b2b84bb 100644
--- a/UI/Web/src/app/person-badge/person-badge.component.html
+++ b/UI/Web/src/app/person-badge/person-badge.component.html
@@ -1,5 +1,5 @@
-
\ No newline at end of file
+