Merged develop into main

This commit is contained in:
Joseph Milazzo 2021-10-12 08:21:43 -05:00
commit aa710529f0
151 changed files with 4393 additions and 1703 deletions

View File

@ -15,4 +15,10 @@
<PackageReference Include="NSubstitute" Version="4.2.2" /> <PackageReference Include="NSubstitute" Version="4.2.2" />
</ItemGroup> </ItemGroup>
<ItemGroup>
<None Update="Data\SeriesNamesForNormalization.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
</Project> </Project>

View File

@ -0,0 +1,573 @@
Liar-Game
Your Lie in April
Love Hina
Love Hina
A Chronicle of the Last Pagans
Otherworldly Munchkin - Let's Speedrun the Dungeon with Only 1 HP!
Love Hina
Rokka - Braves of the Six Flowers
Real Account
Bakekano
Yancha Gal no Anjou-san
Moshi Fanren
The Devil Is a Part-Timer!
My Home Hero
Itoshi no Karin
Claymore
Dolls Fall
Dragons Rioting
Tokyo Ghoul - re
Hajime no Ippo
Mahoromatic
DEATHTOPIA
Negima! Neo - Magister Negi Magi
Ichinensei ni Nacchattara
How NOT to Summon a Demon Lord
U12
"Don't Toy With Me, Miss Nagatoro"
Karakai Jouzu no Takagi-san
UQ Holder!
"Ore no Nounai Sentakushi ga, Gakuen Rabukome o Zenryoku de Jama Shite Iru"
Do Chokkyuu Kareshi x Kanojo
Ana Satsujin
Deus Ex Machina
Hidan no Aria
Bokura wa Minna Kawaisou
Epigraph of the Closed Curve
Ibitsu
Rave Master
Lunar Legend Tsukihime
Starving Anonymous
High-Rise Invasion
Fuuka
Dai Dark
Zero no Tsukaima Chevalier
Cells at Work! CODE BLACK
004 Cut Hero
Renjoh Desperado
Himegoto - Juukyuusai No Seifuku
Shark Skin Man and Peach Hip Girl
Tokyo Revengers
Fire Punch
Boarding School Juliet
Mushihime
Sankarea - Undying Love
Hanako and the Terror of Allegory
Mad Chimera World
Kono Subarashii Sekai ni Bakuen wo!
21st Century Boys
Kono Subarashii Sekai ni Shukufuku wo! Megumin Anthology
Konosuba
Iinari
Shimoneta - Manmaru Hen
Ichiban Ushiro No Daimaou
Yamada-kun and the Seven Witches
Busou Shoujo Machiavellism
Negative Happy Chainsaw Edge
Stravaganza - Isai No Hime
Seraph of the End - Vampire Reign 095
Seraph of the End - Vampire Reign 098
Kokkoku - Moment by Moment
Magico
Samurai Harem - Asu no Yoichi
Change123
Shomin Sample
Eureka SeveN
Kekkaishi
Goblin Slayer Side Story - Year One
Yomeiro Choice
Okusama wa Shougakusei
Monster No Goshujin-Sama
Ase To Sekken
How Do We Relationship
Hantsu x Torasshu
Magical Girl Apocalypse
I Am a Hero
Air Gear
Dolly Kill Kill
Blue Exorcist
Kingdom of Z
The Fable
Mairimashita! Iruma-kun
Spy x Family
Goblin Slayer - Brand New Day
Yesterday wo Utatte
Mujaki No Rakuen
Summer Time Rendering
Eureka Seven Gravity Boys and Lifting Girl
06
Domestic Girlfriend
Imperfect Girl
Chrno Crusade
Higurashi no Naku Koro ni Kai - Tsumihoroboshihen
Nande koko ni sensei ga!
Fukukaichou Ganbaru.
Fraction
Kono Subarashii Sekai ni Shukufuku wo! Megumin Anthology Aka
Mouryou no Yurikago
Ral Ω Grad
Shomin Sample I Was Abducted by an Elite All-Girls School as a Sample Commoner
City of Love Prison
Tsugumomo
Highschool of the Dead - Edition
Cynthia The Mission
Amano Megumi wa Suki Darake!
Aria The Scarlet Ammo
Noblesse
Outlanders
Bleach
Kimi ni Todoke
Corpse Party - Another Child
The Heroic Legend of Arslan
Fujiyama-San Wa Shishunki
Let's Go Play
Astra Lost in Space
Mirai Nikki
Doubt
Again!!
Gesellschaft Blume
Momo The Blood Taker
World's End Harem - Fantasia
Tengoku Daimakyou
Amaenaideyo MS
Cage of Eden
Arifureta - From Commonplace to World's Strongest
"The 100 Girlfriends Who Really, Really, Really, Really, Really Love You"
Frogman
Chaika - The Coffin Princess
Pandora Hearts
I'm Not a Lolicon!
Criminale!
Drifting Net Cafe
Kono Subarashii Sekai ni Nichijou wo!
Tomodachi Game
Accel World
Sun-Ken Rock
Parallel Paradise
Otherworldly Munchkin - Let's Speedrun the Dungeon with Only 1 HP!
Hentai Ouji to Warawanai Neko. Nya!
Gokukoku no Brynhildr
Rosario+Vampire Season 2
Higurashi no Naku Koro ni - Tatarigoroshihen
BEASTARS
Grenadier
The Duke of Death and His Black Maid
Helck
Ijousha no Ai
Beelzebub
Infection
"Ota Tomo ga Kareshi ni Nattara, Saikou, Kamo Shirenai"
Battle Vixens
Kimi ha midara na Boku no Joou
Immortal Hounds
Battle Angel Alita
My Monster Secret
Blood Rain
Kakegurui - Compulsive Gambler
Combatants Will Be Dispatched!
Tenjo Tenge - Digital Colored Comics
Dorohedoro
Tower Of God
Toradora!
Spice and Wolf
Loose Relation Between Wizard and Apprentice
Kaguya-sama - Love Is War - Digital Colored Comics
RaW Hero
Aiki
Jagaaaaaan
Gleipnir
Darwin's Game
I'm Standing on a Million Lives
Battle Club
School Rumble Z
Wotakoi - Love Is Hard for Otaku
Majimoji Rurumo
Suisei no Gargantia
Madan No Ou To Vanadis
Full Metal Panic - Sigma
Konosuba - An Explosion on This Wonderful World!
Seraph of the End - Vampire Reign 096
Higurashi no Naku Koro ni - Onikakushihen
Corpse Party Cemetery 0 - Kaibyaku No Ars Moriendi
World's End Harem
Jack Frost
The Men Who Created The Prison School Anime
My Hero Academia
Elfen Lied
Berserk
Witchcraft Works
Chobits 20th Anniversary Edition
Mx0
Youkai Kyoushitsu
Horimiya
Mieruko-chan
Drifters
Suzuka
The Iceblade Magician Rules Over the World
Kaiju No. 8
Yu-Gi-Oh!
"A Story About Treating a Female Knight, Who Has Never Been Treated as a Woman, as a Woman"
Mahoutsukai to Deshi no Futekisetsu na Kankei
Battle Royale
Mato Seihei no Slave
One-Punch Man
Boku No Kokoro No Yabai Yatsu
Doku Mushi
Kuzu no Honkai
Hoshihimemura No Naishobanashi
Knights of Sidonia
Amaenaideyo
Kono Subarashii Sekai ni Shukufuku wo! Spin-off Kono Kamen no Akuma ni Soudan wo!
Killing Bites
Fly Me to the Moon
Tenjo Tenge
D-Princess
7thGARDEN
Sumomomo Momomo
Accel World Dural - Magisa Garden
History's Strongest Disciple Kenichi
Future Diary - Mosaic
DEAD Tube
Kaworu Watashiya - Kodomo no Jikan
Undead Unluck
Black Bullet
Fureru To Kikoeru
Konchuki
Akuma no Riddle - Riddle Story of Devil
Great Teacher Onizuka
Scumbag Loser
Jisatsutou
Boku wa Mari no Naka
Cherry x Cherry
Seraph of the End - Vampire Reign 093
Yumekui Merry - 4-Koma Anthology
Love and Lies
Nisekoi - False Love
Another
My Balls
Akame ga KILL!
Corpse Princess
Needless 0
My Charms Are Wasted On Kuroiwa Medaka
Made in Abyss
Hanako to Guuwa no Tera
Yumekui Merry
Miman Renai
Sundome
Gantz
Accomplishments of the Duke's Daughter
Grimgar of Fantasy and Ash
Dansei Kyoufushou Datta Watashi Ga Av Jouyu Ni Naru Made No Hanashi
Hour of the Zombie
NOiSE
Onani Master Kurosawa
Sekirei
Full Metal Panic
Zero no Tsukaima
Solo Leveling
B Gata H Kei
Shurabara!
DEATH NOTE
Terra Formars
Goblin Slayer
March Story
Nozoki Ana
Youkai Shoujo - Monsuga
Maji de Watashi ni Koi Shinasai!!
"Ore no Nounai Sentakushi ga, Gakuen Rabukome o Zenryoku de Jama Shite Iru H"
Destruction Princess
Mob Psycho 100
Negima!
Zero - The Illust collection of The Familiar of Zero
20th Century Boys
Girls of the Wild's
Bleach - Digital Colored Comics
Taboo Tattoo
Let's Buy The Land And Cultivate In Different World
Oroka na Tenshi wa Akuma to Odoru
Future Diary
Negima! Party Book!
Buso Renkin
Offal Island
Mysterious Girlfriend X
Getsurin ni Kiri Saku
Magi
Uzaki-chan Wants to Hang Out!
A Town Where You Live
WITCH WATCH
Lord Marksman and Vanadis
Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo
Tonari No Furi-San Ga Tonikaku Kowai
Hinowa ga CRUSH!
Tsuredure Children
Dance in the Vampire Bund
Sperman
The Rising Of The Shield Hero
Triage X
Kiruru Kill Me
Hidan no Aria AA
Origin
Senran Kagura - Skirting Shadows
Higurashi no Naku Koro ni - Himatsubushihen
APOSIMZ
Franken Fran
Is This a Zombie
School Rumble
Darker than Black - Shikkoku no Hana
Sweet X Trouble
Close As Neighbors
7SEEDS
Dungeon Seeker
Necromance
Code Breaker
Rokka Braves of the Six Flowers
Prison School
COPPELION
Grand Blue Dreaming
Libidors
Skill of Lure
Pluto - Urasawa x Tezuka
Chibi Vampire
Omamori Himari
"Zoku, Kono Subarashii Sekai ni Bakuen wo!"
"Please Go Home, Akutsu-San!"
Mahoutsukai to Teishi no Futekisetsu na Kankei
Chobits
The Seven Deadly Sins
Black Clover
We Never Learn
Tomogui Kyoushitsu
Tokyo Ghoul
Sweat and Soap
Seraph of the End - Vampire Reign 097
Higurashi no Naku Koro ni Kai - Meakashihen
Children
"Can You Just Die, My Darling"
"Haganai, I Don't Have Many Friends"
Heion Sedai no Idaten-tachi
Baketeriya
Magical Sempai
Ajin - Demi-Human
Kimi wa Midara na Boku no Joou
DearS
Pluto
Lotte no Omocha!
Love Hina
Shoujo Kaitai
El Cazador de la Bruja
Akame ga KILL! ZERO
"Beauty, Sage And The Devil's Sword"
Higurashi no Naku Koro ni - Watanagashihen
Corpse Party - Musume
Getsuyoubi no Tawawa
Trinity Seven
"No Game, No Life"
KanoKari Mythology
Seraph of the End - Vampire Reign 094
Uzumaki
Darling in the FranXX
The Blade Of Evolution-Walking Alone In The Dungeon
BLAME! Master Edition
Fire Force
Toukyou Akazukin
Darker than Black
Karin
Higurashi no Naku Koro ni Kai - Matsuribayashihen
Akazukin
Velvet Kiss
"Kanojo, Okarishimasu"
Teasing Master Takagi-san
The Hentai Prince and the Stony Cat
Corpse Party - Book of Shadows
.hackxxxx
Hachigatsu Kokonoka Boku wa Kimi ni Kuwareru.
Corpse Party - Blood Covered
King Of Thorn
BTOOOM!
Chimamire Sukeban Chainsaw
Seraph of the End - Vampire Reign
Juni Taisen Zodiac War
Masamune-kun's Revenge
How Many Light-Years to Babylon
Midori no Hibi
A Girl on the Shore
Plunderer
School Rumble - Pleasure File
Green WorldZ
Golden Boy
Yuusha ga Shinda!
Kodomo no Jikan
unOrdinary
My Wife is Wagatsuma-san
VanDread
Rosario+Vampire
Kyochuu Rettou
Deadman Wonderland
KILL la KILL
Mushoku Tensei - Jobless Reincarnation
404 Case Manual 30 Seconds Till Apocalypse
Iris Zero
All You Need is Kill
Shimoneta to Iu Gainen ga Sonzai Shinai Taikutsu na Sekai Man-hen
High School DxD
Needless
Ichiban no Daimaou
My Girlfriend Is A Zombie
Hare-Kon
Minamoto-kun Monogatari
Batman Beyond 02
Spawn
iZombie
Invincible 070.5 - Invincible Returns
Invincible Presents - Atom Eve
Invincible 033.5 - Marvel Team-Up
Invincible 031.5 - Image - Future Shock
Batman Wayne Family Adventures
Batman Beyond 04
Batman Beyond 2.0
Batman Beyond 03
Batman Beyond 05
Chew
Zombie Tramp vs. Vampblade TPB
Free Scott Pilgrim
Invincible Presents - Atom Eve & Rex Splode
Scott Pilgrim 03 - Scott Pilgrim & The Infinite Sadness
I Hate Fairyland
Scott Pilgrim 06 - Scott Pilgrim's Finest Hour
Scott Pilgrim 04 - Scott Pilgrim Gets It Together
Scott Pilgrim 01 - Scott Pilgrim's Precious Little Life
Spawn - 25th Anniversary Director's Cut
Zombie Tramp
Invincible Universe
The Official Handbook of the Invincible Universe
Batman Beyond
Saga
Scott Pilgrim 05 - Scott Pilgrim vs. the Universe
Batman Beyond 06
Batman - Detective Comics - Rebirth Deluxe Edition Book
Batman Beyond 01
Batman - Catwoman
Invincible 022.5 - Invincible
Teen Titans - Raven
Invincible 052
Invincible 014.5 - Image Comics Summer
Zombie Tramp v3 TPB
Scott Pilgrim 02 - Scott Pilgrim vs. The World
Invincible
Spawn 220
Y - The Last Man
Kick-Ass - The Dave Lizewski Years
Teen Titans
Fables
Book of Enoch
To Love-Ru Darkness - Digital Colored Comics
Medaka Box - Digital Colored Comics
Magical P tissi re Kosaki-chan!!
Pandora in the Crimson Shell - Ghost Urn
Yuragi-sou no Yuuna-san - Digital Colored Comics
Ziggurat
Tsugumomo - Digital Colored Comics
The War Poems Of Siegfried Sassoon
Rokka - Braves of the Six Flowers
Demon King Daimaou
Blockade Billy
Cujo
The Magicians
The Gunslinger
Danse Macabre
Christine
Fool moon
On Writing
Roadwork
Deep Learning with Python - A Hands-on Introduction
If It Bleeds
Night Shift
Bag of Bones
Dreamcatcher
Desperation
Duma Key
Four Past Midnight
Elevation
The Colorado Kid
The Eyes of the Dragon
Consulting With This Masked Devil!
Gifting the Wonderful World with Blessings!
The Golden Harpoon / Lost Among the Floes
Invaders of the Rokujouma
Cell
Uncollected Stories 2003
Faithful
"Full Dark, No Stars"
Dolores Claiborne
It
Antonio's Tale
Joyland
konosuba
CSHP19
By the Grace of the Gods - LN
EPUB 3 Collection
Talisman
Sword Art Online
The Mist
Insomnia
Hearts In Atlantis
11/22/63
Kono Subarashii Sekai ni Bakuen wo!
In the Tall Grass
Nightmares and Dreamscapes
Eloquent JavaScript
The Bell Jar
Six Stories
Rose Madder
The Stand
The Devil Is a Part-Timer!
Grimgar of Fantasy and Ash
A Chronicle of the Last Pagans
Cycle of the Werewolf
Gifting this Wonderful World With Blessings!
Unit 1. Operations on Numbers.
Firestarter
The Dark Half
Accel World
Love Hina - Volume
Skeleton Crew
Needful Things
Kono Subarashii Sekai ni Syukufuku wo!
Carrie
Thinner
Hentai Ouji to Warawanai Neko
Blaze
Saturn Run
Throttle
Just After Sunset
Gerald's Game
The Regulators
Different Seasons
The Dark Tower
Pet Sematary
The Girl Who Loved Tom Gordon
Ano Orokamono ni mo Kyakkou wo!
From A Buick 8
The Green Mile
"Celebration of Discipline, Special Anniversary Edition"
Combatants Will Be Dispatched!
Kore Wa Zombie Desu Ka
The Shining
The Tatami Galaxy
Salem's Lot
The Tommyknockers
A Face in the Crowd
UR
この素晴らしい世界に祝福を! 9 紅の宿命 【電子特別版】
Outsider
Lisey's Story
Everything's Eventual
Dune
The Dead Zone
Mile 81
Under the Dome
The Long Walk
The Running Man
EPUB3 UNLEASHED 2012
Gifting The Wonderful World With Explosions!
Rage

View File

@ -1,6 +1,4 @@
using System; using System.IO;
using System.IO;
using API.Data;
using API.Entities.Enums; using API.Entities.Enums;
using API.Interfaces.Services; using API.Interfaces.Services;
using API.Parser; using API.Parser;
@ -57,8 +55,8 @@ namespace API.Benchmark
Title = "A Town Where You Live", Title = "A Town Where You Live",
Volumes = "1" Volumes = "1"
}; };
var parsedSeries = _parseScannedFiles.ScanLibrariesForSeries(LibraryType.Manga, new string[] {libraryPath}, _parseScannedFiles.ScanLibrariesForSeries(LibraryType.Manga, new [] {libraryPath},
out var totalFiles, out var scanElapsedTime); out _, out _);
_parseScannedFiles.MergeName(p1); _parseScannedFiles.MergeName(p1);
} }
} }

View File

@ -0,0 +1,90 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.RegularExpressions;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Order;
namespace API.Benchmark
{
[MemoryDiagnoser]
[Orderer(SummaryOrderPolicy.FastestToSlowest)]
[RankColumn]
public class ParserBenchmarks
{
private readonly IList<string> _names;
private static readonly Regex NormalizeRegex = new Regex(@"[^a-zA-Z0-9]",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
TimeSpan.FromMilliseconds(300));
private static readonly Regex IsEpub = new Regex(@"\.epub",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
TimeSpan.FromMilliseconds(300));
public ParserBenchmarks()
{
// Read all series from SeriesNamesForNormalization.txt
_names = File.ReadAllLines("Data/SeriesNamesForNormalization.txt");
Console.WriteLine($"Performing benchmark on {_names.Count} series");
}
private static void NormalizeOriginal(string name)
{
Regex.Replace(name.ToLower(), "[^a-zA-Z0-9]", string.Empty);
}
private static void NormalizeNew(string name)
{
// ReSharper disable once UnusedVariable
var ret = NormalizeRegex.Replace(name, string.Empty).ToLower();
}
[Benchmark]
public void TestNormalizeName()
{
foreach (var name in _names)
{
NormalizeOriginal(name);
}
}
[Benchmark]
public void TestNormalizeName_New()
{
foreach (var name in _names)
{
NormalizeNew(name);
}
}
[Benchmark]
public void TestIsEpub()
{
foreach (var name in _names)
{
if ((name + ".epub").ToLower() == ".epub")
{
/* No Operation */
}
}
}
[Benchmark]
public void TestIsEpub_New()
{
foreach (var name in _names)
{
if (IsEpub.IsMatch((name + ".epub")))
{
/* No Operation */
}
}
}
}
}

View File

@ -10,10 +10,12 @@ namespace API.Benchmark
/// </summary> /// </summary>
public static class Program public static class Program
{ {
static void Main(string[] args) private static void Main(string[] args)
{ {
BenchmarkRunner.Run<ParseScannedFilesBenchmarks>(); //BenchmarkRunner.Run<ParseScannedFilesBenchmarks>();
//BenchmarkRunner.Run<TestBenchmark>(); //BenchmarkRunner.Run<TestBenchmark>();
BenchmarkRunner.Run<ParserBenchmarks>();
} }
} }
} }

View File

@ -7,8 +7,8 @@
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="5.0.8" /> <PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="5.0.10" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.10.0" /> <PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.11.0" />
<PackageReference Include="NSubstitute" Version="4.2.2" /> <PackageReference Include="NSubstitute" Version="4.2.2" />
<PackageReference Include="xunit" Version="2.4.1" /> <PackageReference Include="xunit" Version="2.4.1" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.3"> <PackageReference Include="xunit.runner.visualstudio" Version="2.4.3">

View File

@ -10,5 +10,12 @@ namespace API.Tests.Parser
{ {
Assert.Equal(expected, API.Parser.Parser.ParseSeries(filename)); Assert.Equal(expected, API.Parser.Parser.ParseSeries(filename));
} }
[Theory]
[InlineData("Harrison, Kim - Dates from Hell - Hollows Vol 2.5.epub", "2.5")]
public void ParseVolumeTest(string filename, string expected)
{
Assert.Equal(expected, API.Parser.Parser.ParseVolume(filename));
}
} }
} }

View File

@ -1,11 +1,22 @@
using Xunit; using System;
using System.Collections.Generic;
using API.Entities.Enums;
using API.Parser;
using Xunit;
using Xunit.Abstractions;
namespace API.Tests.Parser namespace API.Tests.Parser
{ {
public class ComicParserTests public class ComicParserTests
{ {
private readonly ITestOutputHelper _testOutputHelper;
public ComicParserTests(ITestOutputHelper testOutputHelper)
{
_testOutputHelper = testOutputHelper;
}
[Theory] [Theory]
[InlineData("01 Spider-Man & Wolverine 01.cbr", "Spider-Man & Wolverine")]
[InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "Asterix the Gladiator")] [InlineData("04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)", "Asterix the Gladiator")]
[InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "The First Asterix Frieze")] [InlineData("The First Asterix Frieze (WebP by Doc MaKS)", "The First Asterix Frieze")]
[InlineData("Batman & Catwoman - Trail of the Gun 01", "Batman & Catwoman - Trail of the Gun")] [InlineData("Batman & Catwoman - Trail of the Gun 01", "Batman & Catwoman - Trail of the Gun")]
@ -28,7 +39,23 @@ namespace API.Tests.Parser
[InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "Invincible")] [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "Invincible")]
[InlineData("Batman Wayne Family Adventures - Ep. 001 - Moving In", "Batman Wayne Family Adventures")] [InlineData("Batman Wayne Family Adventures - Ep. 001 - Moving In", "Batman Wayne Family Adventures")]
[InlineData("Saga 001 (2012) (Digital) (Empire-Zone).cbr", "Saga")] [InlineData("Saga 001 (2012) (Digital) (Empire-Zone).cbr", "Saga")]
[InlineData("spawn-123", "spawn")]
[InlineData("spawn-chapter-123", "spawn")]
[InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", "Spawn")]
[InlineData("Batman Beyond 04 (of 6) (1999)", "Batman Beyond")] [InlineData("Batman Beyond 04 (of 6) (1999)", "Batman Beyond")]
[InlineData("Batman Beyond 001 (2012)", "Batman Beyond")]
[InlineData("Batman Beyond 2.0 001 (2013)", "Batman Beyond 2.0")]
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "Batman - Catwoman")]
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "Chew")]
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", "Chew Script Book")]
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", "Batman - Detective Comics - Rebirth Deluxe Edition Book")]
[InlineData("Cyberpunk 2077 - Your Voice #01", "Cyberpunk 2077 - Your Voice")]
[InlineData("Cyberpunk 2077 #01", "Cyberpunk 2077")]
[InlineData("Cyberpunk 2077 - Trauma Team #04.cbz", "Cyberpunk 2077 - Trauma Team")]
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "Batgirl")]
[InlineData("Batgirl V2000 #57", "Batgirl")]
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire)", "Fables")]
[InlineData("2000 AD 0366 [1984-04-28] (flopbie)", "2000 AD")]
public void ParseComicSeriesTest(string filename, string expected) public void ParseComicSeriesTest(string filename, string expected)
{ {
Assert.Equal(expected, API.Parser.Parser.ParseComicSeries(filename)); Assert.Equal(expected, API.Parser.Parser.ParseComicSeries(filename));
@ -52,6 +79,20 @@ namespace API.Tests.Parser
[InlineData("Amazing Man Comics chapter 25", "0")] [InlineData("Amazing Man Comics chapter 25", "0")]
[InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "0")] [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "0")]
[InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", "0")] [InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", "0")]
[InlineData("spawn-123", "0")]
[InlineData("spawn-chapter-123", "0")]
[InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", "0")]
[InlineData("Batman Beyond 04 (of 6) (1999)", "0")]
[InlineData("Batman Beyond 001 (2012)", "0")]
[InlineData("Batman Beyond 2.0 001 (2013)", "0")]
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "0")]
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "1")]
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")]
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "2000")]
[InlineData("Batgirl V2000 #57", "2000")]
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "0")]
[InlineData("Cyberpunk 2077 - Trauma Team 04.cbz", "0")]
[InlineData("2000 AD 0366 [1984-04-28] (flopbie)", "0")]
public void ParseComicVolumeTest(string filename, string expected) public void ParseComicVolumeTest(string filename, string expected)
{ {
Assert.Equal(expected, API.Parser.Parser.ParseComicVolume(filename)); Assert.Equal(expected, API.Parser.Parser.ParseComicVolume(filename));
@ -77,12 +118,87 @@ namespace API.Tests.Parser
[InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "33.5")] [InlineData("Invincible 033.5 - Marvel Team-Up 14 (2006) (digital) (Minutemen-Slayer)", "33.5")]
[InlineData("Batman Wayne Family Adventures - Ep. 014 - Moving In", "14")] [InlineData("Batman Wayne Family Adventures - Ep. 014 - Moving In", "14")]
[InlineData("Saga 001 (2012) (Digital) (Empire-Zone)", "1")] [InlineData("Saga 001 (2012) (Digital) (Empire-Zone)", "1")]
[InlineData("spawn-123", "123")]
[InlineData("spawn-chapter-123", "123")]
[InlineData("Spawn 062 (1997) (digital) (TLK-EMPIRE-HD).cbr", "62")]
[InlineData("Batman Beyond 04 (of 6) (1999)", "4")] [InlineData("Batman Beyond 04 (of 6) (1999)", "4")]
[InlineData("Invincible 052 (c2c) (2008) (Minutemen-TheCouple)", "52")] [InlineData("Invincible 052 (c2c) (2008) (Minutemen-TheCouple)", "52")]
[InlineData("Y - The Last Man #001", "1")] [InlineData("Y - The Last Man #001", "1")]
[InlineData("Batman Beyond 001 (2012)", "1")]
[InlineData("Batman Beyond 2.0 001 (2013)", "1")]
[InlineData("Batman - Catwoman 001 (2021) (Webrip) (The Last Kryptonian-DCP)", "1")]
[InlineData("Chew v1 - Taster´s Choise (2012) (Digital) (1920) (Kingpin-Empire)", "0")]
[InlineData("Chew Script Book (2011) (digital-Empire) SP04", "0")]
[InlineData("Batgirl Vol.2000 #57 (December, 2004)", "57")]
[InlineData("Batgirl V2000 #57", "57")]
[InlineData("Fables 021 (2004) (Digital) (Nahga-Empire).cbr", "21")]
[InlineData("Cyberpunk 2077 - Trauma Team #04.cbz", "4")]
[InlineData("2000 AD 0366 [1984-04-28] (flopbie)", "366")]
public void ParseComicChapterTest(string filename, string expected) public void ParseComicChapterTest(string filename, string expected)
{ {
Assert.Equal(expected, API.Parser.Parser.ParseComicChapter(filename)); Assert.Equal(expected, API.Parser.Parser.ParseComicChapter(filename));
} }
[Theory]
[InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 02 (2018) (digital) (Son of Ultron-Empire)", true)]
[InlineData("Zombie Tramp vs. Vampblade TPB (2016) (Digital) (TheArchivist-Empire)", true)]
[InlineData("Baldwin the Brave & Other Tales Special SP1.cbr", true)]
[InlineData("Mouse Guard Specials - Spring 1153 - Fraggle Rock FCBD 2010", true)]
public void ParseComicSpecialTest(string input, bool expected)
{
Assert.Equal(expected, !string.IsNullOrEmpty(API.Parser.Parser.ParseComicSpecial(input)));
}
[Fact]
public void ParseInfoTest()
{
const string rootPath = @"E:/Comics/";
var expected = new Dictionary<string, ParserInfo>();
var filepath = @"E:/Comics/Teen Titans/Teen Titans v1 Annual 01 (1967) SP01.cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Teen Titans", Volumes = "0",
Chapters = "0", Filename = "Teen Titans v1 Annual 01 (1967) SP01.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath
});
// Fallback test with bad naming
filepath = @"E:\Comics\Comics\Babe\Babe Vol.1 #1-4\Babe 01.cbr";
expected.Add(filepath, new ParserInfo
{
Series = "Babe", Volumes = "0", Edition = "",
Chapters = "1", Filename = "Babe 01.cbr", Format = MangaFormat.Archive,
FullFilePath = filepath, IsSpecial = false
});
foreach (var file in expected.Keys)
{
var expectedInfo = expected[file];
var actual = API.Parser.Parser.Parse(file, rootPath);
if (expectedInfo == null)
{
Assert.Null(actual);
return;
}
Assert.NotNull(actual);
_testOutputHelper.WriteLine($"Validating {file}");
Assert.Equal(expectedInfo.Format, actual.Format);
_testOutputHelper.WriteLine("Format ✓");
Assert.Equal(expectedInfo.Series, actual.Series);
_testOutputHelper.WriteLine("Series ✓");
Assert.Equal(expectedInfo.Chapters, actual.Chapters);
_testOutputHelper.WriteLine("Chapters ✓");
Assert.Equal(expectedInfo.Volumes, actual.Volumes);
_testOutputHelper.WriteLine("Volumes ✓");
Assert.Equal(expectedInfo.Edition, actual.Edition);
_testOutputHelper.WriteLine("Edition ✓");
Assert.Equal(expectedInfo.Filename, actual.Filename);
_testOutputHelper.WriteLine("Filename ✓");
Assert.Equal(expectedInfo.FullFilePath, actual.FullFilePath);
_testOutputHelper.WriteLine("FullFilePath ✓");
}
}
} }
} }

View File

@ -67,6 +67,7 @@ namespace API.Tests.Parser
[InlineData("X-Men v1 #201 (September 2007).cbz", "1")] [InlineData("X-Men v1 #201 (September 2007).cbz", "1")]
[InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 034.5", "6")] [InlineData("Hentai Ouji to Warawanai Neko. - Vol. 06 Ch. 034.5", "6")]
[InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03 Ch. 023.5 - Volume 3 Extras.cbz", "3")] [InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03 Ch. 023.5 - Volume 3 Extras.cbz", "3")]
[InlineData("The 100 Girlfriends Who Really, Really, Really, Really, Really Love You - Vol. 03.5 Ch. 023.5 - Volume 3 Extras.cbz", "3.5")]
public void ParseVolumeTest(string filename, string expected) public void ParseVolumeTest(string filename, string expected)
{ {
Assert.Equal(expected, API.Parser.Parser.ParseVolume(filename)); Assert.Equal(expected, API.Parser.Parser.ParseVolume(filename));
@ -238,6 +239,7 @@ namespace API.Tests.Parser
[InlineData("Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 1-10", "1-10")] [InlineData("Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 1-10", "1-10")]
[InlineData("Deku_&_Bakugo_-_Rising_v1_c1.1.cbz", "1.1")] [InlineData("Deku_&_Bakugo_-_Rising_v1_c1.1.cbz", "1.1")]
[InlineData("Chapter 63 - The Promise Made for 520 Cenz.cbr", "63")] [InlineData("Chapter 63 - The Promise Made for 520 Cenz.cbr", "63")]
[InlineData("Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", "0")]
public void ParseChaptersTest(string filename, string expected) public void ParseChaptersTest(string filename, string expected)
{ {
Assert.Equal(expected, API.Parser.Parser.ParseChapter(filename)); Assert.Equal(expected, API.Parser.Parser.ParseChapter(filename));
@ -291,18 +293,6 @@ namespace API.Tests.Parser
Assert.Equal(expected, API.Parser.Parser.ParseMangaSpecial(inputFile)); Assert.Equal(expected, API.Parser.Parser.ParseMangaSpecial(inputFile));
} }
/*
private static ParserInfo CreateParserInfo(string series, string chapter, string volume, bool isSpecial = false)
{
return new ParserInfo()
{
Chapters = chapter,
Volumes = volume,
IsSpecial = isSpecial,
Series = series,
};
}
*/
[Theory] [Theory]
[InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", "Btooom!~1~1")] [InlineData("/manga/Btooom!/Vol.1/Chapter 1/1.cbz", "Btooom!~1~1")]
@ -436,6 +426,14 @@ namespace API.Tests.Parser
FullFilePath = filepath, IsSpecial = false FullFilePath = filepath, IsSpecial = false
}); });
filepath = @"E:\Manga\Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub";
expected.Add(filepath, new ParserInfo
{
Series = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows", Volumes = "2.5", Edition = "",
Chapters = "0", Filename = "Harrison, Kim - The Good, The Bad, and the Undead - Hollows Vol 2.5.epub", Format = MangaFormat.Epub,
FullFilePath = filepath, IsSpecial = false
});
// If an image is cover exclusively, ignore it // If an image is cover exclusively, ignore it
filepath = @"E:\Manga\Seraph of the End\cover.png"; filepath = @"E:\Manga\Seraph of the End\cover.png";
expected.Add(filepath, null); expected.Add(filepath, null);

View File

@ -11,6 +11,7 @@ namespace API.Tests.Parser
[InlineData("Beastars SP01", true)] [InlineData("Beastars SP01", true)]
[InlineData("Beastars Special 01", false)] [InlineData("Beastars Special 01", false)]
[InlineData("Beastars Extra 01", false)] [InlineData("Beastars Extra 01", false)]
[InlineData("Batman Beyond - Return of the Joker (2001) SP01", true)]
public void HasSpecialTest(string input, bool expected) public void HasSpecialTest(string input, bool expected)
{ {
Assert.Equal(expected, HasSpecialMarker(input)); Assert.Equal(expected, HasSpecialMarker(input));
@ -35,14 +36,15 @@ namespace API.Tests.Parser
} }
[Theory] [Theory]
[InlineData("Hello_I_am_here", "Hello I am here")] [InlineData("Hello_I_am_here", false, "Hello I am here")]
[InlineData("Hello_I_am_here ", "Hello I am here")] [InlineData("Hello_I_am_here ", false, "Hello I am here")]
[InlineData("[ReleaseGroup] The Title", "The Title")] [InlineData("[ReleaseGroup] The Title", false, "The Title")]
[InlineData("[ReleaseGroup]_The_Title", "The Title")] [InlineData("[ReleaseGroup]_The_Title", false, "The Title")]
[InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", "Kasumi Otoko no Ko v1.1")] [InlineData("[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1", false, "Kasumi Otoko no Ko v1.1")]
public void CleanTitleTest(string input, string expected) [InlineData("Batman - Detective Comics - Rebirth Deluxe Edition Book 04 (2019) (digital) (Son of Ultron-Empire)", true, "Batman - Detective Comics - Rebirth Deluxe Edition")]
public void CleanTitleTest(string input, bool isComic, string expected)
{ {
Assert.Equal(expected, CleanTitle(input)); Assert.Equal(expected, CleanTitle(input, isComic));
} }
@ -54,7 +56,7 @@ namespace API.Tests.Parser
// public void ReplaceStyleUrlTest(string input, string expected) // public void ReplaceStyleUrlTest(string input, string expected)
// { // {
// var replacementStr = "PaytoneOne.ttf"; // var replacementStr = "PaytoneOne.ttf";
// // TODO: Use Match to validate since replace is weird // // Use Match to validate since replace is weird
// //Assert.Equal(expected, FontSrcUrlRegex.Replace(input, "$1" + replacementStr + "$2" + "$3")); // //Assert.Equal(expected, FontSrcUrlRegex.Replace(input, "$1" + replacementStr + "$2" + "$3"));
// var match = FontSrcUrlRegex.Match(input); // var match = FontSrcUrlRegex.Match(input);
// Assert.Equal(!string.IsNullOrEmpty(expected), FontSrcUrlRegex.Match(input).Success); // Assert.Equal(!string.IsNullOrEmpty(expected), FontSrcUrlRegex.Match(input).Success);
@ -98,33 +100,6 @@ namespace API.Tests.Parser
Assert.Equal(expected, IsEpub(input)); Assert.Equal(expected, IsEpub(input));
} }
// [Theory]
// [InlineData("Tenjou Tenge Omnibus", "Omnibus")]
// [InlineData("Tenjou Tenge {Full Contact Edition}", "Full Contact Edition")]
// [InlineData("Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz", "Full Contact Edition")]
// [InlineData("Wotakoi - Love is Hard for Otaku Omnibus v01 (2018) (Digital) (danke-Empire)", "Omnibus")]
// [InlineData("To Love Ru v01 Uncensored (Ch.001-007)", "Uncensored")]
// [InlineData("Chobits Omnibus Edition v01 [Dark Horse]", "Omnibus Edition")]
// [InlineData("[dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz", "Digital Colored Comics")]
// [InlineData("AKIRA - c003 (v01) [Full Color] [Darkhorse].cbz", "Full Color")]
// public void ParseEditionTest(string input, string expected)
// {
// Assert.Equal(expected, ParseEdition(input));
// }
// [Theory]
// [InlineData("Beelzebub Special OneShot - Minna no Kochikame x Beelzebub (2016) [Mangastream].cbz", true)]
// [InlineData("Beelzebub_Omake_June_2012_RHS", true)]
// [InlineData("Beelzebub_Side_Story_02_RHS.zip", false)]
// [InlineData("Darker than Black Shikkoku no Hana Special [Simple Scans].zip", true)]
// [InlineData("Darker than Black Shikkoku no Hana Fanbook Extra [Simple Scans].zip", true)]
// [InlineData("Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Extra Chapter", true)]
// [InlineData("Ani-Hina Art Collection.cbz", true)]
// public void ParseMangaSpecialTest(string input, bool expected)
// {
// Assert.Equal(expected, ParseMangaSpecial(input) != "");
// }
[Theory] [Theory]
[InlineData("12-14", 12)] [InlineData("12-14", 12)]
[InlineData("24", 24)] [InlineData("24", 24)]
@ -142,6 +117,8 @@ namespace API.Tests.Parser
[InlineData("Darker Than Black", "darkerthanblack")] [InlineData("Darker Than Black", "darkerthanblack")]
[InlineData("Darker Than Black - Something", "darkerthanblacksomething")] [InlineData("Darker Than Black - Something", "darkerthanblacksomething")]
[InlineData("Darker Than_Black", "darkerthanblack")] [InlineData("Darker Than_Black", "darkerthanblack")]
[InlineData("Citrus", "citrus")]
[InlineData("Citrus+", "citrus+")]
[InlineData("", "")] [InlineData("", "")]
public void NormalizeTest(string input, string expected) public void NormalizeTest(string input, string expected)
{ {

View File

@ -2,6 +2,7 @@
using System.IO; using System.IO;
using System.IO.Compression; using System.IO.Compression;
using API.Archive; using API.Archive;
using API.Data.Metadata;
using API.Interfaces.Services; using API.Interfaces.Services;
using API.Services; using API.Services;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
@ -216,8 +217,30 @@ namespace API.Tests.Services
var archive = Path.Join(testDirectory, "file in folder.zip"); var archive = Path.Join(testDirectory, "file in folder.zip");
var summaryInfo = "By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! Can Ryouta still come out on top when he's playing for his life!?"; var summaryInfo = "By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! Can Ryouta still come out on top when he's playing for his life!?";
Assert.Equal(summaryInfo, _archiveService.GetSummaryInfo(archive)); Assert.Equal(summaryInfo, _archiveService.GetComicInfo(archive).Summary);
}
[Fact]
public void CanParseComicInfo()
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/ComicInfos");
var archive = Path.Join(testDirectory, "ComicInfo.zip");
var actual = _archiveService.GetComicInfo(archive);
var expected = new ComicInfo()
{
Publisher = "Yen Press",
Genre = "Manga, Movies & TV",
Summary =
"By all counts, Ryouta Sakamoto is a loser when he's not holed up in his room, bombing things into oblivion in his favorite online action RPG. But his very own uneventful life is blown to pieces when he's abducted and taken to an uninhabited island, where he soon learns the hard way that he's being pitted against others just like him in a explosives-riddled death match! How could this be happening? Who's putting them up to this? And why!? The name, not to mention the objective, of this very real survival game is eerily familiar to Ryouta, who has mastered its virtual counterpart-BTOOOM! Can Ryouta still come out on top when he's playing for his life!?",
PageCount = 194,
LanguageISO = "en",
Notes = "Scraped metadata from Comixology [CMXDB450184]",
Series = "BTOOOM!",
Title = "v01",
Web = "https://www.comixology.com/BTOOOM/digital-comic/450184"
};
Assert.NotStrictEqual(expected, actual);
} }
} }
} }

View File

@ -90,7 +90,7 @@ namespace API.Tests.Services
} }
[Theory] [Theory]
[InlineData(new string[] {"C:/Manga/"}, new string[] {"C:/Manga/Love Hina/Vol. 01.cbz"}, "C:/Manga/Love Hina")] [InlineData(new [] {"C:/Manga/"}, new [] {"C:/Manga/Love Hina/Vol. 01.cbz"}, "C:/Manga/Love Hina")]
public void FindHighestDirectoriesFromFilesTest(string[] rootDirectories, string[] folders, string expectedDirectory) public void FindHighestDirectoriesFromFilesTest(string[] rootDirectories, string[] folders, string expectedDirectory)
{ {
var actual = DirectoryService.FindHighestDirectoriesFromFiles(rootDirectories, folders); var actual = DirectoryService.FindHighestDirectoriesFromFiles(rootDirectories, folders);

View File

@ -1,13 +1,7 @@
using System; using System;
using System.IO; using System.IO;
using API.Entities; using API.Entities;
using API.Interfaces;
using API.Interfaces.Services;
using API.Services; using API.Services;
using API.SignalR;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Logging;
using NSubstitute;
using Xunit; using Xunit;
namespace API.Tests.Services namespace API.Tests.Services

View File

@ -111,7 +111,7 @@ namespace API.Tests.Services
Assert.Empty(_scannerService.FindSeriesNotOnDisk(existingSeries, infos)); Assert.Empty(ScannerService.FindSeriesNotOnDisk(existingSeries, infos));
} }

View File

@ -16,6 +16,10 @@
<DocumentationFile>bin\Debug\API.xml</DocumentationFile> <DocumentationFile>bin\Debug\API.xml</DocumentationFile>
</PropertyGroup> </PropertyGroup>
<PropertyGroup>
<SatelliteResourceLanguages>en</SatelliteResourceLanguages>
</PropertyGroup>
<!-- Set the Product and Version info for our own projects --> <!-- Set the Product and Version info for our own projects -->
<PropertyGroup> <PropertyGroup>
<Product>Kavita</Product> <Product>Kavita</Product>
@ -33,39 +37,38 @@
<ItemGroup> <ItemGroup>
<PackageReference Include="AutoMapper.Extensions.Microsoft.DependencyInjection" Version="8.1.1" /> <PackageReference Include="AutoMapper.Extensions.Microsoft.DependencyInjection" Version="8.1.1" />
<PackageReference Include="Docnet.Core" Version="2.3.1" /> <PackageReference Include="Docnet.Core" Version="2.4.0-alpha.1" />
<PackageReference Include="ExCSS" Version="4.1.0" /> <PackageReference Include="ExCSS" Version="4.1.0" />
<PackageReference Include="Flurl" Version="3.0.2" /> <PackageReference Include="Flurl" Version="3.0.2" />
<PackageReference Include="Flurl.Http" Version="3.2.0" /> <PackageReference Include="Flurl.Http" Version="3.2.0" />
<PackageReference Include="Hangfire" Version="1.7.24" /> <PackageReference Include="Hangfire" Version="1.7.25" />
<PackageReference Include="Hangfire.AspNetCore" Version="1.7.24" /> <PackageReference Include="Hangfire.AspNetCore" Version="1.7.25" />
<PackageReference Include="Hangfire.MaximumConcurrentExecutions" Version="1.1.0" /> <PackageReference Include="Hangfire.MaximumConcurrentExecutions" Version="1.1.0" />
<PackageReference Include="Hangfire.MemoryStorage.Core" Version="1.4.0" /> <PackageReference Include="Hangfire.MemoryStorage.Core" Version="1.4.0" />
<PackageReference Include="HtmlAgilityPack" Version="1.11.35" /> <PackageReference Include="HtmlAgilityPack" Version="1.11.37" />
<PackageReference Include="MarkdownDeep.NET.Core" Version="1.5.0.4" /> <PackageReference Include="MarkdownDeep.NET.Core" Version="1.5.0.4" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="5.0.9" /> <PackageReference Include="Microsoft.AspNetCore.Authentication.JwtBearer" Version="5.0.10" />
<PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="5.0.8" /> <PackageReference Include="Microsoft.AspNetCore.Authentication.OpenIdConnect" Version="5.0.10" />
<PackageReference Include="Microsoft.AspNetCore.Identity.EntityFrameworkCore" Version="5.0.8" /> <PackageReference Include="Microsoft.AspNetCore.Identity.EntityFrameworkCore" Version="5.0.10" />
<PackageReference Include="Microsoft.AspNetCore.SignalR" Version="1.1.0" /> <PackageReference Include="Microsoft.AspNetCore.SignalR" Version="1.1.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="5.0.8"> <PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="5.0.10">
<PrivateAssets>all</PrivateAssets> <PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets> <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference> </PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="5.0.8" /> <PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="5.0.10" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="5.0.2" /> <PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="5.0.2" />
<PackageReference Include="Microsoft.IO.RecyclableMemoryStream" Version="2.1.3" /> <PackageReference Include="Microsoft.IO.RecyclableMemoryStream" Version="2.1.3" />
<PackageReference Include="NetVips" Version="2.0.1" /> <PackageReference Include="NetVips" Version="2.0.1" />
<PackageReference Include="NetVips.Native" Version="8.11.0" /> <PackageReference Include="NetVips.Native" Version="8.11.4" />
<PackageReference Include="NReco.Logging.File" Version="1.1.2" /> <PackageReference Include="NReco.Logging.File" Version="1.1.2" />
<PackageReference Include="Sentry.AspNetCore" Version="3.8.3" /> <PackageReference Include="SharpCompress" Version="0.30.0" />
<PackageReference Include="SharpCompress" Version="0.29.0" /> <PackageReference Include="SonarAnalyzer.CSharp" Version="8.29.0.36737">
<PackageReference Include="SonarAnalyzer.CSharp" Version="8.27.0.35380">
<PrivateAssets>all</PrivateAssets> <PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets> <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference> </PackageReference>
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.1.5" /> <PackageReference Include="Swashbuckle.AspNetCore" Version="6.2.2" />
<PackageReference Include="System.Drawing.Common" Version="5.0.2" /> <PackageReference Include="System.Drawing.Common" Version="5.0.2" />
<PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="6.12.0" /> <PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="6.12.2" />
<PackageReference Include="VersOne.Epub" Version="3.0.3.1" /> <PackageReference Include="VersOne.Epub" Version="3.0.3.1" />
</ItemGroup> </ItemGroup>

View File

@ -7,10 +7,10 @@ using API.Constants;
using API.DTOs; using API.DTOs;
using API.DTOs.Account; using API.DTOs.Account;
using API.Entities; using API.Entities;
using API.Errors;
using API.Extensions; using API.Extensions;
using API.Interfaces; using API.Interfaces;
using API.Interfaces.Services; using API.Interfaces.Services;
using API.Services;
using AutoMapper; using AutoMapper;
using Kavita.Common; using Kavita.Common;
using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Identity;
@ -31,13 +31,14 @@ namespace API.Controllers
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
private readonly ILogger<AccountController> _logger; private readonly ILogger<AccountController> _logger;
private readonly IMapper _mapper; private readonly IMapper _mapper;
private readonly IAccountService _accountService;
/// <inheritdoc /> /// <inheritdoc />
public AccountController(UserManager<AppUser> userManager, public AccountController(UserManager<AppUser> userManager,
SignInManager<AppUser> signInManager, SignInManager<AppUser> signInManager,
ITokenService tokenService, IUnitOfWork unitOfWork, ITokenService tokenService, IUnitOfWork unitOfWork,
ILogger<AccountController> logger, ILogger<AccountController> logger,
IMapper mapper) IMapper mapper, IAccountService accountService)
{ {
_userManager = userManager; _userManager = userManager;
_signInManager = signInManager; _signInManager = signInManager;
@ -45,6 +46,7 @@ namespace API.Controllers
_unitOfWork = unitOfWork; _unitOfWork = unitOfWork;
_logger = logger; _logger = logger;
_mapper = mapper; _mapper = mapper;
_accountService = accountService;
} }
/// <summary> /// <summary>
@ -61,30 +63,10 @@ namespace API.Controllers
if (resetPasswordDto.UserName != User.GetUsername() && !User.IsInRole(PolicyConstants.AdminRole)) if (resetPasswordDto.UserName != User.GetUsername() && !User.IsInRole(PolicyConstants.AdminRole))
return Unauthorized("You are not permitted to this operation."); return Unauthorized("You are not permitted to this operation.");
// Validate Password var errors = await _accountService.ChangeUserPassword(user, resetPasswordDto.Password);
foreach (var validator in _userManager.PasswordValidators) if (errors.Any())
{ {
var validationResult = await validator.ValidateAsync(_userManager, user, resetPasswordDto.Password); return BadRequest(errors);
if (!validationResult.Succeeded)
{
return BadRequest(
validationResult.Errors.Select(e => new ApiException(400, e.Code, e.Description)));
}
}
var result = await _userManager.RemovePasswordAsync(user);
if (!result.Succeeded)
{
_logger.LogError("Could not update password");
return BadRequest(result.Errors.Select(e => new ApiException(400, e.Code, e.Description)));
}
result = await _userManager.AddPasswordAsync(user, resetPasswordDto.Password);
if (!result.Succeeded)
{
_logger.LogError("Could not update password");
return BadRequest(result.Errors.Select(e => new ApiException(400, e.Code, e.Description)));
} }
_logger.LogInformation("{User}'s Password has been reset", resetPasswordDto.UserName); _logger.LogInformation("{User}'s Password has been reset", resetPasswordDto.UserName);
@ -110,6 +92,13 @@ namespace API.Controllers
user.UserPreferences ??= new AppUserPreferences(); user.UserPreferences ??= new AppUserPreferences();
user.ApiKey = HashUtil.ApiKey(); user.ApiKey = HashUtil.ApiKey();
var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
if (!settings.EnableAuthentication && !registerDto.IsAdmin)
{
_logger.LogInformation("User {UserName} is being registered as non-admin with no server authentication. Using default password.", registerDto.Username);
registerDto.Password = AccountService.DefaultPassword;
}
var result = await _userManager.CreateAsync(user, registerDto.Password); var result = await _userManager.CreateAsync(user, registerDto.Password);
if (!result.Succeeded) return BadRequest(result.Errors); if (!result.Succeeded) return BadRequest(result.Errors);
@ -166,6 +155,14 @@ namespace API.Controllers
if (user == null) return Unauthorized("Invalid username"); if (user == null) return Unauthorized("Invalid username");
var isAdmin = await _unitOfWork.UserRepository.IsUserAdmin(user);
var settings = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
if (!settings.EnableAuthentication && !isAdmin)
{
_logger.LogDebug("User {UserName} is logging in with authentication disabled", loginDto.Username);
loginDto.Password = AccountService.DefaultPassword;
}
var result = await _signInManager var result = await _signInManager
.CheckPasswordSignInAsync(user, loginDto.Password, false); .CheckPasswordSignInAsync(user, loginDto.Password, false);

View File

@ -2,13 +2,11 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Constants;
using API.DTOs; using API.DTOs;
using API.Entities; using API.Entities;
using API.Extensions; using API.Extensions;
using API.Interfaces; using API.Interfaces;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
namespace API.Controllers namespace API.Controllers
@ -19,13 +17,11 @@ namespace API.Controllers
public class CollectionController : BaseApiController public class CollectionController : BaseApiController
{ {
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
private readonly UserManager<AppUser> _userManager;
/// <inheritdoc /> /// <inheritdoc />
public CollectionController(IUnitOfWork unitOfWork, UserManager<AppUser> userManager) public CollectionController(IUnitOfWork unitOfWork)
{ {
_unitOfWork = unitOfWork; _unitOfWork = unitOfWork;
_userManager = userManager;
} }
/// <summary> /// <summary>
@ -36,7 +32,7 @@ namespace API.Controllers
public async Task<IEnumerable<CollectionTagDto>> GetAllTags() public async Task<IEnumerable<CollectionTagDto>> GetAllTags()
{ {
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername()); var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername());
var isAdmin = await _userManager.IsInRoleAsync(user, PolicyConstants.AdminRole); var isAdmin = await _unitOfWork.UserRepository.IsUserAdmin(user);
if (isAdmin) if (isAdmin)
{ {
return await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync(); return await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync();

View File

@ -63,7 +63,7 @@ namespace API.Controllers
public async Task<ActionResult> DownloadVolume(int volumeId) public async Task<ActionResult> DownloadVolume(int volumeId)
{ {
var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId); var files = await _unitOfWork.VolumeRepository.GetFilesForVolume(volumeId);
var volume = await _unitOfWork.SeriesRepository.GetVolumeByIdAsync(volumeId); var volume = await _unitOfWork.VolumeRepository.GetVolumeByIdAsync(volumeId);
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId); var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId);
try try
{ {
@ -92,7 +92,7 @@ namespace API.Controllers
{ {
var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId); var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId);
var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId); var chapter = await _unitOfWork.ChapterRepository.GetChapterAsync(chapterId);
var volume = await _unitOfWork.SeriesRepository.GetVolumeByIdAsync(chapter.VolumeId); var volume = await _unitOfWork.VolumeRepository.GetVolumeByIdAsync(chapter.VolumeId);
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId); var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId);
try try
{ {

View File

@ -1,12 +1,9 @@
using System; using System.IO;
using System.IO;
using System.Net;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Extensions; using API.Extensions;
using API.Interfaces; using API.Interfaces;
using API.Services; using API.Services;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
using Microsoft.Net.Http.Headers;
namespace API.Controllers namespace API.Controllers
{ {

View File

@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data.Repositories;
using API.DTOs; using API.DTOs;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
@ -179,7 +180,7 @@ namespace API.Controllers
try try
{ {
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId); var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);
_unitOfWork.LibraryRepository.Delete(library); _unitOfWork.LibraryRepository.Delete(library);
await _unitOfWork.CommitAsync(); await _unitOfWork.CommitAsync();
@ -203,7 +204,7 @@ namespace API.Controllers
[HttpPost("update")] [HttpPost("update")]
public async Task<ActionResult> UpdateLibrary(UpdateLibraryDto libraryForUserDto) public async Task<ActionResult> UpdateLibrary(UpdateLibraryDto libraryForUserDto)
{ {
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryForUserDto.Id); var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryForUserDto.Id, LibraryIncludes.Folders);
var originalFolders = library.Folders.Select(x => x.Path).ToList(); var originalFolders = library.Folders.Select(x => x.Path).ToList();

View File

@ -5,7 +5,6 @@ using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using System.Xml.Serialization; using System.Xml.Serialization;
using API.Comparators; using API.Comparators;
using API.Constants;
using API.DTOs; using API.DTOs;
using API.DTOs.Filtering; using API.DTOs.Filtering;
using API.DTOs.OPDS; using API.DTOs.OPDS;
@ -16,7 +15,6 @@ using API.Interfaces;
using API.Interfaces.Services; using API.Interfaces.Services;
using API.Services; using API.Services;
using Kavita.Common; using Kavita.Common;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
namespace API.Controllers namespace API.Controllers
@ -26,7 +24,6 @@ namespace API.Controllers
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
private readonly IDownloadService _downloadService; private readonly IDownloadService _downloadService;
private readonly IDirectoryService _directoryService; private readonly IDirectoryService _directoryService;
private readonly UserManager<AppUser> _userManager;
private readonly ICacheService _cacheService; private readonly ICacheService _cacheService;
private readonly IReaderService _readerService; private readonly IReaderService _readerService;
@ -41,13 +38,12 @@ namespace API.Controllers
private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer(); private readonly ChapterSortComparer _chapterSortComparer = new ChapterSortComparer();
public OpdsController(IUnitOfWork unitOfWork, IDownloadService downloadService, public OpdsController(IUnitOfWork unitOfWork, IDownloadService downloadService,
IDirectoryService directoryService, UserManager<AppUser> userManager, IDirectoryService directoryService, ICacheService cacheService,
ICacheService cacheService, IReaderService readerService) IReaderService readerService)
{ {
_unitOfWork = unitOfWork; _unitOfWork = unitOfWork;
_downloadService = downloadService; _downloadService = downloadService;
_directoryService = directoryService; _directoryService = directoryService;
_userManager = userManager;
_cacheService = cacheService; _cacheService = cacheService;
_readerService = readerService; _readerService = readerService;
@ -170,16 +166,16 @@ namespace API.Controllers
return BadRequest("OPDS is not enabled on this server"); return BadRequest("OPDS is not enabled on this server");
var userId = await GetUser(apiKey); var userId = await GetUser(apiKey);
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
var isAdmin = await _userManager.IsInRoleAsync(user, PolicyConstants.AdminRole); var isAdmin = await _unitOfWork.UserRepository.IsUserAdmin(user);
IEnumerable <CollectionTagDto> tags; IList<CollectionTagDto> tags;
if (isAdmin) if (isAdmin)
{ {
tags = await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync(); tags = (await _unitOfWork.CollectionTagRepository.GetAllTagDtosAsync()).ToList();
} }
else else
{ {
tags = await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync(); tags = (await _unitOfWork.CollectionTagRepository.GetAllPromotedTagDtosAsync()).ToList();
} }
@ -201,6 +197,14 @@ namespace API.Controllers
}); });
} }
if (tags.Count == 0)
{
feed.Entries.Add(new FeedEntry()
{
Title = "Nothing here",
});
}
return CreateXmlResult(SerializeXml(feed)); return CreateXmlResult(SerializeXml(feed));
} }
@ -213,7 +217,7 @@ namespace API.Controllers
return BadRequest("OPDS is not enabled on this server"); return BadRequest("OPDS is not enabled on this server");
var userId = await GetUser(apiKey); var userId = await GetUser(apiKey);
var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId); var user = await _unitOfWork.UserRepository.GetUserByIdAsync(userId);
var isAdmin = await _userManager.IsInRoleAsync(user, PolicyConstants.AdminRole); var isAdmin = await _unitOfWork.UserRepository.IsUserAdmin(user);
IEnumerable <CollectionTagDto> tags; IEnumerable <CollectionTagDto> tags;
if (isAdmin) if (isAdmin)
@ -300,13 +304,13 @@ namespace API.Controllers
var feed = CreateFeed(readingList.Title + " Reading List", $"{apiKey}/reading-list/{readingListId}", apiKey); var feed = CreateFeed(readingList.Title + " Reading List", $"{apiKey}/reading-list/{readingListId}", apiKey);
var items = await _unitOfWork.ReadingListRepository.GetReadingListItemDtosByIdAsync(readingListId, userId); var items = (await _unitOfWork.ReadingListRepository.GetReadingListItemDtosByIdAsync(readingListId, userId)).ToList();
foreach (var item in items) foreach (var item in items)
{ {
feed.Entries.Add(new FeedEntry() feed.Entries.Add(new FeedEntry()
{ {
Id = item.ChapterId.ToString(), Id = item.ChapterId.ToString(),
Title = "Chapter " + item.ChapterNumber, Title = $"{item.SeriesName} Chapter {item.ChapterNumber}",
Links = new List<FeedLink>() Links = new List<FeedLink>()
{ {
CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{item.SeriesId}/volume/{item.VolumeId}/chapter/{item.ChapterId}"), CreateLink(FeedLinkRelation.SubSection, FeedLinkType.AtomNavigation, Prefix + $"{apiKey}/series/{item.SeriesId}/volume/{item.VolumeId}/chapter/{item.ChapterId}"),
@ -315,6 +319,14 @@ namespace API.Controllers
}); });
} }
if (items.Count == 0)
{
feed.Entries.Add(new FeedEntry()
{
Title = "Nothing here",
});
}
return CreateXmlResult(SerializeXml(feed)); return CreateXmlResult(SerializeXml(feed));
@ -373,6 +385,14 @@ namespace API.Controllers
feed.Entries.Add(CreateSeries(seriesDto, apiKey)); feed.Entries.Add(CreateSeries(seriesDto, apiKey));
} }
if (recentlyAdded.Count == 0)
{
feed.Entries.Add(new FeedEntry()
{
Title = "Nothing here",
});
}
return CreateXmlResult(SerializeXml(feed)); return CreateXmlResult(SerializeXml(feed));
} }
@ -404,6 +424,14 @@ namespace API.Controllers
feed.Entries.Add(CreateSeries(seriesDto, apiKey)); feed.Entries.Add(CreateSeries(seriesDto, apiKey));
} }
if (pagedList.Count == 0)
{
feed.Entries.Add(new FeedEntry()
{
Title = "Nothing here",
});
}
return CreateXmlResult(SerializeXml(feed)); return CreateXmlResult(SerializeXml(feed));
} }
@ -467,7 +495,7 @@ namespace API.Controllers
return BadRequest("OPDS is not enabled on this server"); return BadRequest("OPDS is not enabled on this server");
var userId = await GetUser(apiKey); var userId = await GetUser(apiKey);
var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId);
var volumes = await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, userId); var volumes = await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId);
var feed = CreateFeed(series.Name + " - Volumes", $"{apiKey}/series/{series.Id}", apiKey); var feed = CreateFeed(series.Name + " - Volumes", $"{apiKey}/series/{series.Id}", apiKey);
feed.Links.Add(CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/series-cover?seriesId={seriesId}")); feed.Links.Add(CreateLink(FeedLinkRelation.Image, FeedLinkType.Image, $"/api/image/series-cover?seriesId={seriesId}"));
foreach (var volumeDto in volumes) foreach (var volumeDto in volumes)
@ -486,7 +514,7 @@ namespace API.Controllers
return BadRequest("OPDS is not enabled on this server"); return BadRequest("OPDS is not enabled on this server");
var userId = await GetUser(apiKey); var userId = await GetUser(apiKey);
var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId);
var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId); var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId);
var chapters = var chapters =
(await _unitOfWork.ChapterRepository.GetChaptersAsync(volumeId)).OrderBy(x => double.Parse(x.Number), (await _unitOfWork.ChapterRepository.GetChaptersAsync(volumeId)).OrderBy(x => double.Parse(x.Number),
_chapterSortComparer); _chapterSortComparer);
@ -517,7 +545,7 @@ namespace API.Controllers
return BadRequest("OPDS is not enabled on this server"); return BadRequest("OPDS is not enabled on this server");
var userId = await GetUser(apiKey); var userId = await GetUser(apiKey);
var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId); var series = await _unitOfWork.SeriesRepository.GetSeriesDtoByIdAsync(seriesId, userId);
var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(volumeId); var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(volumeId);
var chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapterId); var chapter = await _unitOfWork.ChapterRepository.GetChapterDtoAsync(chapterId);
var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId); var files = await _unitOfWork.ChapterRepository.GetFilesForChapterAsync(chapterId);

View File

@ -97,7 +97,7 @@ namespace API.Controllers
public async Task<ActionResult> MarkRead(MarkReadDto markReadDto) public async Task<ActionResult> MarkRead(MarkReadDto markReadDto)
{ {
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress);
var volumes = await _unitOfWork.SeriesRepository.GetVolumes(markReadDto.SeriesId); var volumes = await _unitOfWork.VolumeRepository.GetVolumes(markReadDto.SeriesId);
user.Progresses ??= new List<AppUserProgress>(); user.Progresses ??= new List<AppUserProgress>();
foreach (var volume in volumes) foreach (var volume in volumes)
{ {
@ -125,7 +125,7 @@ namespace API.Controllers
public async Task<ActionResult> MarkUnread(MarkReadDto markReadDto) public async Task<ActionResult> MarkUnread(MarkReadDto markReadDto)
{ {
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress);
var volumes = await _unitOfWork.SeriesRepository.GetVolumes(markReadDto.SeriesId); var volumes = await _unitOfWork.VolumeRepository.GetVolumes(markReadDto.SeriesId);
user.Progresses ??= new List<AppUserProgress>(); user.Progresses ??= new List<AppUserProgress>();
foreach (var volume in volumes) foreach (var volume in volumes)
{ {
@ -267,7 +267,7 @@ namespace API.Controllers
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress);
user.Progresses ??= new List<AppUserProgress>(); user.Progresses ??= new List<AppUserProgress>();
var volumes = await _unitOfWork.SeriesRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true); var volumes = await _unitOfWork.VolumeRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true);
foreach (var volume in volumes) foreach (var volume in volumes)
{ {
_readerService.MarkChaptersAsRead(user, volume.SeriesId, volume.Chapters); _readerService.MarkChaptersAsRead(user, volume.SeriesId, volume.Chapters);
@ -294,7 +294,7 @@ namespace API.Controllers
var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress); var user = await _unitOfWork.UserRepository.GetUserByUsernameAsync(User.GetUsername(), AppUserIncludes.Progress);
user.Progresses ??= new List<AppUserProgress>(); user.Progresses ??= new List<AppUserProgress>();
var volumes = await _unitOfWork.SeriesRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true); var volumes = await _unitOfWork.VolumeRepository.GetVolumesForSeriesAsync(dto.SeriesIds.ToArray(), true);
foreach (var volume in volumes) foreach (var volume in volumes)
{ {
_readerService.MarkChaptersAsUnread(user, volume.SeriesId, volume.Chapters); _readerService.MarkChaptersAsUnread(user, volume.SeriesId, volume.Chapters);

View File

@ -1,4 +1,5 @@
using System.Collections.Generic; using System;
using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Comparators; using API.Comparators;
@ -99,16 +100,20 @@ namespace API.Controllers
[HttpPost("delete-item")] [HttpPost("delete-item")]
public async Task<ActionResult> DeleteListItem(UpdateReadingListPosition dto) public async Task<ActionResult> DeleteListItem(UpdateReadingListPosition dto)
{ {
var items = (await _unitOfWork.ReadingListRepository.GetReadingListItemsByIdAsync(dto.ReadingListId)).ToList(); var readingList = await _unitOfWork.ReadingListRepository.GetReadingListByIdAsync(dto.ReadingListId);
var item = items.Find(r => r.Id == dto.ReadingListItemId); readingList.Items = readingList.Items.Where(r => r.Id != dto.ReadingListItemId).ToList();
items.Remove(item);
for (var i = 0; i < items.Count; i++)
var index = 0;
foreach (var readingListItem in readingList.Items)
{ {
items[i].Order = i; readingListItem.Order = index;
index++;
} }
if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync()) if (!_unitOfWork.HasChanges()) return Ok();
if (await _unitOfWork.CommitAsync())
{ {
return Ok("Updated"); return Ok("Updated");
} }
@ -138,15 +143,10 @@ namespace API.Controllers
itemIdsToRemove.Contains(r.Id)); itemIdsToRemove.Contains(r.Id));
_unitOfWork.ReadingListRepository.BulkRemove(listItems); _unitOfWork.ReadingListRepository.BulkRemove(listItems);
if (_unitOfWork.HasChanges()) if (!_unitOfWork.HasChanges()) return Ok("Nothing to remove");
{
await _unitOfWork.CommitAsync(); await _unitOfWork.CommitAsync();
return Ok("Updated"); return Ok("Updated");
}
else
{
return Ok("Nothing to remove");
}
} }
catch catch
{ {
@ -437,7 +437,7 @@ namespace API.Controllers
var existingChapterExists = readingList.Items.Select(rli => rli.ChapterId).ToHashSet(); var existingChapterExists = readingList.Items.Select(rli => rli.ChapterId).ToHashSet();
var chaptersForSeries = (await _unitOfWork.ChapterRepository.GetChaptersByIdsAsync(chapterIds)) var chaptersForSeries = (await _unitOfWork.ChapterRepository.GetChaptersByIdsAsync(chapterIds))
.OrderBy(c => int.Parse(c.Volume.Name)) .OrderBy(c => float.Parse(c.Volume.Name))
.ThenBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting); .ThenBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting);
var index = lastOrder + 1; var index = lastOrder + 1;

View File

@ -10,9 +10,11 @@ using API.Entities;
using API.Extensions; using API.Extensions;
using API.Helpers; using API.Helpers;
using API.Interfaces; using API.Interfaces;
using API.SignalR;
using Kavita.Common; using Kavita.Common;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
namespace API.Controllers namespace API.Controllers
@ -22,12 +24,14 @@ namespace API.Controllers
private readonly ILogger<SeriesController> _logger; private readonly ILogger<SeriesController> _logger;
private readonly ITaskScheduler _taskScheduler; private readonly ITaskScheduler _taskScheduler;
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
private readonly IHubContext<MessageHub> _messageHub;
public SeriesController(ILogger<SeriesController> logger, ITaskScheduler taskScheduler, IUnitOfWork unitOfWork) public SeriesController(ILogger<SeriesController> logger, ITaskScheduler taskScheduler, IUnitOfWork unitOfWork, IHubContext<MessageHub> messageHub)
{ {
_logger = logger; _logger = logger;
_taskScheduler = taskScheduler; _taskScheduler = taskScheduler;
_unitOfWork = unitOfWork; _unitOfWork = unitOfWork;
_messageHub = messageHub;
} }
[HttpPost] [HttpPost]
@ -97,14 +101,14 @@ namespace API.Controllers
public async Task<ActionResult<IEnumerable<VolumeDto>>> GetVolumes(int seriesId) public async Task<ActionResult<IEnumerable<VolumeDto>>> GetVolumes(int seriesId)
{ {
var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername());
return Ok(await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, userId)); return Ok(await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId));
} }
[HttpGet("volume")] [HttpGet("volume")]
public async Task<ActionResult<VolumeDto>> GetVolume(int volumeId) public async Task<ActionResult<VolumeDto>> GetVolume(int volumeId)
{ {
var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername());
return Ok(await _unitOfWork.SeriesRepository.GetVolumeDtoAsync(volumeId, userId)); return Ok(await _unitOfWork.VolumeRepository.GetVolumeDtoAsync(volumeId, userId));
} }
[HttpGet("chapter")] [HttpGet("chapter")]
@ -217,7 +221,7 @@ namespace API.Controllers
[HttpPost("refresh-metadata")] [HttpPost("refresh-metadata")]
public ActionResult RefreshSeriesMetadata(RefreshSeriesDto refreshSeriesDto) public ActionResult RefreshSeriesMetadata(RefreshSeriesDto refreshSeriesDto)
{ {
_taskScheduler.RefreshSeriesMetadata(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId); _taskScheduler.RefreshSeriesMetadata(refreshSeriesDto.LibraryId, refreshSeriesDto.SeriesId, true);
return Ok(); return Ok();
} }
@ -296,6 +300,12 @@ namespace API.Controllers
if (await _unitOfWork.CommitAsync()) if (await _unitOfWork.CommitAsync())
{ {
foreach (var tag in updateSeriesMetadataDto.Tags)
{
await _messageHub.Clients.All.SendAsync(SignalREvents.SeriesAddedToCollection,
MessageFactory.SeriesAddedToCollection(tag.Id,
updateSeriesMetadataDto.SeriesMetadata.SeriesId));
}
return Ok("Successfully updated"); return Ok("Successfully updated");
} }
} }

View File

@ -3,11 +3,13 @@ using System.Collections.Generic;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.DTOs; using API.DTOs.Settings;
using API.Entities.Enums; using API.Entities.Enums;
using API.Extensions; using API.Extensions;
using API.Helpers.Converters; using API.Helpers.Converters;
using API.Interfaces; using API.Interfaces;
using API.Interfaces.Services;
using API.Services;
using Kavita.Common; using Kavita.Common;
using Kavita.Common.Extensions; using Kavita.Common.Extensions;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;
@ -21,12 +23,22 @@ namespace API.Controllers
private readonly ILogger<SettingsController> _logger; private readonly ILogger<SettingsController> _logger;
private readonly IUnitOfWork _unitOfWork; private readonly IUnitOfWork _unitOfWork;
private readonly ITaskScheduler _taskScheduler; private readonly ITaskScheduler _taskScheduler;
private readonly IAccountService _accountService;
public SettingsController(ILogger<SettingsController> logger, IUnitOfWork unitOfWork, ITaskScheduler taskScheduler) public SettingsController(ILogger<SettingsController> logger, IUnitOfWork unitOfWork, ITaskScheduler taskScheduler, IAccountService accountService)
{ {
_logger = logger; _logger = logger;
_unitOfWork = unitOfWork; _unitOfWork = unitOfWork;
_taskScheduler = taskScheduler; _taskScheduler = taskScheduler;
_accountService = accountService;
}
[AllowAnonymous]
[HttpGet("base-url")]
public async Task<ActionResult<string>> GetBaseUrl()
{
var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
return Ok(settingsDto.BaseUrl);
} }
[Authorize(Policy = "RequireAdminRole")] [Authorize(Policy = "RequireAdminRole")]
@ -57,6 +69,7 @@ namespace API.Controllers
// We do not allow CacheDirectory changes, so we will ignore. // We do not allow CacheDirectory changes, so we will ignore.
var currentSettings = await _unitOfWork.SettingsRepository.GetSettingsAsync(); var currentSettings = await _unitOfWork.SettingsRepository.GetSettingsAsync();
var updateAuthentication = false;
foreach (var setting in currentSettings) foreach (var setting in currentSettings)
{ {
@ -80,6 +93,18 @@ namespace API.Controllers
_unitOfWork.SettingsRepository.Update(setting); _unitOfWork.SettingsRepository.Update(setting);
} }
if (setting.Key == ServerSettingKey.BaseUrl && updateSettingsDto.BaseUrl + string.Empty != setting.Value)
{
var path = !updateSettingsDto.BaseUrl.StartsWith("/")
? $"/{updateSettingsDto.BaseUrl}"
: updateSettingsDto.BaseUrl;
path = !path.EndsWith("/")
? $"{path}/"
: path;
setting.Value = path;
_unitOfWork.SettingsRepository.Update(setting);
}
if (setting.Key == ServerSettingKey.LoggingLevel && updateSettingsDto.LoggingLevel + string.Empty != setting.Value) if (setting.Key == ServerSettingKey.LoggingLevel && updateSettingsDto.LoggingLevel + string.Empty != setting.Value)
{ {
setting.Value = updateSettingsDto.LoggingLevel + string.Empty; setting.Value = updateSettingsDto.LoggingLevel + string.Empty;
@ -93,6 +118,13 @@ namespace API.Controllers
_unitOfWork.SettingsRepository.Update(setting); _unitOfWork.SettingsRepository.Update(setting);
} }
if (setting.Key == ServerSettingKey.EnableAuthentication && updateSettingsDto.EnableAuthentication + string.Empty != setting.Value)
{
setting.Value = updateSettingsDto.EnableAuthentication + string.Empty;
_unitOfWork.SettingsRepository.Update(setting);
updateAuthentication = true;
}
if (setting.Key == ServerSettingKey.AllowStatCollection && updateSettingsDto.AllowStatCollection + string.Empty != setting.Value) if (setting.Key == ServerSettingKey.AllowStatCollection && updateSettingsDto.AllowStatCollection + string.Empty != setting.Value)
{ {
setting.Value = updateSettingsDto.AllowStatCollection + string.Empty; setting.Value = updateSettingsDto.AllowStatCollection + string.Empty;
@ -110,12 +142,33 @@ namespace API.Controllers
if (!_unitOfWork.HasChanges()) return Ok("Nothing was updated"); if (!_unitOfWork.HasChanges()) return Ok("Nothing was updated");
if (!_unitOfWork.HasChanges() || !await _unitOfWork.CommitAsync()) try
{ {
await _unitOfWork.CommitAsync();
if (updateAuthentication)
{
var users = await _unitOfWork.UserRepository.GetNonAdminUsersAsync();
foreach (var user in users)
{
var errors = await _accountService.ChangeUserPassword(user, AccountService.DefaultPassword);
if (!errors.Any()) continue;
await _unitOfWork.RollbackAsync();
return BadRequest(errors);
}
_logger.LogInformation("Server authentication changed. Updated all non-admins to default password");
}
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an exception when updating server settings");
await _unitOfWork.RollbackAsync(); await _unitOfWork.RollbackAsync();
return BadRequest("There was a critical issue. Please try again."); return BadRequest("There was a critical issue. Please try again.");
} }
_logger.LogInformation("Server Settings updated"); _logger.LogInformation("Server Settings updated");
_taskScheduler.ScheduleTasks(); _taskScheduler.ScheduleTasks();
return Ok(updateSettingsDto); return Ok(updateSettingsDto);
@ -148,5 +201,12 @@ namespace API.Controllers
var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync(); var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
return Ok(settingsDto.EnableOpds); return Ok(settingsDto.EnableOpds);
} }
[HttpGet("authentication-enabled")]
public async Task<ActionResult<bool>> GetAuthenticationEnabled()
{
var settingsDto = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
return Ok(settingsDto.EnableAuthentication);
}
} }
} }

View File

@ -148,7 +148,7 @@ namespace API.Controllers
chapter.CoverImage = filePath; chapter.CoverImage = filePath;
chapter.CoverImageLocked = true; chapter.CoverImageLocked = true;
_unitOfWork.ChapterRepository.Update(chapter); _unitOfWork.ChapterRepository.Update(chapter);
var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(chapter.VolumeId); var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(chapter.VolumeId);
volume.CoverImage = chapter.CoverImage; volume.CoverImage = chapter.CoverImage;
_unitOfWork.VolumeRepository.Update(volume); _unitOfWork.VolumeRepository.Update(volume);
} }
@ -185,7 +185,7 @@ namespace API.Controllers
chapter.CoverImage = string.Empty; chapter.CoverImage = string.Empty;
chapter.CoverImageLocked = false; chapter.CoverImageLocked = false;
_unitOfWork.ChapterRepository.Update(chapter); _unitOfWork.ChapterRepository.Update(chapter);
var volume = await _unitOfWork.SeriesRepository.GetVolumeAsync(chapter.VolumeId); var volume = await _unitOfWork.VolumeRepository.GetVolumeAsync(chapter.VolumeId);
volume.CoverImage = chapter.CoverImage; volume.CoverImage = chapter.CoverImage;
_unitOfWork.VolumeRepository.Update(volume); _unitOfWork.VolumeRepository.Update(volume);
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId); var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(volume.SeriesId);

View File

@ -1,6 +1,7 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data.Repositories;
using API.DTOs; using API.DTOs;
using API.Extensions; using API.Extensions;
using API.Interfaces; using API.Interfaces;
@ -38,11 +39,24 @@ namespace API.Controllers
return Ok(await _unitOfWork.UserRepository.GetMembersAsync()); return Ok(await _unitOfWork.UserRepository.GetMembersAsync());
} }
[AllowAnonymous]
[HttpGet("names")]
public async Task<ActionResult<IEnumerable<MemberDto>>> GetUserNames()
{
var setting = await _unitOfWork.SettingsRepository.GetSettingsDtoAsync();
if (setting.EnableAuthentication)
{
return Unauthorized("This API cannot be used given your server's configuration");
}
var members = await _unitOfWork.UserRepository.GetMembersAsync();
return Ok(members.Select(m => m.Username));
}
[HttpGet("has-reading-progress")] [HttpGet("has-reading-progress")]
public async Task<ActionResult<bool>> HasReadingProgress(int libraryId) public async Task<ActionResult<bool>> HasReadingProgress(int libraryId)
{ {
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId);
var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername()); var userId = await _unitOfWork.UserRepository.GetUserIdByUsernameAsync(User.GetUsername());
var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);
return Ok(await _unitOfWork.AppUserProgressRepository.UserHasProgress(library.Type, userId)); return Ok(await _unitOfWork.AppUserProgressRepository.UserHasProgress(library.Type, userId));
} }

View File

@ -1,8 +1,8 @@
namespace API.DTOs namespace API.DTOs.Account
{ {
public class LoginDto public class LoginDto
{ {
public string Username { get; init; } public string Username { get; init; }
public string Password { get; init; } public string Password { get; set; }
} }
} }

View File

@ -1,4 +1,5 @@
using System.Collections.Generic; using System;
using System.Collections.Generic;
using API.Entities.Enums; using API.Entities.Enums;
namespace API.DTOs namespace API.DTOs
@ -7,8 +8,11 @@ namespace API.DTOs
{ {
public int Id { get; init; } public int Id { get; init; }
public string Name { get; init; } public string Name { get; init; }
public string CoverImage { get; init; } /// <summary>
/// Last time Library was scanned
/// </summary>
public DateTime LastScanned { get; init; }
public LibraryType Type { get; init; } public LibraryType Type { get; init; }
public ICollection<string> Folders { get; init; } public ICollection<string> Folders { get; init; }
} }
} }

View File

@ -23,7 +23,7 @@ namespace API.DTOs.OPDS
public string Title { get; set; } public string Title { get; set; }
[XmlAttribute("count", Namespace = "http://vaemendis.net/opds-pse/ns")] [XmlAttribute("count", Namespace = "http://vaemendis.net/opds-pse/ns")]
public int TotalPages { get; set; } = 0; public int TotalPages { get; set; }
public bool ShouldSerializeTotalPages() public bool ShouldSerializeTotalPages()
{ {

View File

@ -1,5 +1,4 @@
using API.Entities.Enums; using API.Entities.Enums;
using Newtonsoft.Json;
namespace API.DTOs.Reader namespace API.DTOs.Reader
{ {

View File

@ -8,7 +8,7 @@ namespace API.DTOs
public string Username { get; init; } public string Username { get; init; }
[Required] [Required]
[StringLength(32, MinimumLength = 6)] [StringLength(32, MinimumLength = 6)]
public string Password { get; init; } public string Password { get; set; }
public bool IsAdmin { get; init; } public bool IsAdmin { get; init; }
} }
} }

View File

@ -1,4 +1,4 @@
namespace API.DTOs namespace API.DTOs.Settings
{ {
public class ServerSettingDto public class ServerSettingDto
{ {
@ -21,5 +21,14 @@
/// Enables OPDS connections to be made to the server. /// Enables OPDS connections to be made to the server.
/// </summary> /// </summary>
public bool EnableOpds { get; set; } public bool EnableOpds { get; set; }
/// <summary>
/// Enables Authentication on the server. Defaults to true.
/// </summary>
public bool EnableAuthentication { get; set; }
/// <summary>
/// Base Url for the kavita. Requires restart to take effect.
/// </summary>
public string BaseUrl { get; set; }
} }
} }

View File

@ -0,0 +1,51 @@
namespace API.Data.Metadata
{
/// <summary>
/// A representation of a ComicInfo.xml file
/// </summary>
/// <remarks>See reference of the loose spec here: https://github.com/Kussie/ComicInfoStandard/blob/main/ComicInfo.xsd</remarks>
public class ComicInfo
{
public string Summary { get; set; }
public string Title { get; set; }
public string Series { get; set; }
public string Number { get; set; }
public string Volume { get; set; }
public string Notes { get; set; }
public string Genre { get; set; }
public int PageCount { get; set; }
// ReSharper disable once InconsistentNaming
public string LanguageISO { get; set; }
public string Web { get; set; }
public int Month { get; set; }
public int Year { get; set; }
/// <summary>
/// Rating based on the content. Think PG-13, R for movies
/// </summary>
public string AgeRating { get; set; }
/// <summary>
/// User's rating of the content
/// </summary>
public float UserRating { get; set; }
public string AlternateSeries { get; set; }
public string StoryArc { get; set; }
public string SeriesGroup { get; set; }
public string AlternativeSeries { get; set; }
public string AlternativeNumber { get; set; }
/// <summary>
/// This is the Author. For Books, we map creator tag in OPF to this field. Comma separated if multiple.
/// </summary>
public string Writer { get; set; } // TODO: Validate if we should make this a list of writers
public string Penciller { get; set; }
public string Inker { get; set; }
public string Colorist { get; set; }
public string Letterer { get; set; }
public string CoverArtist { get; set; }
public string Editor { get; set; }
public string Publisher { get; set; }
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,25 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
namespace API.Data.Migrations
{
public partial class LastScannedLibrary : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<DateTime>(
name: "LastScanned",
table: "Library",
type: "TEXT",
nullable: false,
defaultValue: new DateTime(1, 1, 1, 0, 0, 0, 0, DateTimeKind.Unspecified));
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "LastScanned",
table: "Library");
}
}
}

View File

@ -397,6 +397,9 @@ namespace API.Data.Migrations
b.Property<DateTime>("LastModified") b.Property<DateTime>("LastModified")
.HasColumnType("TEXT"); .HasColumnType("TEXT");
b.Property<DateTime>("LastScanned")
.HasColumnType("TEXT");
b.Property<string>("Name") b.Property<string>("Name")
.HasColumnType("TEXT"); .HasColumnType("TEXT");

View File

@ -1,7 +1,5 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.IO;
using System.Linq; using System.Linq;
using System.Text;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.DTOs; using API.DTOs;
using API.DTOs.Reader; using API.DTOs.Reader;

View File

@ -1,4 +1,5 @@
using System.Collections.Generic; using System;
using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.DTOs; using API.DTOs;
@ -11,6 +12,17 @@ using Microsoft.EntityFrameworkCore;
namespace API.Data.Repositories namespace API.Data.Repositories
{ {
[Flags]
public enum LibraryIncludes
{
None = 1,
Series = 2,
AppUser = 4,
Folders = 8,
// Ratings = 16
}
public class LibraryRepository : ILibraryRepository public class LibraryRepository : ILibraryRepository
{ {
private readonly DataContext _context; private readonly DataContext _context;
@ -58,7 +70,7 @@ namespace API.Data.Repositories
public async Task<bool> DeleteLibrary(int libraryId) public async Task<bool> DeleteLibrary(int libraryId)
{ {
var library = await GetLibraryForIdAsync(libraryId); var library = await GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders | LibraryIncludes.Series);
_context.Library.Remove(library); _context.Library.Remove(library);
return await _context.SaveChangesAsync() > 0; return await _context.SaveChangesAsync() > 0;
} }
@ -91,14 +103,37 @@ namespace API.Data.Repositories
.ToListAsync(); .ToListAsync();
} }
public async Task<Library> GetLibraryForIdAsync(int libraryId) public async Task<Library> GetLibraryForIdAsync(int libraryId, LibraryIncludes includes)
{ {
return await _context.Library
.Where(x => x.Id == libraryId) var query = _context.Library
.Include(f => f.Folders) .Where(x => x.Id == libraryId);
.Include(l => l.Series)
.SingleAsync(); query = AddIncludesToQuery(query, includes);
return await query.SingleAsync();
} }
private static IQueryable<Library> AddIncludesToQuery(IQueryable<Library> query, LibraryIncludes includeFlags)
{
if (includeFlags.HasFlag(LibraryIncludes.Folders))
{
query = query.Include(l => l.Folders);
}
if (includeFlags.HasFlag(LibraryIncludes.Series))
{
query = query.Include(l => l.Series);
}
if (includeFlags.HasFlag(LibraryIncludes.AppUser))
{
query = query.Include(l => l.AppUsers);
}
return query;
}
/// <summary> /// <summary>
/// This returns a Library with all it's Series -> Volumes -> Chapters. This is expensive. Should only be called when needed. /// This returns a Library with all it's Series -> Volumes -> Chapters. This is expensive. Should only be called when needed.
/// </summary> /// </summary>
@ -106,7 +141,6 @@ namespace API.Data.Repositories
/// <returns></returns> /// <returns></returns>
public async Task<Library> GetFullLibraryForIdAsync(int libraryId) public async Task<Library> GetFullLibraryForIdAsync(int libraryId)
{ {
return await _context.Library return await _context.Library
.Where(x => x.Id == libraryId) .Where(x => x.Id == libraryId)
.Include(f => f.Folders) .Include(f => f.Folders)

View File

@ -53,7 +53,7 @@ namespace API.Data.Repositories
{ {
return await _context.ReadingList return await _context.ReadingList
.Where(r => r.Id == readingListId) .Where(r => r.Id == readingListId)
.Include(r => r.Items) .Include(r => r.Items.OrderBy(item => item.Order))
.SingleOrDefaultAsync(); .SingleOrDefaultAsync();
} }

View File

@ -1,15 +1,15 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.IO;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Comparators; using API.Data.Scanner;
using API.DTOs; using API.DTOs;
using API.DTOs.Filtering; using API.DTOs.Filtering;
using API.Entities; using API.Entities;
using API.Extensions; using API.Extensions;
using API.Helpers; using API.Helpers;
using API.Interfaces.Repositories; using API.Interfaces.Repositories;
using API.Services.Tasks;
using AutoMapper; using AutoMapper;
using AutoMapper.QueryableExtensions; using AutoMapper.QueryableExtensions;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
@ -26,9 +26,9 @@ namespace API.Data.Repositories
_mapper = mapper; _mapper = mapper;
} }
public void Add(Series series) public void Attach(Series series)
{ {
_context.Series.Add(series); _context.Series.Attach(series);
} }
public void Update(Series series) public void Update(Series series)
@ -36,19 +36,9 @@ namespace API.Data.Repositories
_context.Entry(series).State = EntityState.Modified; _context.Entry(series).State = EntityState.Modified;
} }
public async Task<bool> SaveAllAsync() public void Remove(Series series)
{ {
return await _context.SaveChangesAsync() > 0; _context.Series.Remove(series);
}
public bool SaveAll()
{
return _context.SaveChanges() > 0;
}
public async Task<Series> GetSeriesByNameAsync(string name)
{
return await _context.Series.SingleOrDefaultAsync(x => x.Name == name);
} }
public async Task<bool> DoesSeriesNameExistInLibrary(string name) public async Task<bool> DoesSeriesNameExistInLibrary(string name)
@ -64,11 +54,6 @@ namespace API.Data.Repositories
.CountAsync() > 1; .CountAsync() > 1;
} }
public Series GetSeriesByName(string name)
{
return _context.Series.SingleOrDefault(x => x.Name == name);
}
public async Task<IEnumerable<Series>> GetSeriesForLibraryIdAsync(int libraryId) public async Task<IEnumerable<Series>> GetSeriesForLibraryIdAsync(int libraryId)
{ {
return await _context.Series return await _context.Series
@ -77,6 +62,43 @@ namespace API.Data.Repositories
.ToListAsync(); .ToListAsync();
} }
/// <summary>
/// Used for <see cref="ScannerService"/> to
/// </summary>
/// <param name="libraryId"></param>
/// <returns></returns>
public async Task<PagedList<Series>> GetFullSeriesForLibraryIdAsync(int libraryId, UserParams userParams)
{
var query = _context.Series
.Where(s => s.LibraryId == libraryId)
.Include(s => s.Metadata)
.Include(s => s.Volumes)
.ThenInclude(v => v.Chapters)
.ThenInclude(c => c.Files)
.AsSplitQuery()
.OrderBy(s => s.SortName);
return await PagedList<Series>.CreateAsync(query, userParams.PageNumber, userParams.PageSize);
}
/// <summary>
/// This is a heavy call. Returns all entities down to Files and Library and Series Metadata.
/// </summary>
/// <param name="seriesId"></param>
/// <returns></returns>
public async Task<Series> GetFullSeriesForSeriesIdAsync(int seriesId)
{
return await _context.Series
.Where(s => s.Id == seriesId)
.Include(s => s.Metadata)
.Include(s => s.Library)
.Include(s => s.Volumes)
.ThenInclude(v => v.Chapters)
.ThenInclude(c => c.Files)
.AsSplitQuery()
.SingleOrDefaultAsync();
}
public async Task<PagedList<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams, FilterDto filter) public async Task<PagedList<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams, FilterDto filter)
{ {
var formats = filter.GetSqlFilter(); var formats = filter.GetSqlFilter();
@ -103,41 +125,12 @@ namespace API.Data.Repositories
.ToListAsync(); .ToListAsync();
} }
public async Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId)
{
var volumes = await _context.Volume
.Where(vol => vol.SeriesId == seriesId)
.Include(vol => vol.Chapters)
.OrderBy(volume => volume.Number)
.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
.AsNoTracking()
.ToListAsync();
await AddVolumeModifiers(userId, volumes);
SortSpecialChapters(volumes);
return volumes;
}
private static void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
{
var sorter = new NaturalSortComparer();
foreach (var v in volumes.Where(vDto => vDto.Number == 0))
{
v.Chapters = v.Chapters.OrderBy(x => x.Range, sorter).ToList();
}
}
public async Task<IEnumerable<Volume>> GetVolumes(int seriesId)
{
return await _context.Volume
.Where(vol => vol.SeriesId == seriesId)
.Include(vol => vol.Chapters)
.ThenInclude(c => c.Files)
.OrderBy(vol => vol.Number)
.ToListAsync();
}
public async Task<SeriesDto> GetSeriesDtoByIdAsync(int seriesId, int userId) public async Task<SeriesDto> GetSeriesDtoByIdAsync(int seriesId, int userId)
{ {
@ -151,55 +144,8 @@ namespace API.Data.Repositories
return seriesList[0]; return seriesList[0];
} }
public async Task<Volume> GetVolumeAsync(int volumeId)
{
return await _context.Volume
.Include(vol => vol.Chapters)
.ThenInclude(c => c.Files)
.SingleOrDefaultAsync(vol => vol.Id == volumeId);
}
public async Task<VolumeDto> GetVolumeDtoAsync(int volumeId)
{
return await _context.Volume
.Where(vol => vol.Id == volumeId)
.AsNoTracking()
.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
.SingleAsync();
}
public async Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId)
{
var volume = await _context.Volume
.Where(vol => vol.Id == volumeId)
.Include(vol => vol.Chapters)
.ThenInclude(c => c.Files)
.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
.SingleAsync(vol => vol.Id == volumeId);
var volumeList = new List<VolumeDto>() {volume};
await AddVolumeModifiers(userId, volumeList);
return volumeList[0];
}
/// <summary>
/// Returns all volumes that contain a seriesId in passed array.
/// </summary>
/// <param name="seriesIds"></param>
/// <returns></returns>
public async Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false)
{
var query = _context.Volume
.Where(v => seriesIds.Contains(v.SeriesId));
if (includeChapters)
{
query = query.Include(v => v.Chapters);
}
return await query.ToListAsync();
}
public async Task<bool> DeleteSeriesAsync(int seriesId) public async Task<bool> DeleteSeriesAsync(int seriesId)
{ {
@ -209,11 +155,12 @@ namespace API.Data.Repositories
return await _context.SaveChangesAsync() > 0; return await _context.SaveChangesAsync() > 0;
} }
public async Task<Volume> GetVolumeByIdAsync(int volumeId)
{
return await _context.Volume.SingleOrDefaultAsync(x => x.Id == volumeId);
}
/// <summary>
/// Returns Volumes, Metadata, and Collection Tags
/// </summary>
/// <param name="seriesId"></param>
/// <returns></returns>
public async Task<Series> GetSeriesByIdAsync(int seriesId) public async Task<Series> GetSeriesByIdAsync(int seriesId)
{ {
return await _context.Series return await _context.Series
@ -244,7 +191,7 @@ namespace API.Data.Repositories
} }
/// <summary> /// <summary>
/// This returns a list of tuples<chapterId, seriesId> back for each series id passed /// This returns a dictionary mapping seriesId -> list of chapters back for each series id passed
/// </summary> /// </summary>
/// <param name="seriesIds"></param> /// <param name="seriesIds"></param>
/// <returns></returns> /// <returns></returns>
@ -301,24 +248,7 @@ namespace API.Data.Repositories
.SingleOrDefaultAsync(); .SingleOrDefaultAsync();
} }
/// <summary>
/// Mutates the given Volume DTOs (and their Chapters) in place, setting PagesRead
/// from the user's stored reading progress.
/// </summary>
/// <param name="userId">User whose AppUserProgresses rows are read</param>
/// <param name="volumes">Volume DTOs (with Chapters populated) to annotate</param>
private async Task AddVolumeModifiers(int userId, IReadOnlyCollection<VolumeDto> volumes)
{
var volIds = volumes.Select(s => s.Id);
// One untracked query fetches all progress rows covering any requested volume
var userProgress = await _context.AppUserProgresses
.Where(p => p.AppUserId == userId && volIds.Contains(p.VolumeId))
.AsNoTracking()
.ToListAsync();
// Sum progress per chapter, then per volume; entities with no rows sum to 0
foreach (var v in volumes)
{
foreach (var c in v.Chapters)
{
c.PagesRead = userProgress.Where(p => p.ChapterId == c.Id).Sum(p => p.PagesRead);
}
v.PagesRead = userProgress.Where(p => p.VolumeId == v.Id).Sum(p => p.PagesRead);
}
}
/// <summary> /// <summary>
/// Returns a list of Series that were added, ordered by Created desc /// Returns a list of Series that were added, ordered by Created desc
@ -497,5 +427,63 @@ namespace API.Data.Repositories
.AsNoTracking() .AsNoTracking()
.ToListAsync(); .ToListAsync();
} }
/// <summary>
/// Returns the number of series for a given library (or all libraries if libraryId is 0)
/// </summary>
/// <param name="libraryId">Defaults to 0, library to restrict count to</param>
/// <returns>Series count</returns>
private async Task<int> GetSeriesCount(int libraryId = 0)
{
    var seriesQuery = _context.Series.AsQueryable();
    if (libraryId > 0)
    {
        seriesQuery = seriesQuery.Where(s => s.LibraryId == libraryId);
    }
    return await seriesQuery.CountAsync();
}
/// <summary>
/// Returns the number of series that should be processed in parallel to optimize speed and memory. Minimum of 50
/// </summary>
/// <param name="libraryId">Defaults to 0 meaning no library</param>
/// <returns>Tuple of (total series, chunk size)</returns>
private async Task<Tuple<int, int>> GetChunkSize(int libraryId = 0)
{
    // TODO: Think about making this bigger depending on number of files a user has in said library
    // and number of cores and amount of memory. We can then make an optimal choice
    var seriesTotal = await GetSeriesCount(libraryId);
    var workerCount = Math.Max(Environment.ProcessorCount - 1, 1);

    // Tiny libraries: a single chunk covering everything
    if (seriesTotal < workerCount * 2 || seriesTotal < 50)
    {
        return Tuple.Create(seriesTotal, seriesTotal);
    }

    return Tuple.Create(seriesTotal, Math.Max(seriesTotal / workerCount, 50));
}
/// <summary>
/// Computes chunking information (total entities, chunk size, chunk count) used when
/// iterating a library's series in batches.
/// </summary>
/// <param name="libraryId">Defaults to 0, meaning all libraries</param>
public async Task<Chunk> GetChunkInfo(int libraryId = 0)
{
    var (totalSeries, chunkSize) = await GetChunkSize(libraryId);

    // Nothing to iterate — return an all-zero descriptor
    if (totalSeries == 0)
    {
        return new Chunk()
        {
            TotalChunks = 0,
            TotalSize = 0,
            ChunkSize = 0
        };
    }

    // Ceiling division, clamped to at least one chunk
    var chunkCount = Math.Max((int) Math.Ceiling(totalSeries / (chunkSize * 1.0)), 1);
    return new Chunk()
    {
        TotalSize = totalSeries,
        ChunkSize = chunkSize,
        TotalChunks = chunkCount
    };
}
} }
} }

View File

@ -1,7 +1,7 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.DTOs; using API.DTOs.Settings;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
using API.Interfaces.Repositories; using API.Interfaces.Repositories;
@ -35,6 +35,15 @@ namespace API.Data.Repositories
return _mapper.Map<ServerSettingDto>(settings); return _mapper.Map<ServerSettingDto>(settings);
} }
/// <summary>
/// Synchronous variant of GetSettingsDtoAsync: reads all server settings untracked
/// and maps the row set onto a single ServerSettingDto.
/// </summary>
public ServerSettingDto GetSettingsDto()
{
    // Fix: dropped the no-op identity projection (.Select(x => x)) the original chained in.
    var settings = _context.ServerSetting
        .AsNoTracking()
        .ToList();
    return _mapper.Map<ServerSettingDto>(settings);
}
public Task<ServerSetting> GetSettingAsync(ServerSettingKey key) public Task<ServerSetting> GetSettingAsync(ServerSettingKey key)
{ {
return _context.ServerSetting.SingleOrDefaultAsync(x => x.Key == key); return _context.ServerSetting.SingleOrDefaultAsync(x => x.Key == key);

View File

@ -153,6 +153,16 @@ namespace API.Data.Repositories
return await _userManager.GetUsersInRoleAsync(PolicyConstants.AdminRole); return await _userManager.GetUsersInRoleAsync(PolicyConstants.AdminRole);
} }
/// <summary>
/// Returns all users holding the "Pleb" (non-admin) role.
/// </summary>
public async Task<IEnumerable<AppUser>> GetNonAdminUsersAsync()
{
return await _userManager.GetUsersInRoleAsync(PolicyConstants.PlebRole);
}
/// <summary>
/// True when the given user is in the Admin role.
/// </summary>
public async Task<bool> IsUserAdmin(AppUser user)
{
return await _userManager.IsInRoleAsync(user, PolicyConstants.AdminRole);
}
public async Task<AppUserRating> GetUserRating(int seriesId, int userId) public async Task<AppUserRating> GetUserRating(int seriesId, int userId)
{ {
return await _context.AppUserRating.Where(r => r.SeriesId == seriesId && r.AppUserId == userId) return await _context.AppUserRating.Where(r => r.SeriesId == seriesId && r.AppUserId == userId)
@ -237,8 +247,8 @@ namespace API.Data.Repositories
Libraries = u.Libraries.Select(l => new LibraryDto Libraries = u.Libraries.Select(l => new LibraryDto
{ {
Name = l.Name, Name = l.Name,
CoverImage = l.CoverImage,
Type = l.Type, Type = l.Type,
LastScanned = l.LastScanned,
Folders = l.Folders.Select(x => x.Path).ToList() Folders = l.Folders.Select(x => x.Path).ToList()
}).ToList() }).ToList()
}) })

View File

@ -1,9 +1,8 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.IO;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Comparators;
using API.DTOs; using API.DTOs;
using API.DTOs.Reader;
using API.Entities; using API.Entities;
using API.Interfaces.Repositories; using API.Interfaces.Repositories;
using AutoMapper; using AutoMapper;
@ -15,10 +14,17 @@ namespace API.Data.Repositories
public class VolumeRepository : IVolumeRepository public class VolumeRepository : IVolumeRepository
{ {
private readonly DataContext _context; private readonly DataContext _context;
private readonly IMapper _mapper;
public VolumeRepository(DataContext context) public VolumeRepository(DataContext context, IMapper mapper)
{ {
_context = context; _context = context;
_mapper = mapper;
}
/// <summary>
/// Stages a new Volume for insertion; persisted on the next SaveChanges.
/// </summary>
public void Add(Volume volume)
{
_context.Volume.Add(volume);
}
public void Update(Volume volume) public void Update(Volume volume)
@ -26,6 +32,16 @@ namespace API.Data.Repositories
_context.Entry(volume).State = EntityState.Modified; _context.Entry(volume).State = EntityState.Modified;
} }
/// <summary>
/// Stages a Volume for deletion; persisted on the next SaveChanges.
/// </summary>
public void Remove(Volume volume)
{
_context.Volume.Remove(volume);
}
/// <summary>
/// Returns a list of non-tracked files for a given volume.
/// </summary>
/// <param name="volumeId"></param>
/// <returns></returns>
public async Task<IList<MangaFile>> GetFilesForVolume(int volumeId) public async Task<IList<MangaFile>> GetFilesForVolume(int volumeId)
{ {
return await _context.Chapter return await _context.Chapter
@ -36,6 +52,11 @@ namespace API.Data.Repositories
.ToListAsync(); .ToListAsync();
} }
/// <summary>
/// Returns the cover image file for the given volume
/// </summary>
/// <param name="volumeId"></param>
/// <returns></returns>
public async Task<string> GetVolumeCoverImageAsync(int volumeId) public async Task<string> GetVolumeCoverImageAsync(int volumeId)
{ {
return await _context.Volume return await _context.Volume
@ -45,6 +66,11 @@ namespace API.Data.Repositories
.SingleOrDefaultAsync(); .SingleOrDefaultAsync();
} }
/// <summary>
/// Returns all chapter Ids belonging to a list of Volume Ids
/// </summary>
/// <param name="volumeIds"></param>
/// <returns></returns>
public async Task<IList<int>> GetChapterIdsByVolumeIds(IReadOnlyList<int> volumeIds) public async Task<IList<int>> GetChapterIdsByVolumeIds(IReadOnlyList<int> volumeIds)
{ {
return await _context.Chapter return await _context.Chapter
@ -52,5 +78,131 @@ namespace API.Data.Repositories
.Select(c => c.Id) .Select(c => c.Id)
.ToListAsync(); .ToListAsync();
} }
/// <summary>
/// Returns all volumes that contain a seriesId in passed array.
/// </summary>
/// <param name="seriesIds">Series ids to match against</param>
/// <param name="includeChapters">When true, eagerly loads each volume's Chapters</param>
public async Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false)
{
    IQueryable<Volume> matching = _context.Volume
        .Where(volume => seriesIds.Contains(volume.SeriesId));
    if (includeChapters)
    {
        matching = matching.Include(volume => volume.Chapters);
    }
    return await matching.ToListAsync();
}
/// <summary>
/// Returns an individual Volume including Chapters and Files and Reading Progress for a given volumeId
/// </summary>
/// <param name="volumeId">Id of the volume to load</param>
/// <param name="userId">User whose progress is folded into the DTO</param>
public async Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId)
{
    // Fix: the id filter was duplicated — once in Where() and again as a predicate
    // in SingleAsync(). Filter once; SingleAsync() now only enforces "exactly one row".
    var volume = await _context.Volume
        .Where(vol => vol.Id == volumeId)
        .Include(vol => vol.Chapters)
        .ThenInclude(c => c.Files)
        .ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
        .SingleAsync();

    var volumeList = new List<VolumeDto>() {volume};
    await AddVolumeModifiers(userId, volumeList);

    return volumeList[0];
}
/// <summary>
/// Returns the full Volumes including Chapters and Files for a given series
/// </summary>
/// <param name="seriesId">Series whose volumes are returned, ordered by Number</param>
public async Task<IEnumerable<Volume>> GetVolumes(int seriesId)
{
    var volumesForSeries = _context.Volume
        .Where(v => v.SeriesId == seriesId)
        .Include(v => v.Chapters)
        .ThenInclude(chapter => chapter.Files)
        .OrderBy(v => v.Number);
    return await volumesForSeries.ToListAsync();
}
/// <summary>
/// Returns a single volume with Chapter and Files
/// </summary>
/// <param name="volumeId">Id of the volume; null is returned when no match exists</param>
public async Task<Volume> GetVolumeAsync(int volumeId)
{
    var withChaptersAndFiles = _context.Volume
        .Include(volume => volume.Chapters)
        .ThenInclude(chapter => chapter.Files);
    return await withChaptersAndFiles.SingleOrDefaultAsync(volume => volume.Id == volumeId);
}
/// <summary>
/// Returns all volumes for a given series with progress information attached. Includes all Chapters as well.
/// </summary>
/// <param name="seriesId">Series whose volumes are projected</param>
/// <param name="userId">User whose reading progress is applied</param>
public async Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId)
{
// Untracked query, projected to DTOs and ordered by volume number
var volumes = await _context.Volume
.Where(vol => vol.SeriesId == seriesId)
.Include(vol => vol.Chapters)
.OrderBy(volume => volume.Number)
.ProjectTo<VolumeDto>(_mapper.ConfigurationProvider)
.AsNoTracking()
.ToListAsync();
// Attach per-user PagesRead, then natural-sort chapters of "special" (Number == 0) volumes
await AddVolumeModifiers(userId, volumes);
SortSpecialChapters(volumes);
return volumes;
}
/// <summary>
/// Plain lookup of a Volume by id with no navigation properties loaded; null when not found.
/// </summary>
public async Task<Volume> GetVolumeByIdAsync(int volumeId)
{
return await _context.Volume.SingleOrDefaultAsync(x => x.Id == volumeId);
}
/// <summary>
/// Natural-sorts the chapters of "special" volumes (Number == 0) by their Range string,
/// so e.g. "2" orders before "10".
/// </summary>
private static void SortSpecialChapters(IEnumerable<VolumeDto> volumes)
{
var sorter = new NaturalSortComparer();
foreach (var v in volumes.Where(vDto => vDto.Number == 0))
{
v.Chapters = v.Chapters.OrderBy(x => x.Range, sorter).ToList();
}
}
/// <summary>
/// Mutates the given Volume DTOs (and their Chapters) in place, setting PagesRead
/// from the user's stored reading progress.
/// </summary>
/// <param name="userId">User whose AppUserProgresses rows are read</param>
/// <param name="volumes">Volume DTOs (with Chapters populated) to annotate</param>
private async Task AddVolumeModifiers(int userId, IReadOnlyCollection<VolumeDto> volumes)
{
    var volIds = volumes.Select(s => s.Id);
    var userProgress = await _context.AppUserProgresses
        .Where(p => p.AppUserId == userId && volIds.Contains(p.VolumeId))
        .AsNoTracking()
        .ToListAsync();

    // Pre-aggregate once instead of re-scanning the progress list for every chapter and
    // volume (was O(volumes * chapters * progressRows)); missing keys sum to 0 as before.
    var pagesByChapter = userProgress
        .GroupBy(p => p.ChapterId)
        .ToDictionary(g => g.Key, g => g.Sum(p => p.PagesRead));
    var pagesByVolume = userProgress
        .GroupBy(p => p.VolumeId)
        .ToDictionary(g => g.Key, g => g.Sum(p => p.PagesRead));

    foreach (var v in volumes)
    {
        foreach (var c in v.Chapters)
        {
            c.PagesRead = pagesByChapter.TryGetValue(c.Id, out var chapterPages) ? chapterPages : 0;
        }
        v.PagesRead = pagesByVolume.TryGetValue(v.Id, out var volumePages) ? volumePages : 0;
    }
}
} }
} }

21
API/Data/Scanner/Chunk.cs Normal file
View File

@ -0,0 +1,21 @@
namespace API.Data.Scanner
{
/// <summary>
/// Represents a set of Entities which is broken up and iterated on
/// (see GetChunkInfo in the series repository for how the fields are computed).
/// </summary>
public class Chunk
{
/// <summary>
/// Total number of entities
/// </summary>
public int TotalSize { get; set; }
/// <summary>
/// Size of each chunk to iterate over
/// </summary>
public int ChunkSize { get; set; }
/// <summary>
/// Total chunks to iterate over (ceiling of TotalSize / ChunkSize, minimum 1 when non-empty)
/// </summary>
public int TotalChunks { get; set; }
}
}

View File

@ -49,6 +49,8 @@ namespace API.Data
new () {Key = ServerSettingKey.Port, Value = "5000"}, // Not used from DB, but DB is sync with appSettings.json new () {Key = ServerSettingKey.Port, Value = "5000"}, // Not used from DB, but DB is sync with appSettings.json
new () {Key = ServerSettingKey.AllowStatCollection, Value = "true"}, new () {Key = ServerSettingKey.AllowStatCollection, Value = "true"},
new () {Key = ServerSettingKey.EnableOpds, Value = "false"}, new () {Key = ServerSettingKey.EnableOpds, Value = "false"},
new () {Key = ServerSettingKey.EnableAuthentication, Value = "true"},
new () {Key = ServerSettingKey.BaseUrl, Value = "/"},
}; };
foreach (var defaultSetting in defaultSettings) foreach (var defaultSetting in defaultSettings)

View File

@ -1,6 +1,5 @@
 
using System; using System;
using System.ComponentModel.DataAnnotations;
using API.Entities.Interfaces; using API.Entities.Interfaces;
namespace API.Entities namespace API.Entities

View File

@ -20,6 +20,10 @@ namespace API.Entities.Enums
AllowStatCollection = 6, AllowStatCollection = 6,
[Description("EnableOpds")] [Description("EnableOpds")]
EnableOpds = 7, EnableOpds = 7,
[Description("EnableAuthentication")]
EnableAuthentication = 8,
[Description("BaseUrl")]
BaseUrl = 9
} }
} }

View File

@ -8,12 +8,12 @@ namespace API.Entities
public int Id { get; set; } public int Id { get; set; }
public string Path { get; set; } public string Path { get; set; }
/// <summary> /// <summary>
/// Used when scanning to see if we can skip if nothing has changed. /// Used when scanning to see if we can skip if nothing has changed. (not implemented)
/// </summary> /// </summary>
public DateTime LastScanned { get; set; } public DateTime LastScanned { get; set; }
// Relationship // Relationship
public Library Library { get; set; } public Library Library { get; set; }
public int LibraryId { get; set; } public int LibraryId { get; set; }
} }
} }

View File

@ -13,9 +13,13 @@ namespace API.Entities
public LibraryType Type { get; set; } public LibraryType Type { get; set; }
public DateTime Created { get; set; } public DateTime Created { get; set; }
public DateTime LastModified { get; set; } public DateTime LastModified { get; set; }
/// <summary>
/// Last time Library was scanned
/// </summary>
public DateTime LastScanned { get; set; }
public ICollection<FolderPath> Folders { get; set; } public ICollection<FolderPath> Folders { get; set; }
public ICollection<AppUser> AppUsers { get; set; } public ICollection<AppUser> AppUsers { get; set; }
public ICollection<Series> Series { get; set; } public ICollection<Series> Series { get; set; }
} }
} }

View File

@ -38,5 +38,13 @@ namespace API.Entities
{ {
return File.GetLastWriteTime(FilePath) > LastModified; return File.GetLastWriteTime(FilePath) > LastModified;
} }
/// <summary>
/// Updates the Last Modified time of the underlying file
/// </summary>
/// <remarks>Touches the file system (File.GetLastWriteTime); assumes FilePath points
/// at an accessible file — confirm callers guard against missing files.</remarks>
public void UpdateLastModified()
{
LastModified = File.GetLastWriteTime(FilePath);
}
} }
} }

View File

@ -33,7 +33,7 @@ namespace API.Entities
/// <summary> /// <summary>
/// Summary information related to the Series /// Summary information related to the Series
/// </summary> /// </summary>
public string Summary { get; set; } // TODO: Migrate into SeriesMetadata (with Metadata update) public string Summary { get; set; } // NOTE: Migrate into SeriesMetadata (with Metadata update)
public DateTime Created { get; set; } public DateTime Created { get; set; }
public DateTime LastModified { get; set; } public DateTime LastModified { get; set; }
/// <summary> /// <summary>

View File

@ -8,6 +8,9 @@ namespace API.Entities
public class Volume : IEntityDate public class Volume : IEntityDate
{ {
public int Id { get; set; } public int Id { get; set; }
/// <summary>
/// A String representation of the volume number. Allows for floats
/// </summary>
public string Name { get; set; } public string Name { get; set; }
public int Number { get; set; } public int Number { get; set; }
public IList<Chapter> Chapters { get; set; } public IList<Chapter> Chapters { get; set; }

View File

@ -36,12 +36,13 @@ namespace API.Extensions
services.AddScoped<IVersionUpdaterService, VersionUpdaterService>(); services.AddScoped<IVersionUpdaterService, VersionUpdaterService>();
services.AddScoped<IDownloadService, DownloadService>(); services.AddScoped<IDownloadService, DownloadService>();
services.AddScoped<IReaderService, ReaderService>(); services.AddScoped<IReaderService, ReaderService>();
services.AddScoped<IAccountService, AccountService>();
services.AddScoped<IPresenceTracker, PresenceTracker>(); services.AddScoped<IPresenceTracker, PresenceTracker>();
services.AddSqLite(config, env); services.AddSqLite(config, env);
services.AddLogging(config); services.AddLogging(config);
services.AddSignalR(); services.AddSignalR(opt => opt.EnableDetailedErrors = true);
} }
private static void AddSqLite(this IServiceCollection services, IConfiguration config, private static void AddSqLite(this IServiceCollection services, IConfiguration config,

View File

@ -76,7 +76,8 @@ namespace API.Extensions
directoryIndex++; directoryIndex++;
} }
foreach (var subDirectory in directory.EnumerateDirectories()) var sort = new NaturalSortComparer();
foreach (var subDirectory in directory.EnumerateDirectories().OrderBy(d => d.FullName, sort))
{ {
FlattenDirectory(root, subDirectory, ref directoryIndex); FlattenDirectory(root, subDirectory, ref directoryIndex);
} }

View File

@ -1,4 +1,5 @@
using System.Linq; using System.IO;
using System.Linq;
using System.Text; using System.Text;
using System.Text.Json; using System.Text.Json;
using API.Helpers; using API.Helpers;
@ -41,8 +42,9 @@ namespace API.Extensions
public static void AddCacheHeader(this HttpResponse response, string filename) public static void AddCacheHeader(this HttpResponse response, string filename)
{ {
if (filename == null || filename.Length <= 0) return; if (filename == null || filename.Length <= 0) return;
var hashContent = filename + File.GetLastWriteTimeUtc(filename);
using var sha1 = new System.Security.Cryptography.SHA256CryptoServiceProvider(); using var sha1 = new System.Security.Cryptography.SHA256CryptoServiceProvider();
response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(Encoding.UTF8.GetBytes(filename)).Select(x => x.ToString("X2")))); response.Headers.Add("ETag", string.Concat(sha1.ComputeHash(Encoding.UTF8.GetBytes(hashContent)).Select(x => x.ToString("X2"))));
} }
} }

View File

@ -3,6 +3,7 @@ using System.Linq;
using API.DTOs; using API.DTOs;
using API.DTOs.Reader; using API.DTOs.Reader;
using API.DTOs.ReadingLists; using API.DTOs.ReadingLists;
using API.DTOs.Settings;
using API.Entities; using API.Entities;
using API.Helpers.Converters; using API.Helpers.Converters;
using AutoMapper; using AutoMapper;

View File

@ -1,5 +1,5 @@
using System.Collections.Generic; using System.Collections.Generic;
using API.DTOs; using API.DTOs.Settings;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
using AutoMapper; using AutoMapper;
@ -36,6 +36,12 @@ namespace API.Helpers.Converters
case ServerSettingKey.EnableOpds: case ServerSettingKey.EnableOpds:
destination.EnableOpds = bool.Parse(row.Value); destination.EnableOpds = bool.Parse(row.Value);
break; break;
case ServerSettingKey.EnableAuthentication:
destination.EnableAuthentication = bool.Parse(row.Value);
break;
case ServerSettingKey.BaseUrl:
destination.BaseUrl = row.Value;
break;
} }
} }

View File

@ -1,5 +1,6 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data.Repositories;
using API.DTOs; using API.DTOs;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
@ -13,7 +14,7 @@ namespace API.Interfaces.Repositories
void Delete(Library library); void Delete(Library library);
Task<IEnumerable<LibraryDto>> GetLibraryDtosAsync(); Task<IEnumerable<LibraryDto>> GetLibraryDtosAsync();
Task<bool> LibraryExists(string libraryName); Task<bool> LibraryExists(string libraryName);
Task<Library> GetLibraryForIdAsync(int libraryId); Task<Library> GetLibraryForIdAsync(int libraryId, LibraryIncludes includes);
Task<Library> GetFullLibraryForIdAsync(int libraryId); Task<Library> GetFullLibraryForIdAsync(int libraryId);
Task<Library> GetFullLibraryForIdAsync(int libraryId, int seriesId); Task<Library> GetFullLibraryForIdAsync(int libraryId, int seriesId);
Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName); Task<IEnumerable<LibraryDto>> GetLibraryDtosForUsernameAsync(string userName);

View File

@ -1,7 +1,6 @@
using System; using System.Collections.Generic;
using System.Collections;
using System.Collections.Generic;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data.Scanner;
using API.DTOs; using API.DTOs;
using API.DTOs.Filtering; using API.DTOs.Filtering;
using API.Entities; using API.Entities;
@ -11,12 +10,10 @@ namespace API.Interfaces.Repositories
{ {
public interface ISeriesRepository public interface ISeriesRepository
{ {
void Add(Series series); void Attach(Series series);
void Update(Series series); void Update(Series series);
Task<Series> GetSeriesByNameAsync(string name); void Remove(Series series);
Task<bool> DoesSeriesNameExistInLibrary(string name); Task<bool> DoesSeriesNameExistInLibrary(string name);
Series GetSeriesByName(string name);
/// <summary> /// <summary>
/// Adds user information like progress, ratings, etc /// Adds user information like progress, ratings, etc
/// </summary> /// </summary>
@ -25,7 +22,6 @@ namespace API.Interfaces.Repositories
/// <param name="userParams"></param> /// <param name="userParams"></param>
/// <returns></returns> /// <returns></returns>
Task<PagedList<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams, FilterDto filter); Task<PagedList<SeriesDto>> GetSeriesDtoForLibraryIdAsync(int libraryId, int userId, UserParams userParams, FilterDto filter);
/// <summary> /// <summary>
/// Does not add user information like progress, ratings, etc. /// Does not add user information like progress, ratings, etc.
/// </summary> /// </summary>
@ -34,20 +30,8 @@ namespace API.Interfaces.Repositories
/// <returns></returns> /// <returns></returns>
Task<IEnumerable<SearchResultDto>> SearchSeries(int[] libraryIds, string searchQuery); Task<IEnumerable<SearchResultDto>> SearchSeries(int[] libraryIds, string searchQuery);
Task<IEnumerable<Series>> GetSeriesForLibraryIdAsync(int libraryId); Task<IEnumerable<Series>> GetSeriesForLibraryIdAsync(int libraryId);
Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId);
Task<IEnumerable<Volume>> GetVolumes(int seriesId);
Task<SeriesDto> GetSeriesDtoByIdAsync(int seriesId, int userId); Task<SeriesDto> GetSeriesDtoByIdAsync(int seriesId, int userId);
Task<Volume> GetVolumeAsync(int volumeId);
Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId);
/// <summary>
/// A fast lookup of just the volume information with no tracking.
/// </summary>
/// <param name="volumeId"></param>
/// <returns></returns>
Task<VolumeDto> GetVolumeDtoAsync(int volumeId);
Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false);
Task<bool> DeleteSeriesAsync(int seriesId); Task<bool> DeleteSeriesAsync(int seriesId);
Task<Volume> GetVolumeByIdAsync(int volumeId);
Task<Series> GetSeriesByIdAsync(int seriesId); Task<Series> GetSeriesByIdAsync(int seriesId);
Task<int[]> GetChapterIdsForSeriesAsync(int[] seriesIds); Task<int[]> GetChapterIdsForSeriesAsync(int[] seriesIds);
Task<IDictionary<int, IList<int>>> GetChapterIdWithSeriesIdForSeriesAsync(int[] seriesIds); Task<IDictionary<int, IList<int>>> GetChapterIdWithSeriesIdForSeriesAsync(int[] seriesIds);
@ -58,16 +42,17 @@ namespace API.Interfaces.Repositories
/// <param name="series"></param> /// <param name="series"></param>
/// <returns></returns> /// <returns></returns>
Task AddSeriesModifiers(int userId, List<SeriesDto> series); Task AddSeriesModifiers(int userId, List<SeriesDto> series);
Task<string> GetSeriesCoverImageAsync(int seriesId); Task<string> GetSeriesCoverImageAsync(int seriesId);
Task<IEnumerable<SeriesDto>> GetInProgress(int userId, int libraryId, UserParams userParams, FilterDto filter); Task<IEnumerable<SeriesDto>> GetInProgress(int userId, int libraryId, UserParams userParams, FilterDto filter);
Task<PagedList<SeriesDto>> GetRecentlyAdded(int libraryId, int userId, UserParams userParams, FilterDto filter); Task<PagedList<SeriesDto>> GetRecentlyAdded(int libraryId, int userId, UserParams userParams, FilterDto filter); // NOTE: Probably put this in LibraryRepo
Task<SeriesMetadataDto> GetSeriesMetadata(int seriesId); Task<SeriesMetadataDto> GetSeriesMetadata(int seriesId);
Task<PagedList<SeriesDto>> GetSeriesDtoForCollectionAsync(int collectionId, int userId, UserParams userParams); Task<PagedList<SeriesDto>> GetSeriesDtoForCollectionAsync(int collectionId, int userId, UserParams userParams);
Task<IList<MangaFile>> GetFilesForSeries(int seriesId); Task<IList<MangaFile>> GetFilesForSeries(int seriesId);
Task<IEnumerable<SeriesDto>> GetSeriesDtoForIdsAsync(IEnumerable<int> seriesIds, int userId); Task<IEnumerable<SeriesDto>> GetSeriesDtoForIdsAsync(IEnumerable<int> seriesIds, int userId);
Task<IList<string>> GetAllCoverImagesAsync(); Task<IList<string>> GetAllCoverImagesAsync();
Task<IEnumerable<string>> GetLockedCoverImagesAsync(); Task<IEnumerable<string>> GetLockedCoverImagesAsync();
Task<PagedList<Series>> GetFullSeriesForLibraryIdAsync(int libraryId, UserParams userParams);
Task<Series> GetFullSeriesForSeriesIdAsync(int seriesId);
Task<Chunk> GetChunkInfo(int libraryId = 0);
} }
} }

View File

@ -1,6 +1,6 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.DTOs; using API.DTOs.Settings;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
@ -10,6 +10,7 @@ namespace API.Interfaces.Repositories
{ {
void Update(ServerSetting settings); void Update(ServerSetting settings);
Task<ServerSettingDto> GetSettingsDtoAsync(); Task<ServerSettingDto> GetSettingsDtoAsync();
ServerSettingDto GetSettingsDto();
Task<ServerSetting> GetSettingAsync(ServerSettingKey key); Task<ServerSetting> GetSettingAsync(ServerSettingKey key);
Task<IEnumerable<ServerSetting>> GetSettingsAsync(); Task<IEnumerable<ServerSetting>> GetSettingsAsync();

View File

@ -15,6 +15,8 @@ namespace API.Interfaces.Repositories
public void Delete(AppUser user); public void Delete(AppUser user);
Task<IEnumerable<MemberDto>> GetMembersAsync(); Task<IEnumerable<MemberDto>> GetMembersAsync();
Task<IEnumerable<AppUser>> GetAdminUsersAsync(); Task<IEnumerable<AppUser>> GetAdminUsersAsync();
Task<IEnumerable<AppUser>> GetNonAdminUsersAsync();
Task<bool> IsUserAdmin(AppUser user);
Task<AppUserRating> GetUserRating(int seriesId, int userId); Task<AppUserRating> GetUserRating(int seriesId, int userId);
Task<AppUserPreferences> GetPreferencesAsync(string username); Task<AppUserPreferences> GetPreferencesAsync(string username);
Task<IEnumerable<BookmarkDto>> GetBookmarkDtosForSeries(int userId, int seriesId); Task<IEnumerable<BookmarkDto>> GetBookmarkDtosForSeries(int userId, int seriesId);

View File

@ -7,9 +7,19 @@ namespace API.Interfaces.Repositories
{ {
public interface IVolumeRepository public interface IVolumeRepository
{ {
void Add(Volume volume);
void Update(Volume volume); void Update(Volume volume);
void Remove(Volume volume);
Task<IList<MangaFile>> GetFilesForVolume(int volumeId); Task<IList<MangaFile>> GetFilesForVolume(int volumeId);
Task<string> GetVolumeCoverImageAsync(int volumeId); Task<string> GetVolumeCoverImageAsync(int volumeId);
Task<IList<int>> GetChapterIdsByVolumeIds(IReadOnlyList<int> volumeIds); Task<IList<int>> GetChapterIdsByVolumeIds(IReadOnlyList<int> volumeIds);
// From Series Repo
Task<IEnumerable<VolumeDto>> GetVolumesDtoAsync(int seriesId, int userId);
Task<Volume> GetVolumeAsync(int volumeId);
Task<VolumeDto> GetVolumeDtoAsync(int volumeId, int userId);
Task<IEnumerable<Volume>> GetVolumesForSeriesAsync(IList<int> seriesIds, bool includeChapters = false);
Task<IEnumerable<Volume>> GetVolumes(int seriesId);
Task<Volume> GetVolumeByIdAsync(int volumeId);
} }
} }

View File

@ -0,0 +1,12 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using API.Entities;
using API.Errors;
namespace API.Interfaces.Services
{
/// <summary>
/// Account-management operations for AppUsers.
/// </summary>
public interface IAccountService
{
/// <summary>
/// Changes the user's password to <paramref name="newPassword"/> and returns any
/// resulting errors. NOTE(review): presumably an empty sequence signals success —
/// confirm against the implementing class.
/// </summary>
Task<IEnumerable<ApiException>> ChangeUserPassword(AppUser user, string newPassword);
}
}

View File

@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO.Compression; using System.IO.Compression;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Archive; using API.Archive;
using API.Data.Metadata;
namespace API.Interfaces.Services namespace API.Interfaces.Services
{ {
@ -12,7 +13,7 @@ namespace API.Interfaces.Services
int GetNumberOfPagesFromArchive(string archivePath); int GetNumberOfPagesFromArchive(string archivePath);
string GetCoverImage(string archivePath, string fileName); string GetCoverImage(string archivePath, string fileName);
bool IsValidArchive(string archivePath); bool IsValidArchive(string archivePath);
string GetSummaryInfo(string archivePath); ComicInfo GetComicInfo(string archivePath);
ArchiveLibrary CanOpen(string archivePath); ArchiveLibrary CanOpen(string archivePath);
bool ArchiveNeedsFlattening(ZipArchive archive); bool ArchiveNeedsFlattening(ZipArchive archive);
Task<Tuple<byte[], string>> CreateZipForDownload(IEnumerable<string> files, string tempFolder); Task<Tuple<byte[], string>> CreateZipForDownload(IEnumerable<string> files, string tempFolder);

View File

@ -1,5 +1,6 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data.Metadata;
using API.Parser; using API.Parser;
using VersOne.Epub; using VersOne.Epub;
@ -20,7 +21,7 @@ namespace API.Interfaces.Services
/// <param name="book">Book Reference, needed for if you expect Import statements</param> /// <param name="book">Book Reference, needed for if you expect Import statements</param>
/// <returns></returns> /// <returns></returns>
Task<string> ScopeStyles(string stylesheetHtml, string apiBase, string filename, EpubBookRef book); Task<string> ScopeStyles(string stylesheetHtml, string apiBase, string filename, EpubBookRef book);
string GetSummaryInfo(string filePath); ComicInfo GetComicInfo(string filePath);
ParserInfo ParseInfo(string filePath); ParserInfo ParseInfo(string filePath);
/// <summary> /// <summary>
/// Extracts a PDF file's pages as images to an target directory /// Extracts a PDF file's pages as images to an target directory

View File

@ -11,9 +11,8 @@ namespace API.Interfaces.Services
/// cover images if forceUpdate is true. /// cover images if forceUpdate is true.
/// </summary> /// </summary>
/// <param name="libraryId">Library to scan against</param> /// <param name="libraryId">Library to scan against</param>
/// <param name="forceUpdate">Force overwriting for cover images</param> Task ScanLibrary(int libraryId);
Task ScanLibrary(int libraryId, bool forceUpdate);
Task ScanLibraries(); Task ScanLibraries();
Task ScanSeries(int libraryId, int seriesId, bool forceUpdate, CancellationToken token); Task ScanSeries(int libraryId, int seriesId, CancellationToken token);
} }
} }

View File

@ -1,7 +1,6 @@
 
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Data;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Comparators; using API.Comparators;
@ -210,7 +209,7 @@ namespace API.Interfaces.Services
/// <returns>-1 if nothing can be found</returns> /// <returns>-1 if nothing can be found</returns>
public async Task<int> GetNextChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId) public async Task<int> GetNextChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId)
{ {
var volumes = (await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, userId)).ToList(); var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).ToList();
var currentVolume = volumes.Single(v => v.Id == volumeId); var currentVolume = volumes.Single(v => v.Id == volumeId);
var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId); var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId);
@ -262,7 +261,7 @@ namespace API.Interfaces.Services
/// <returns>-1 if nothing can be found</returns> /// <returns>-1 if nothing can be found</returns>
public async Task<int> GetPrevChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId) public async Task<int> GetPrevChapterIdAsync(int seriesId, int volumeId, int currentChapterId, int userId)
{ {
var volumes = (await _unitOfWork.SeriesRepository.GetVolumesDtoAsync(seriesId, userId)).Reverse().ToList(); var volumes = (await _unitOfWork.VolumeRepository.GetVolumesDtoAsync(seriesId, userId)).Reverse().ToList();
var currentVolume = volumes.Single(v => v.Id == volumeId); var currentVolume = volumes.Single(v => v.Id == volumeId);
var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId); var currentChapter = currentVolume.Chapters.Single(c => c.Id == currentChapterId);

View File

@ -21,29 +21,28 @@ namespace API.Parser
public const string SupportedExtensions = public const string SupportedExtensions =
ArchiveFileExtensions + "|" + ImageFileExtensions + "|" + BookFileExtensions; ArchiveFileExtensions + "|" + ImageFileExtensions + "|" + BookFileExtensions;
private const RegexOptions MatchOptions =
RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant;
public static readonly Regex FontSrcUrlRegex = new Regex(@"(src:url\(.{1})" + "([^\"']*)" + @"(.{1}\))", public static readonly Regex FontSrcUrlRegex = new Regex(@"(src:url\(.{1})" + "([^\"']*)" + @"(.{1}\))",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout);
RegexTimeout);
public static readonly Regex CssImportUrlRegex = new Regex("(@import\\s[\"|'])(?<Filename>[\\w\\d/\\._-]+)([\"|'];?)", public static readonly Regex CssImportUrlRegex = new Regex("(@import\\s[\"|'])(?<Filename>[\\w\\d/\\._-]+)([\"|'];?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout);
RegexTimeout);
private static readonly string XmlRegexExtensions = @"\.xml"; private static readonly string XmlRegexExtensions = @"\.xml";
private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, private static readonly Regex ImageRegex = new Regex(ImageFileExtensions,
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout);
RegexTimeout);
private static readonly Regex ArchiveFileRegex = new Regex(ArchiveFileExtensions, private static readonly Regex ArchiveFileRegex = new Regex(ArchiveFileExtensions,
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout);
RegexTimeout);
private static readonly Regex XmlRegex = new Regex(XmlRegexExtensions, private static readonly Regex XmlRegex = new Regex(XmlRegexExtensions,
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout);
RegexTimeout);
private static readonly Regex BookFileRegex = new Regex(BookFileExtensions, private static readonly Regex BookFileRegex = new Regex(BookFileExtensions,
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout);
RegexTimeout);
private static readonly Regex CoverImageRegex = new Regex(@"(?<![[a-z]\d])(?:!?)(cover|folder)(?![\w\d])", private static readonly Regex CoverImageRegex = new Regex(@"(?<![[a-z]\d])(?:!?)(cover|folder)(?![\w\d])",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout);
RegexTimeout);
private static readonly Regex NormalizeRegex = new Regex(@"[^a-zA-Z0-9\+]",
MatchOptions, RegexTimeout);
private static readonly Regex[] MangaVolumeRegex = new[] private static readonly Regex[] MangaVolumeRegex = new[]
@ -51,43 +50,35 @@ namespace API.Parser
// Dance in the Vampire Bund v16-17 // Dance in the Vampire Bund v16-17
new Regex( new Regex(
@"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d+)( |_)", @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d+)( |_)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar // NEEDLESS_Vol.4_-Simeon_6_v2[SugoiSugoi].rar
new Regex( new Regex(
@"(?<Series>.*)(\b|_)(?!\[)(vol\.?)(?<Volume>\d+(-\d+)?)(?!\])", @"(?<Series>.*)(\b|_)(?!\[)(vol\.?)(?<Volume>\d+(-\d+)?)(?!\])",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Historys Strongest Disciple Kenichi_v11_c90-98.zip or Dance in the Vampire Bund v16-17 // Historys Strongest Disciple Kenichi_v11_c90-98.zip or Dance in the Vampire Bund v16-17
new Regex( new Regex(
@"(?<Series>.*)(\b|_)(?!\[)v(?<Volume>\d+(-\d+)?)(?!\])", @"(?<Series>.*)(\b|_)(?!\[)v(?<Volume>\d+(-\d+)?)(?!\])",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout), // Kodomo no Jikan vol. 10, [dmntsf.net] One Piece - Digital Colored Comics Vol. 20.5-21.5 Ch. 177
// Kodomo no Jikan vol. 10
new Regex( new Regex(
@"(?<Series>.*)(\b|_)(vol\.? ?)(?<Volume>\d+(-\d+)?)", @"(?<Series>.*)(\b|_)(vol\.? ?)(?<Volume>\d+(\.\d)?(-\d+)?(\.\d)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb) // Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
new Regex( new Regex(
@"(vol\.? ?)(?<Volume>\d+)", @"(vol\.? ?)(?<Volume>\d+(\.\d)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Tonikaku Cawaii [Volume 11].cbz // Tonikaku Cawaii [Volume 11].cbz
new Regex( new Regex(
@"(volume )(?<Volume>\d+)", @"(volume )(?<Volume>\d+(\.\d)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Tower Of God S01 014 (CBT) (digital).cbz // Tower Of God S01 014 (CBT) (digital).cbz
new Regex( new Regex(
@"(?<Series>.*)(\b|_|)(S(?<Volume>\d+))", @"(?<Series>.*)(\b|_|)(S(?<Volume>\d+))",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// vol_001-1.cbz for MangaPy default naming convention // vol_001-1.cbz for MangaPy default naming convention
new Regex( new Regex(
@"(vol_)(?<Volume>\d+)", @"(vol_)(?<Volume>\d+(\.\d)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
}; };
private static readonly Regex[] MangaSeriesRegex = new[] private static readonly Regex[] MangaSeriesRegex = new[]
@ -95,167 +86,138 @@ namespace API.Parser
// Grand Blue Dreaming - SP02 // Grand Blue Dreaming - SP02
new Regex( new Regex(
@"(?<Series>.*)(\b|_|-|\s)(?:sp)\d", @"(?<Series>.*)(\b|_|-|\s)(?:sp)\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// [SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar, Yuusha Ga Shinda! - Vol.tbd Chapter 27.001 V2 Infection ①.cbz // [SugoiSugoi]_NEEDLESS_Vol.2_-_Disk_The_Informant_5_[ENG].rar, Yuusha Ga Shinda! - Vol.tbd Chapter 27.001 V2 Infection ①.cbz
new Regex( new Regex(
@"^(?<Series>.*)( |_)Vol\.?(\d+|tbd)", @"^(?<Series>.*)( |_)Vol\.?(\d+|tbd)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Mad Chimera World - Volume 005 - Chapter 026.cbz (couldn't figure out how to get Volume negative lookaround working on below regex), // Mad Chimera World - Volume 005 - Chapter 026.cbz (couldn't figure out how to get Volume negative lookaround working on below regex),
// The Duke of Death and His Black Maid - Vol. 04 Ch. 054.5 - V4 Omake // The Duke of Death and His Black Maid - Vol. 04 Ch. 054.5 - V4 Omake
new Regex( new Regex(
@"(?<Series>.+?)(\s|_|-)+(?:Vol(ume|\.)?(\s|_|-)+\d+)(\s|_|-)+(?:(Ch|Chapter|Ch)\.?)(\s|_|-)+(?<Chapter>\d+)", @"(?<Series>.+?)(\s|_|-)+(?:Vol(ume|\.)?(\s|_|-)+\d+)(\s|_|-)+(?:(Ch|Chapter|Ch)\.?)(\s|_|-)+(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions,
RegexTimeout), RegexTimeout),
// Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip, VanDread-v01-c01.zip // Ichiban_Ushiro_no_Daimaou_v04_ch34_[VISCANS].zip, VanDread-v01-c01.zip
new Regex( new Regex(
@"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d*)(\s|_|-)", @"(?<Series>.*)(\b|_)v(?<Volume>\d+-?\d*)(\s|_|-)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions,
RegexTimeout), RegexTimeout),
// Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA], Black Bullet - v4 c17 [batoto] // Gokukoku no Brynhildr - c001-008 (v01) [TrinityBAKumA], Black Bullet - v4 c17 [batoto]
new Regex( new Regex(
@"(?<Series>.*)( - )(?:v|vo|c)\d", @"(?<Series>.*)( - )(?:v|vo|c)\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Kedouin Makoto - Corpse Party Musume, Chapter 19 [Dametrans].zip // Kedouin Makoto - Corpse Party Musume, Chapter 19 [Dametrans].zip
new Regex( new Regex(
@"(?<Series>.*)(?:, Chapter )(?<Chapter>\d+)", @"(?<Series>.*)(?:, Chapter )(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Please Go Home, Akutsu-San! - Chapter 038.5 - Volume Announcement.cbz // Please Go Home, Akutsu-San! - Chapter 038.5 - Volume Announcement.cbz
new Regex( new Regex(
@"(?<Series>.*)(\s|_|-)(?!Vol)(\s|_|-)(?:Chapter)(\s|_|-)(?<Chapter>\d+)", @"(?<Series>.*)(\s|_|-)(?!Vol)(\s|_|-)(?:Chapter)(\s|_|-)(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// [dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz // [dmntsf.net] One Piece - Digital Colored Comics Vol. 20 Ch. 177 - 30 Million vs 81 Million.cbz
new Regex( new Regex(
@"(?<Series>.*) (\b|_|-)(vol)\.?(\s|-|_)?\d+", @"(?<Series>.*) (\b|_|-)(vol)\.?(\s|-|_)?\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// [xPearse] Kyochuu Rettou Volume 1 [English] [Manga] [Volume Scans] // [xPearse] Kyochuu Rettou Volume 1 [English] [Manga] [Volume Scans]
new Regex( new Regex(
@"(?<Series>.*) (\b|_|-)(vol)(ume)", @"(?<Series>.*) (\b|_|-)(vol)(ume)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions,
RegexTimeout), RegexTimeout),
//Knights of Sidonia c000 (S2 LE BD Omake - BLAME!) [Habanero Scans] //Knights of Sidonia c000 (S2 LE BD Omake - BLAME!) [Habanero Scans]
new Regex( new Regex(
@"(?<Series>.*)(\bc\d+\b)", @"(?<Series>.*)(\bc\d+\b)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
//Tonikaku Cawaii [Volume 11], Darling in the FranXX - Volume 01.cbz //Tonikaku Cawaii [Volume 11], Darling in the FranXX - Volume 01.cbz
new Regex( new Regex(
@"(?<Series>.*)(?: _|-|\[|\()\s?vol(ume)?", @"(?<Series>.*)(?: _|-|\[|\()\s?vol(ume)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Momo The Blood Taker - Chapter 027 Violent Emotion.cbz, Grand Blue Dreaming - SP02 Extra (2019) (Digital) (danke-Empire).cbz // Momo The Blood Taker - Chapter 027 Violent Emotion.cbz, Grand Blue Dreaming - SP02 Extra (2019) (Digital) (danke-Empire).cbz
new Regex( new Regex(
@"^(?<Series>(?!Vol).+?)(?:(ch(apter|\.)(\b|_|-|\s))|sp)\d", @"^(?<Series>(?!Vol).+?)(?:(ch(apter|\.)(\b|_|-|\s))|sp)\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb) // Historys Strongest Disciple Kenichi_v11_c90-98.zip, Killing Bites Vol. 0001 Ch. 0001 - Galactica Scanlations (gb)
new Regex( new Regex(
@"(?<Series>.*) (\b|_|-)(v|ch\.?|c)\d+", @"(?<Series>.*) (\b|_|-)(v|ch\.?|c)\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
//Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip must be before [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip //Ichinensei_ni_Nacchattara_v01_ch01_[Taruby]_v1.1.zip must be before [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
// due to duplicate version identifiers in file. // due to duplicate version identifiers in file.
new Regex( new Regex(
@"(?<Series>.*)(v|s)\d+(-\d+)?(_|\s)", @"(?<Series>.*)(v|s)\d+(-\d+)?(_|\s)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
//[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip //[Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
new Regex( new Regex(
@"(?<Series>.*)(v|s)\d+(-\d+)?", @"(?<Series>.*)(v|s)\d+(-\d+)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz // Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz
new Regex( new Regex(
@"(?<Series>.*) (?<Chapter>\d+) (?:\(\d{4}\)) ", @"(?<Series>.*) (?<Chapter>\d+) (?:\(\d{4}\)) ",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire) // Goblin Slayer - Brand New Day 006.5 (2019) (Digital) (danke-Empire)
new Regex( new Regex(
@"(?<Series>.*) (?<Chapter>\d+(?:.\d+|-\d+)?) \(\d{4}\)", @"(?<Series>.*) (?<Chapter>\d+(?:.\d+|-\d+)?) \(\d{4}\)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Noblesse - Episode 429 (74 Pages).7z // Noblesse - Episode 429 (74 Pages).7z
new Regex( new Regex(
@"(?<Series>.*)(\s|_)(?:Episode|Ep\.?)(\s|_)(?<Chapter>\d+(?:.\d+|-\d+)?)", @"(?<Series>.*)(\s|_)(?:Episode|Ep\.?)(\s|_)(?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ) // Akame ga KILL! ZERO (2016-2019) (Digital) (LuCaZ)
new Regex( new Regex(
@"(?<Series>.*)\(\d", @"(?<Series>.*)\(\d",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Tonikaku Kawaii (Ch 59-67) (Ongoing) // Tonikaku Kawaii (Ch 59-67) (Ongoing)
new Regex( new Regex(
@"(?<Series>.*)(\s|_)\((c\s|ch\s|chapter\s)", @"(?<Series>.*)(\s|_)\((c\s|ch\s|chapter\s)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Black Bullet (This is very loose, keep towards bottom) // Black Bullet (This is very loose, keep towards bottom)
new Regex( new Regex(
@"(?<Series>.*)(_)(v|vo|c|volume)( |_)\d+", @"(?<Series>.*)(_)(v|vo|c|volume)( |_)\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar // [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar
new Regex( new Regex(
@"(?<Series>.*)( |_)(vol\d+)?( |_)(?:Chp\.? ?\d+)", @"(?<Series>.*)( |_)(vol\d+)?( |_)(?:Chp\.? ?\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1 // Mahoutsukai to Deshi no Futekisetsu na Kankei Chp. 1
new Regex( new Regex(
@"(?<Series>.*)( |_)(?:Chp.? ?\d+)", @"(?<Series>.*)( |_)(?:Chp.? ?\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Chapter 01 // Corpse Party -The Anthology- Sachikos game of love Hysteric Birthday 2U Chapter 01
new Regex( new Regex(
@"^(?!Vol)(?<Series>.*)( |_)Chapter( |_)(\d+)", @"^(?!Vol)(?<Series>.*)( |_)Chapter( |_)(\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Fullmetal Alchemist chapters 101-108.cbz // Fullmetal Alchemist chapters 101-108.cbz
new Regex( new Regex(
@"^(?!vol)(?<Series>.*)( |_)(chapters( |_)?)\d+-?\d*", @"^(?!vol)(?<Series>.*)( |_)(chapters( |_)?)\d+-?\d*",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1 // Umineko no Naku Koro ni - Episode 1 - Legend of the Golden Witch #1
new Regex( new Regex(
@"^(?!Vol\.?)(?<Series>.*)( |_|-)(?<!-)(episode|chapter|(ch\.?) ?)\d+-?\d*", @"^(?!Vol\.?)(?<Series>.*)( |_|-)(?<!-)(episode|chapter|(ch\.?) ?)\d+-?\d*",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Baketeriya ch01-05.zip // Baketeriya ch01-05.zip
new Regex( new Regex(
@"^(?!Vol)(?<Series>.*)ch\d+-?\d?", @"^(?!Vol)(?<Series>.*)ch\d+-?\d?",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Magi - Ch.252-005.cbz // Magi - Ch.252-005.cbz
new Regex( new Regex(
@"(?<Series>.*)( ?- ?)Ch\.\d+-?\d*", @"(?<Series>.*)( ?- ?)Ch\.\d+-?\d*",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// [BAA]_Darker_than_Black_Omake-1.zip // [BAA]_Darker_than_Black_Omake-1.zip
new Regex( new Regex(
@"^(?!Vol)(?<Series>.*)(-)\d+-?\d*", // This catches a lot of stuff ^(?!Vol)(?<Series>.*)( |_)(\d+) @"^(?!Vol)(?<Series>.*)(-)\d+-?\d*", // This catches a lot of stuff ^(?!Vol)(?<Series>.*)( |_)(\d+)
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Kodoja #001 (March 2016) // Kodoja #001 (March 2016)
new Regex( new Regex(
@"(?<Series>.*)(\s|_|-)#", @"(?<Series>.*)(\s|_|-)#",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Baketeriya ch01-05.zip, Akiiro Bousou Biyori - 01.jpg, Beelzebub_172_RHS.zip, Cynthia the Mission 29.rar, A Compendium of Ghosts - 031 - The Third Story_ Part 12 (Digital) (Cobalt001) // Baketeriya ch01-05.zip, Akiiro Bousou Biyori - 01.jpg, Beelzebub_172_RHS.zip, Cynthia the Mission 29.rar, A Compendium of Ghosts - 031 - The Third Story_ Part 12 (Digital) (Cobalt001)
new Regex( new Regex(
@"^(?!Vol\.?)(?<Series>.+?)( |_|-)(?<!-)(ch)?\d+-?\d*", @"^(?!Vol\.?)(?<Series>.+?)( |_|-)(?<!-)(ch)?\d+-?\d*",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// [BAA]_Darker_than_Black_c1 (This is very greedy, make sure it's close to last) // [BAA]_Darker_than_Black_c1 (This is very greedy, make sure it's close to last)
new Regex( new Regex(
@"^(?!Vol)(?<Series>.*)( |_|-)(ch?)\d+", @"^(?!Vol)(?<Series>.*)( |_|-)(ch?)\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
}; };
private static readonly Regex[] ComicSeriesRegex = new[] private static readonly Regex[] ComicSeriesRegex = new[]
@ -263,110 +225,79 @@ namespace API.Parser
// Invincible Vol 01 Family matters (2005) (Digital) // Invincible Vol 01 Family matters (2005) (Digital)
new Regex( new Regex(
@"(?<Series>.*)(\b|_)(vol\.?)( |_)(?<Volume>\d+(-\d+)?)", @"(?<Series>.*)(\b|_)(vol\.?)( |_)(?<Volume>\d+(-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout), // Batman Beyond 2.0 001 (2013)
new Regex(
@"^(?<Series>.+?\S\.\d) (?<Chapter>\d+)",
MatchOptions, RegexTimeout),
// 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS) // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)
new Regex( new Regex(
@"^(?<Volume>\d+) (- |_)?(?<Series>.*(\d{4})?)( |_)(\(|\d+)", @"^(?<Volume>\d+)\s(-\s|_)(?<Series>.*(\d{4})?)( |_)(\(|\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// 01 Spider-Man & Wolverine 01.cbr // 01 Spider-Man & Wolverine 01.cbr
new Regex( new Regex(
@"^(?<Volume>\d+) (?:- )?(?<Series>.*) (\d+)?", @"^(?<Volume>\d+)\s(?:-\s)(?<Series>.*) (\d+)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Batman & Wildcat (1 of 3) // Batman & Wildcat (1 of 3)
new Regex( new Regex(
@"(?<Series>.*(\d{4})?)( |_)(?:\((?<Volume>\d+) of \d+)", @"(?<Series>.*(\d{4})?)( |_)(?:\((?<Volume>\d+) of \d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex( new Regex(
@"^(?<Series>.*)(?: |_)v\d+", @"^(?<Series>.*)(?: |_)v\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Amazing Man Comics chapter 25 // Amazing Man Comics chapter 25
new Regex( new Regex(
@"^(?<Series>.*)(?: |_)c(hapter) \d+", @"^(?<Series>.*)(?: |_)c(hapter) \d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Amazing Man Comics issue #25 // Amazing Man Comics issue #25
new Regex( new Regex(
@"^(?<Series>.*)(?: |_)i(ssue) #\d+", @"^(?<Series>.*)(?: |_)i(ssue) #\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Batman Wayne Family Adventures - Ep. 001 - Moving In // Batman Wayne Family Adventures - Ep. 001 - Moving In
new Regex( new Regex(
@"^(?<Series>.+?)(\s|_|-)?(?:Ep\.?)(\s|_|-)+\d+", @"^(?<Series>.+?)(\s|_|-)?(?:Ep\.?)(\s|_|-)+\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout), // Batgirl Vol.2000 #57 (December, 2004)
// Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex( new Regex(
@"^(?<Series>.+?)(?: \d+)", @"^(?<Series>.+?)Vol\.?\s?#?(?:\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Batman & Robin the Teen Wonder #0 // Batman & Robin the Teen Wonder #0
new Regex( new Regex(
@"^(?<Series>.*)(?: |_)#\d+", @"^(?<Series>.*)(?: |_)#\d+",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout), // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex(
@"^(?<Series>.+?)(?: \d+)",
MatchOptions, RegexTimeout),
// Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005) // Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005)
new Regex( new Regex(
@"^(?<Series>.*)(?: |_)(?<Volume>\d+)", @"^(?<Series>.*)(?: |_)(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// The First Asterix Frieze (WebP by Doc MaKS) // The First Asterix Frieze (WebP by Doc MaKS)
new Regex( new Regex(
@"^(?<Series>.*)(?: |_)(?!\(\d{4}|\d{4}-\d{2}\))\(", @"^(?<Series>.*)(?: |_)(?!\(\d{4}|\d{4}-\d{2}\))\(",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout), // spawn-123, spawn-chapter-123 (from https://github.com/Girbons/comics-downloader)
new Regex(
@"^(?<Series>.+?)-(chapter-)?(?<Chapter>\d+)",
MatchOptions, RegexTimeout),
// MUST BE LAST: Batman & Daredevil - King of New York // MUST BE LAST: Batman & Daredevil - King of New York
new Regex( new Regex(
@"^(?<Series>.*)", @"^(?<Series>.*)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
}; };
private static readonly Regex[] ComicVolumeRegex = new[] private static readonly Regex[] ComicVolumeRegex = new[]
{ {
// // 04 - Asterix the Gladiator (1964) (Digital-Empire) (WebP by Doc MaKS)
// new Regex(
// @"^(?<Volume>\d+) (- |_)?(?<Series>.*(\d{4})?)( |_)(\(|\d+)",
// RegexOptions.IgnoreCase | RegexOptions.Compiled,
// RegexTimeout),
// // 01 Spider-Man & Wolverine 01.cbr
// new Regex(
// @"^(?<Volume>\d+) (?:- )?(?<Series>.*) (\d+)?",
// RegexOptions.IgnoreCase | RegexOptions.Compiled,
// RegexTimeout),
// // Batman & Wildcat (1 of 3)
// new Regex(
// @"(?<Series>.*(\d{4})?)( |_)(?:\((?<Chapter>\d+) of \d+)",
// RegexOptions.IgnoreCase | RegexOptions.Compiled,
// RegexTimeout),
// Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex( new Regex(
@"^(?<Series>.*)(?: |_)v(?<Volume>\d+)", @"^(?<Series>.*)(?: |_)v(?<Volume>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout), // Batgirl Vol.2000 #57 (December, 2004)
// Scott Pilgrim 02 - Scott Pilgrim vs. The World (2005) new Regex(
// BUG: Negative lookbehind has to be fixed width @"^(?<Series>.+?)(?:\s|_)vol\.?\s?(?<Volume>\d+)",
// NOTE: The case this is built for does not make much sense. MatchOptions, RegexTimeout),
// new Regex(
// @"^(?<Series>.+?)(?<!c(hapter)|i(ssue))(?<!of)(?: |_)(?<!of )(?<Volume>\d+)",
// RegexOptions.IgnoreCase | RegexOptions.Compiled,
// RegexTimeout),
// Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
// new Regex(
// @"^(?<Series>.+?)(?<!c(hapter)|i(ssue))(?<!of)(?: (?<Volume>\d+))",
// RegexOptions.IgnoreCase | RegexOptions.Compiled,
// RegexTimeout),
// // Batman & Robin the Teen Wonder #0
// new Regex(
// @"^(?<Series>.*)(?: |_)#(?<Volume>\d+)",
// RegexOptions.IgnoreCase | RegexOptions.Compiled,
// RegexTimeout),
}; };
private static readonly Regex[] ComicChapterRegex = new[] private static readonly Regex[] ComicChapterRegex = new[]
@ -374,59 +305,68 @@ namespace API.Parser
// Batman & Wildcat (1 of 3) // Batman & Wildcat (1 of 3)
new Regex( new Regex(
@"(?<Series>.*(\d{4})?)( |_)(?:\((?<Chapter>\d+) of \d+)", @"(?<Series>.*(\d{4})?)( |_)(?:\((?<Chapter>\d+) of \d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Batman Beyond 04 (of 6) (1999) // Batman Beyond 04 (of 6) (1999)
new Regex( new Regex(
@"(?<Series>.+?)(?<Chapter>\d+)(\s|_|-)?\(of", @"(?<Series>.+?)(?<Chapter>\d+)(\s|_|-)?\(of",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout), // Batman Beyond 2.0 001 (2013)
new Regex(
@"^(?<Series>.+?\S\.\d) (?<Chapter>\d+)",
MatchOptions, RegexTimeout),
// Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) // Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex( new Regex(
@"^(?<Series>.+?)(?: |_)v(?<Volume>\d+)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)", @"^(?<Series>.+?)(?: |_)v(?<Volume>\d+)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout), // Batman & Robin the Teen Wonder #0
new Regex(
@"^(?<Series>.+?)(?:\s|_)#(?<Chapter>\d+)",
MatchOptions, RegexTimeout),
// Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr // Invincible 070.5 - Invincible Returns 1 (2010) (digital) (Minutemen-InnerDemons).cbr
new Regex( new Regex(
@"^(?<Series>.+?)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)-", @"^(?<Series>.+?)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)(c? ?)-",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
// Batgirl Vol.2000 #57 (December, 2004)
new Regex(
@"^(?<Series>.+?)(?:vol\.?\d+)\s#(?<Chapter>\d+)",
MatchOptions,
RegexTimeout), RegexTimeout),
// Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus) // Batman & Catwoman - Trail of the Gun 01, Batman & Grendel (1996) 01 - Devil's Bones, Teen Titans v1 001 (1966-02) (digital) (OkC.O.M.P.U.T.O.-Novus)
new Regex( new Regex(
@"^(?<Series>.+?)(?: (?<Chapter>\d+))", @"^(?<Series>.+?)(?: (?<Chapter>\d+))",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Batman & Robin the Teen Wonder #0
new Regex(
@"^(?<Series>.+?)(?:\s|_)#(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled,
RegexTimeout),
// Saga 001 (2012) (Digital) (Empire-Zone) // Saga 001 (2012) (Digital) (Empire-Zone)
new Regex( new Regex(
@"(?<Series>.+?)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)\s\(\d{4}", @"(?<Series>.+?)(?: |_)(c? ?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)\s\(\d{4}",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Amazing Man Comics chapter 25 // Amazing Man Comics chapter 25
new Regex( new Regex(
@"^(?!Vol)(?<Series>.+?)( |_)c(hapter)( |_)(?<Chapter>\d*)", @"^(?!Vol)(?<Series>.+?)( |_)c(hapter)( |_)(?<Chapter>\d*)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Amazing Man Comics issue #25 // Amazing Man Comics issue #25
new Regex( new Regex(
@"^(?!Vol)(?<Series>.+?)( |_)i(ssue)( |_) #(?<Chapter>\d*)", @"^(?!Vol)(?<Series>.+?)( |_)i(ssue)( |_) #(?<Chapter>\d*)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout), // spawn-123, spawn-chapter-123 (from https://github.com/Girbons/comics-downloader)
new Regex(
@"^(?<Series>.+?)-(chapter-)?(?<Chapter>\d+)",
MatchOptions, RegexTimeout),
// Cyberpunk 2077 - Your Voice 01
// new Regex(
// @"^(?<Series>.+?\s?-\s?(?:.+?))(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)$",
// MatchOptions,
// RegexTimeout),
}; };
private static readonly Regex[] ReleaseGroupRegex = new[] private static readonly Regex[] ReleaseGroupRegex = new[]
{ {
// [TrinityBAKumA Finella&anon], [BAA]_, [SlowManga&OverloadScans], [batoto] // [TrinityBAKumA Finella&anon], [BAA]_, [SlowManga&OverloadScans], [batoto]
new Regex(@"(?:\[(?<subgroup>(?!\s).+?(?<!\s))\](?:_|-|\s|\.)?)", new Regex(@"(?:\[(?<subgroup>(?!\s).+?(?<!\s))\](?:_|-|\s|\.)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// (Shadowcat-Empire), // (Shadowcat-Empire),
// new Regex(@"(?:\[(?<subgroup>(?!\s).+?(?<!\s))\](?:_|-|\s|\.)?)", // new Regex(@"(?:\[(?<subgroup>(?!\s).+?(?<!\s))\](?:_|-|\s|\.)?)",
// RegexOptions.IgnoreCase | RegexOptions.Compiled), // MatchOptions),
}; };
private static readonly Regex[] MangaChapterRegex = new[] private static readonly Regex[] MangaChapterRegex = new[]
@ -434,76 +374,62 @@ namespace API.Parser
// Historys Strongest Disciple Kenichi_v11_c90-98.zip, ...c90.5-100.5 // Historys Strongest Disciple Kenichi_v11_c90-98.zip, ...c90.5-100.5
new Regex( new Regex(
@"(\b|_)(c|ch)(\.?\s?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)", @"(\b|_)(c|ch)(\.?\s?)(?<Chapter>(\d+(\.\d)?)-?(\d+(\.\d)?)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip // [Suihei Kiki]_Kasumi_Otoko_no_Ko_[Taruby]_v1.1.zip
new Regex( new Regex(
@"v\d+\.(?<Chapter>\d+(?:.\d+|-\d+)?)", @"v\d+\.(?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz (Rare case, if causes issue remove) // Umineko no Naku Koro ni - Episode 3 - Banquet of the Golden Witch #02.cbz (Rare case, if causes issue remove)
new Regex( new Regex(
@"^(?<Series>.*)(?: |_)#(?<Chapter>\d+)", @"^(?<Series>.*)(?: |_)#(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Green Worldz - Chapter 027, Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 11-10 // Green Worldz - Chapter 027, Kimi no Koto ga Daidaidaidaidaisuki na 100-nin no Kanojo Chapter 11-10
new Regex( new Regex(
@"^(?!Vol)(?<Series>.*)\s?(?<!vol\. )\sChapter\s(?<Chapter>\d+(?:\.?[\d-]+)?)", @"^(?!Vol)(?<Series>.*)\s?(?<!vol\. )\sChapter\s(?<Chapter>\d+(?:\.?[\d-]+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz, Hinowa ga CRUSH! 018.5 (2019) (Digital) (LuCaZ).cbz // Hinowa ga CRUSH! 018 (2019) (Digital) (LuCaZ).cbz, Hinowa ga CRUSH! 018.5 (2019) (Digital) (LuCaZ).cbz
new Regex( new Regex(
@"^(?!Vol)(?<Series>.+?)\s(?<!vol\. )(?<Chapter>\d+(?:.\d+|-\d+)?)(?:\s\(\d{4}\))?(\b|_|-)", @"^(?!Vol)(?<Series>.+?)(?<!Vol)\.?\s(?<Chapter>\d+(?:.\d+|-\d+)?)(?:\s\(\d{4}\))?(\b|_|-)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Tower Of God S01 014 (CBT) (digital).cbz // Tower Of God S01 014 (CBT) (digital).cbz
new Regex( new Regex(
@"(?<Series>.*)\sS(?<Volume>\d+)\s(?<Chapter>\d+(?:.\d+|-\d+)?)", @"(?<Series>.*)\sS(?<Volume>\d+)\s(?<Chapter>\d+(?:.\d+|-\d+)?)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Beelzebub_01_[Noodles].zip, Beelzebub_153b_RHS.zip // Beelzebub_01_[Noodles].zip, Beelzebub_153b_RHS.zip
new Regex( new Regex(
@"^((?!v|vo|vol|Volume).)*(\s|_)(?<Chapter>\.?\d+(?:.\d+|-\d+)?)(?<ChapterPart>b)?(\s|_|\[|\()", @"^((?!v|vo|vol|Volume).)*(\s|_)(?<Chapter>\.?\d+(?:.\d+|-\d+)?)(?<Part>b)?(\s|_|\[|\()",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Yumekui-Merry_DKThias_Chapter21.zip // Yumekui-Merry_DKThias_Chapter21.zip
new Regex( new Regex(
@"Chapter(?<Chapter>\d+(-\d+)?)", //(?:.\d+|-\d+)? @"Chapter(?<Chapter>\d+(-\d+)?)", //(?:.\d+|-\d+)?
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar // [Hidoi]_Amaenaideyo_MS_vol01_chp02.rar
new Regex( new Regex(
@"(?<Series>.*)(\s|_)(vol\d+)?(\s|_)Chp\.? ?(?<Chapter>\d+)", @"(?<Series>.*)(\s|_)(vol\d+)?(\s|_)Chp\.? ?(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Vol 1 Chapter 2 // Vol 1 Chapter 2
new Regex( new Regex(
@"(?<Volume>((vol|volume|v))?(\s|_)?\.?\d+)(\s|_)(Chp|Chapter)\.?(\s|_)?(?<Chapter>\d+)", @"(?<Volume>((vol|volume|v))?(\s|_)?\.?\d+)(\s|_)(Chp|Chapter)\.?(\s|_)?(?<Chapter>\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
}; };
private static readonly Regex[] MangaEditionRegex = { private static readonly Regex[] MangaEditionRegex = {
// Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz // Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz
new Regex( new Regex(
@"(?<Edition>({|\(|\[).* Edition(}|\)|\]))", @"(?<Edition>({|\(|\[).* Edition(}|\)|\]))",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz // Tenjo Tenge {Full Contact Edition} v01 (2011) (Digital) (ASTC).cbz
new Regex( new Regex(
@"(\b|_)(?<Edition>Omnibus(( |_)?Edition)?)(\b|_)?", @"(\b|_)(?<Edition>Omnibus(( |_)?Edition)?)(\b|_)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// To Love Ru v01 Uncensored (Ch.001-007) // To Love Ru v01 Uncensored (Ch.001-007)
new Regex( new Regex(
@"(\b|_)(?<Edition>Uncensored)(\b|_)", @"(\b|_)(?<Edition>Uncensored)(\b|_)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// AKIRA - c003 (v01) [Full Color] [Darkhorse].cbz // AKIRA - c003 (v01) [Full Color] [Darkhorse].cbz
new Regex( new Regex(
@"(\b|_)(?<Edition>Full(?: |_)Color)(\b|_)?", @"(\b|_)(?<Edition>Full(?: |_)Color)(\b|_)?",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
}; };
private static readonly Regex[] CleanupRegex = private static readonly Regex[] CleanupRegex =
@ -511,18 +437,15 @@ namespace API.Parser
// (), {}, [] // (), {}, []
new Regex( new Regex(
@"(?<Cleanup>(\{\}|\[\]|\(\)))", @"(?<Cleanup>(\{\}|\[\]|\(\)))",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// (Complete) // (Complete)
new Regex( new Regex(
@"(?<Cleanup>(\{Complete\}|\[Complete\]|\(Complete\)))", @"(?<Cleanup>(\{Complete\}|\[Complete\]|\(Complete\)))",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
// Anything in parenthesis // Anything in parenthesis
new Regex( new Regex(
@"\(.*\)", @"\(.*\)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout),
}; };
private static readonly Regex[] MangaSpecialRegex = private static readonly Regex[] MangaSpecialRegex =
@ -530,15 +453,21 @@ namespace API.Parser
// All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle. // All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle.
new Regex( new Regex(
@"(?<Special>Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories|Bonus)", @"(?<Special>Specials?|OneShot|One\-Shot|Omake|Extra( Chapter)?|Art Collection|Side( |_)Stories|Bonus)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout),
RegexTimeout), };
private static readonly Regex[] ComicSpecialRegex =
{
// All Keywords, does not account for checking if contains volume/chapter identification. Parser.Parse() will handle.
new Regex(
@"(?<Special>Specials?|OneShot|One\-Shot|Extra( Chapter)?|Book \d.+?|Compendium \d.+?|Omnibus \d.+?|[_\s\-]TPB[_\s\-]|FCBD \d.+?|Absolute \d.+?|Preview \d.+?|Art Collection|Side( |_)Stories|Bonus)",
MatchOptions, RegexTimeout),
}; };
// If SP\d+ is in the filename, we force treat it as a special regardless if volume or chapter might have been found. // If SP\d+ is in the filename, we force treat it as a special regardless if volume or chapter might have been found.
private static readonly Regex SpecialMarkerRegex = new Regex( private static readonly Regex SpecialMarkerRegex = new Regex(
@"(?<Special>SP\d+)", @"(?<Special>SP\d+)",
RegexOptions.IgnoreCase | RegexOptions.Compiled, MatchOptions, RegexTimeout
RegexTimeout
); );
@ -552,7 +481,7 @@ namespace API.Parser
/// <returns><see cref="ParserInfo"/> or null if Series was empty</returns> /// <returns><see cref="ParserInfo"/> or null if Series was empty</returns>
public static ParserInfo Parse(string filePath, string rootPath, LibraryType type = LibraryType.Manga) public static ParserInfo Parse(string filePath, string rootPath, LibraryType type = LibraryType.Manga)
{ {
var fileName = Path.GetFileName(filePath); var fileName = Path.GetFileNameWithoutExtension(filePath);
ParserInfo ret; ParserInfo ret;
if (IsEpub(filePath)) if (IsEpub(filePath))
@ -562,7 +491,7 @@ namespace API.Parser
Chapters = ParseChapter(fileName) ?? ParseComicChapter(fileName), Chapters = ParseChapter(fileName) ?? ParseComicChapter(fileName),
Series = ParseSeries(fileName) ?? ParseComicSeries(fileName), Series = ParseSeries(fileName) ?? ParseComicSeries(fileName),
Volumes = ParseVolume(fileName) ?? ParseComicVolume(fileName), Volumes = ParseVolume(fileName) ?? ParseComicVolume(fileName),
Filename = fileName, Filename = Path.GetFileName(filePath),
Format = ParseFormat(filePath), Format = ParseFormat(filePath),
FullFilePath = filePath FullFilePath = filePath
}; };
@ -574,14 +503,14 @@ namespace API.Parser
Chapters = type == LibraryType.Manga ? ParseChapter(fileName) : ParseComicChapter(fileName), Chapters = type == LibraryType.Manga ? ParseChapter(fileName) : ParseComicChapter(fileName),
Series = type == LibraryType.Manga ? ParseSeries(fileName) : ParseComicSeries(fileName), Series = type == LibraryType.Manga ? ParseSeries(fileName) : ParseComicSeries(fileName),
Volumes = type == LibraryType.Manga ? ParseVolume(fileName) : ParseComicVolume(fileName), Volumes = type == LibraryType.Manga ? ParseVolume(fileName) : ParseComicVolume(fileName),
Filename = fileName, Filename = Path.GetFileName(filePath),
Format = ParseFormat(filePath), Format = ParseFormat(filePath),
Title = Path.GetFileNameWithoutExtension(fileName), Title = Path.GetFileNameWithoutExtension(fileName),
FullFilePath = filePath FullFilePath = filePath
}; };
} }
if (IsImage(filePath) && IsCoverImage(fileName)) return null; if (IsImage(filePath) && IsCoverImage(filePath)) return null;
if (IsImage(filePath)) if (IsImage(filePath))
{ {
@ -600,7 +529,7 @@ namespace API.Parser
var edition = ParseEdition(fileName); var edition = ParseEdition(fileName);
if (!string.IsNullOrEmpty(edition)) if (!string.IsNullOrEmpty(edition))
{ {
ret.Series = CleanTitle(ret.Series.Replace(edition, "")); ret.Series = CleanTitle(ret.Series.Replace(edition, ""), type is LibraryType.Comic);
ret.Edition = edition; ret.Edition = edition;
} }
@ -625,11 +554,11 @@ namespace API.Parser
if (string.IsNullOrEmpty(ret.Series)) if (string.IsNullOrEmpty(ret.Series))
{ {
ret.Series = CleanTitle(fileName); ret.Series = CleanTitle(fileName, type is LibraryType.Comic);
} }
// Pdfs may have .pdf in the series name, remove that // Pdfs may have .pdf in the series name, remove that
if (IsPdf(fileName) && ret.Series.ToLower().EndsWith(".pdf")) if (IsPdf(filePath) && ret.Series.ToLower().EndsWith(".pdf"))
{ {
ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length); ret.Series = ret.Series.Substring(0, ret.Series.Length - ".pdf".Length);
} }
@ -673,7 +602,7 @@ namespace API.Parser
if ((string.IsNullOrEmpty(series) && i == fallbackFolders.Count - 1)) if ((string.IsNullOrEmpty(series) && i == fallbackFolders.Count - 1))
{ {
ret.Series = CleanTitle(folder); ret.Series = CleanTitle(folder, type is LibraryType.Comic);
break; break;
} }
@ -750,6 +679,23 @@ namespace API.Parser
return string.Empty; return string.Empty;
} }
public static string ParseComicSpecial(string filePath)
{
foreach (var regex in ComicSpecialRegex)
{
var matches = regex.Matches(filePath);
foreach (Match match in matches)
{
if (match.Groups["Special"].Success && match.Groups["Special"].Value != string.Empty)
{
return match.Groups["Special"].Value;
}
}
}
return string.Empty;
}
public static string ParseSeries(string filename) public static string ParseSeries(string filename)
{ {
foreach (var regex in MangaSeriesRegex) foreach (var regex in MangaSeriesRegex)
@ -775,7 +721,7 @@ namespace API.Parser
{ {
if (match.Groups["Series"].Success && match.Groups["Series"].Value != string.Empty) if (match.Groups["Series"].Success && match.Groups["Series"].Value != string.Empty)
{ {
return CleanTitle(match.Groups["Series"].Value); return CleanTitle(match.Groups["Series"].Value, true);
} }
} }
} }
@ -793,12 +739,8 @@ namespace API.Parser
if (!match.Groups["Volume"].Success || match.Groups["Volume"] == Match.Empty) continue; if (!match.Groups["Volume"].Success || match.Groups["Volume"] == Match.Empty) continue;
var value = match.Groups["Volume"].Value; var value = match.Groups["Volume"].Value;
if (!value.Contains("-")) return RemoveLeadingZeroes(match.Groups["Volume"].Value); var hasPart = match.Groups["Part"].Success;
var tokens = value.Split("-"); return FormatValue(value, hasPart);
var from = RemoveLeadingZeroes(tokens[0]);
var to = RemoveLeadingZeroes(tokens[1]);
return $"{@from}-{to}";
} }
} }
@ -815,18 +757,32 @@ namespace API.Parser
if (!match.Groups["Volume"].Success || match.Groups["Volume"] == Match.Empty) continue; if (!match.Groups["Volume"].Success || match.Groups["Volume"] == Match.Empty) continue;
var value = match.Groups["Volume"].Value; var value = match.Groups["Volume"].Value;
if (!value.Contains("-")) return RemoveLeadingZeroes(match.Groups["Volume"].Value); var hasPart = match.Groups["Part"].Success;
var tokens = value.Split("-"); return FormatValue(value, hasPart);
var from = RemoveLeadingZeroes(tokens[0]);
var to = RemoveLeadingZeroes(tokens[1]);
return $"{@from}-{to}";
} }
} }
return DefaultVolume; return DefaultVolume;
} }
private static string FormatValue(string value, bool hasPart)
{
if (!value.Contains("-"))
{
return RemoveLeadingZeroes(hasPart ? AddChapterPart(value) : value);
}
var tokens = value.Split("-");
var from = RemoveLeadingZeroes(tokens[0]);
if (tokens.Length == 2)
{
var to = RemoveLeadingZeroes(hasPart ? AddChapterPart(tokens[1]) : tokens[1]);
return $"{@from}-{to}";
}
return @from;
}
public static string ParseChapter(string filename) public static string ParseChapter(string filename)
{ {
foreach (var regex in MangaChapterRegex) foreach (var regex in MangaChapterRegex)
@ -837,24 +793,9 @@ namespace API.Parser
if (!match.Groups["Chapter"].Success || match.Groups["Chapter"] == Match.Empty) continue; if (!match.Groups["Chapter"].Success || match.Groups["Chapter"] == Match.Empty) continue;
var value = match.Groups["Chapter"].Value; var value = match.Groups["Chapter"].Value;
var hasChapterPart = match.Groups["ChapterPart"].Success; var hasPart = match.Groups["Part"].Success;
if (!value.Contains("-"))
{
return RemoveLeadingZeroes(hasChapterPart ? AddChapterPart(value) : value);
}
var tokens = value.Split("-");
var from = RemoveLeadingZeroes(tokens[0]);
if (tokens.Length == 2)
{
var to = RemoveLeadingZeroes(hasChapterPart ? AddChapterPart(tokens[1]) : tokens[1]);
return $"{@from}-{to}";
}
return from;
return FormatValue(value, hasPart);
} }
} }
@ -881,16 +822,8 @@ namespace API.Parser
if (match.Groups["Chapter"].Success && match.Groups["Chapter"] != Match.Empty) if (match.Groups["Chapter"].Success && match.Groups["Chapter"] != Match.Empty)
{ {
var value = match.Groups["Chapter"].Value; var value = match.Groups["Chapter"].Value;
var hasPart = match.Groups["Part"].Success;
if (value.Contains("-")) return FormatValue(value, hasPart);
{
var tokens = value.Split("-");
var from = RemoveLeadingZeroes(tokens[0]);
var to = RemoveLeadingZeroes(tokens[1]);
return $"{from}-{to}";
}
return RemoveLeadingZeroes(match.Groups["Chapter"].Value);
} }
} }
@ -908,12 +841,30 @@ namespace API.Parser
{ {
if (match.Success) if (match.Success)
{ {
title = title.Replace(match.Value, "").Trim(); title = title.Replace(match.Value, string.Empty).Trim();
} }
} }
} }
// TODO: Since we have loops like this, think about using a method
foreach (var regex in MangaEditionRegex) foreach (var regex in MangaEditionRegex)
{
var matches = regex.Matches(title);
foreach (Match match in matches)
{
if (match.Success)
{
title = title.Replace(match.Value, string.Empty).Trim();
}
}
}
return title;
}
private static string RemoveMangaSpecialTags(string title)
{
foreach (var regex in MangaSpecialRegex)
{ {
var matches = regex.Matches(title); var matches = regex.Matches(title);
foreach (Match match in matches) foreach (Match match in matches)
@ -928,9 +879,9 @@ namespace API.Parser
return title; return title;
} }
private static string RemoveSpecialTags(string title) private static string RemoveComicSpecialTags(string title)
{ {
foreach (var regex in MangaSpecialRegex) foreach (var regex in ComicSpecialRegex)
{ {
var matches = regex.Matches(title); var matches = regex.Matches(title);
foreach (Match match in matches) foreach (Match match in matches)
@ -954,14 +905,16 @@ namespace API.Parser
/// </example> /// </example>
/// </summary> /// </summary>
/// <param name="title"></param> /// <param name="title"></param>
/// <param name="isComic"></param>
/// <returns></returns> /// <returns></returns>
public static string CleanTitle(string title) public static string CleanTitle(string title, bool isComic = false)
{ {
title = RemoveReleaseGroup(title); title = RemoveReleaseGroup(title);
title = RemoveEditionTagHolders(title); title = RemoveEditionTagHolders(title);
title = RemoveSpecialTags(title); title = isComic ? RemoveComicSpecialTags(title) : RemoveMangaSpecialTags(title);
title = title.Replace("_", " ").Trim(); title = title.Replace("_", " ").Trim();
if (title.EndsWith("-") || title.EndsWith(",")) if (title.EndsWith("-") || title.EndsWith(","))
@ -1009,7 +962,7 @@ namespace API.Parser
private static string PerformPadding(string number) private static string PerformPadding(string number)
{ {
var num = Int32.Parse(number); var num = int.Parse(number);
return num switch return num switch
{ {
< 10 => "00" + num, < 10 => "00" + num,
@ -1064,7 +1017,7 @@ namespace API.Parser
public static string Normalize(string name) public static string Normalize(string name)
{ {
return Regex.Replace(name.ToLower(), "[^a-zA-Z0-9]", string.Empty); return NormalizeRegex.Replace(name, string.Empty).ToLower();
} }

View File

@ -1,19 +1,11 @@
using System; using System;
using System.Collections.Generic;
using System.Data;
using System.IO; using System.IO;
using System.Linq;
using System.Security.Cryptography; using System.Security.Cryptography;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Data; using API.Data;
using API.Entities; using API.Entities;
using API.Helpers;
using API.Interfaces;
using API.Services; using API.Services;
using Kavita.Common; using Kavita.Common;
using Kavita.Common.EnvironmentInfo;
using Microsoft.AspNetCore.Hosting; using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Identity; using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Server.Kestrel.Core; using Microsoft.AspNetCore.Server.Kestrel.Core;
@ -21,9 +13,6 @@ using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.IO;
using NetVips;
using Sentry;
namespace API namespace API
{ {
@ -103,62 +92,6 @@ namespace API
opts.ListenAnyIP(HttpPort, options => { options.Protocols = HttpProtocols.Http1AndHttp2; }); opts.ListenAnyIP(HttpPort, options => { options.Protocols = HttpProtocols.Http1AndHttp2; });
}); });
var environment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT");
if (environment != Environments.Development)
{
webBuilder.UseSentry(options =>
{
options.Dsn = "https://40f4e7b49c094172a6f99d61efb2740f@o641015.ingest.sentry.io/5757423";
options.MaxBreadcrumbs = 200;
options.AttachStacktrace = true;
options.Debug = false;
options.SendDefaultPii = false;
options.DiagnosticLevel = SentryLevel.Debug;
options.ShutdownTimeout = TimeSpan.FromSeconds(5);
options.Release = BuildInfo.Version.ToString();
options.AddExceptionFilterForType<OutOfMemoryException>();
options.AddExceptionFilterForType<NetVips.VipsException>();
options.AddExceptionFilterForType<InvalidDataException>();
options.AddExceptionFilterForType<KavitaException>();
options.BeforeSend = sentryEvent =>
{
if (sentryEvent.Exception != null
&& sentryEvent.Exception.Message.StartsWith("[GetCoverImage]")
&& sentryEvent.Exception.Message.StartsWith("[BookService]")
&& sentryEvent.Exception.Message.StartsWith("[ExtractArchive]")
&& sentryEvent.Exception.Message.StartsWith("[GetSummaryInfo]")
&& sentryEvent.Exception.Message.StartsWith("[GetSummaryInfo]")
&& sentryEvent.Exception.Message.StartsWith("[GetNumberOfPagesFromArchive]")
&& sentryEvent.Exception.Message.Contains("EPUB parsing error")
&& sentryEvent.Exception.Message.Contains("Unsupported EPUB version")
&& sentryEvent.Exception.Message.Contains("Incorrect EPUB")
&& sentryEvent.Exception.Message.Contains("Access is Denied"))
{
return null; // Don't send this event to Sentry
}
sentryEvent.ServerName = null; // Never send Server Name to Sentry
return sentryEvent;
};
options.ConfigureScope(scope =>
{
scope.User = new User()
{
Id = HashUtil.AnonymousToken()
};
scope.Contexts.App.Name = BuildInfo.AppName;
scope.Contexts.App.Version = BuildInfo.Version.ToString();
scope.Contexts.App.StartTime = DateTime.UtcNow;
scope.Contexts.App.Hash = HashUtil.AnonymousToken();
scope.Contexts.App.Build = BuildInfo.Release;
scope.SetTag("culture", Thread.CurrentThread.CurrentCulture.Name);
scope.SetTag("branch", BuildInfo.Branch);
});
});
}
webBuilder.UseStartup<Startup>(); webBuilder.UseStartup<Startup>();
}); });
} }

View File

@ -0,0 +1,53 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using API.Entities;
using API.Errors;
using API.Interfaces.Services;
using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.Logging;
namespace API.Services
{
public class AccountService : IAccountService
{
private readonly UserManager<AppUser> _userManager;
private readonly ILogger<AccountService> _logger;
public const string DefaultPassword = "[k.2@RZ!mxCQkJzE";
public AccountService(UserManager<AppUser> userManager, ILogger<AccountService> logger)
{
_userManager = userManager;
_logger = logger;
}
public async Task<IEnumerable<ApiException>> ChangeUserPassword(AppUser user, string newPassword)
{
foreach (var validator in _userManager.PasswordValidators)
{
var validationResult = await validator.ValidateAsync(_userManager, user, newPassword);
if (!validationResult.Succeeded)
{
return validationResult.Errors.Select(e => new ApiException(400, e.Code, e.Description));
}
}
var result = await _userManager.RemovePasswordAsync(user);
if (!result.Succeeded)
{
_logger.LogError("Could not update password");
return result.Errors.Select(e => new ApiException(400, e.Code, e.Description));
}
result = await _userManager.AddPasswordAsync(user, newPassword);
if (!result.Succeeded)
{
_logger.LogError("Could not update password");
return result.Errors.Select(e => new ApiException(400, e.Code, e.Description));
}
return new List<ApiException>();
}
}
}

View File

@ -8,15 +8,14 @@ using System.Threading.Tasks;
using System.Xml.Serialization; using System.Xml.Serialization;
using API.Archive; using API.Archive;
using API.Comparators; using API.Comparators;
using API.Data.Metadata;
using API.Extensions; using API.Extensions;
using API.Interfaces.Services; using API.Interfaces.Services;
using API.Services.Tasks; using API.Services.Tasks;
using Kavita.Common; using Kavita.Common;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.IO;
using SharpCompress.Archives; using SharpCompress.Archives;
using SharpCompress.Common; using SharpCompress.Common;
using Image = NetVips.Image;
namespace API.Services namespace API.Services
{ {
@ -28,14 +27,12 @@ namespace API.Services
{ {
private readonly ILogger<ArchiveService> _logger; private readonly ILogger<ArchiveService> _logger;
private readonly IDirectoryService _directoryService; private readonly IDirectoryService _directoryService;
private readonly NaturalSortComparer _comparer;
private const string ComicInfoFilename = "comicinfo"; private const string ComicInfoFilename = "comicinfo";
public ArchiveService(ILogger<ArchiveService> logger, IDirectoryService directoryService) public ArchiveService(ILogger<ArchiveService> logger, IDirectoryService directoryService)
{ {
_logger = logger; _logger = logger;
_directoryService = directoryService; _directoryService = directoryService;
_comparer = new NaturalSortComparer();
} }
/// <summary> /// <summary>
@ -81,13 +78,11 @@ namespace API.Services
{ {
case ArchiveLibrary.Default: case ArchiveLibrary.Default:
{ {
_logger.LogDebug("Using default compression handling"); using var archive = ZipFile.OpenRead(archivePath);
using ZipArchive archive = ZipFile.OpenRead(archivePath);
return archive.Entries.Count(e => !Parser.Parser.HasBlacklistedFolderInPath(e.FullName) && Parser.Parser.IsImage(e.FullName)); return archive.Entries.Count(e => !Parser.Parser.HasBlacklistedFolderInPath(e.FullName) && Parser.Parser.IsImage(e.FullName));
} }
case ArchiveLibrary.SharpCompress: case ArchiveLibrary.SharpCompress:
{ {
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath); using var archive = ArchiveFactory.Open(archivePath);
return archive.Entries.Count(entry => !entry.IsDirectory && return archive.Entries.Count(entry => !entry.IsDirectory &&
!Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty) !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)
@ -130,7 +125,7 @@ namespace API.Services
/// <returns>Entry name of match, null if no match</returns> /// <returns>Entry name of match, null if no match</returns>
public string FirstFileEntry(IEnumerable<string> entryFullNames) public string FirstFileEntry(IEnumerable<string> entryFullNames)
{ {
var result = entryFullNames.OrderBy(Path.GetFileName, _comparer) var result = entryFullNames.OrderBy(Path.GetFileName, new NaturalSortComparer())
.FirstOrDefault(x => !Parser.Parser.HasBlacklistedFolderInPath(x) .FirstOrDefault(x => !Parser.Parser.HasBlacklistedFolderInPath(x)
&& Parser.Parser.IsImage(x) && Parser.Parser.IsImage(x)
&& !x.StartsWith(Parser.Parser.MacOsMetadataFileStartsWith)); && !x.StartsWith(Parser.Parser.MacOsMetadataFileStartsWith));
@ -160,7 +155,6 @@ namespace API.Services
{ {
case ArchiveLibrary.Default: case ArchiveLibrary.Default:
{ {
_logger.LogDebug("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath); using var archive = ZipFile.OpenRead(archivePath);
var entryNames = archive.Entries.Select(e => e.FullName).ToArray(); var entryNames = archive.Entries.Select(e => e.FullName).ToArray();
@ -172,7 +166,6 @@ namespace API.Services
} }
case ArchiveLibrary.SharpCompress: case ArchiveLibrary.SharpCompress:
{ {
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath); using var archive = ArchiveFactory.Open(archivePath);
var entryNames = archive.Entries.Where(archiveEntry => !archiveEntry.IsDirectory).Select(e => e.Key).ToList(); var entryNames = archive.Entries.Where(archiveEntry => !archiveEntry.IsDirectory).Select(e => e.Key).ToList();
@ -301,66 +294,69 @@ namespace API.Services
return null; return null;
} }
public string GetSummaryInfo(string archivePath) public ComicInfo GetComicInfo(string archivePath)
{ {
var summary = string.Empty; if (!IsValidArchive(archivePath)) return null;
if (!IsValidArchive(archivePath)) return summary;
ComicInfo info = null;
try try
{ {
if (!File.Exists(archivePath)) return summary; if (!File.Exists(archivePath)) return null;
var libraryHandler = CanOpen(archivePath); var libraryHandler = CanOpen(archivePath);
switch (libraryHandler) switch (libraryHandler)
{ {
case ArchiveLibrary.Default: case ArchiveLibrary.Default:
{ {
_logger.LogTrace("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath); using var archive = ZipFile.OpenRead(archivePath);
var entry = archive.Entries.SingleOrDefault(x => !Parser.Parser.HasBlacklistedFolderInPath(x.FullName) var entry = archive.Entries.SingleOrDefault(x =>
&& Path.GetFileNameWithoutExtension(x.Name)?.ToLower() == ComicInfoFilename !Parser.Parser.HasBlacklistedFolderInPath(x.FullName)
&& !Path.GetFileNameWithoutExtension(x.Name).StartsWith(Parser.Parser.MacOsMetadataFileStartsWith) && Path.GetFileNameWithoutExtension(x.Name)?.ToLower() == ComicInfoFilename
&& Parser.Parser.IsXml(x.FullName)); && !Path.GetFileNameWithoutExtension(x.Name)
.StartsWith(Parser.Parser.MacOsMetadataFileStartsWith)
&& Parser.Parser.IsXml(x.FullName));
if (entry != null) if (entry != null)
{ {
using var stream = entry.Open(); using var stream = entry.Open();
var serializer = new XmlSerializer(typeof(ComicInfo)); var serializer = new XmlSerializer(typeof(ComicInfo));
info = (ComicInfo) serializer.Deserialize(stream); return (ComicInfo) serializer.Deserialize(stream);
} }
break; break;
} }
case ArchiveLibrary.SharpCompress: case ArchiveLibrary.SharpCompress:
{ {
_logger.LogTrace("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath); using var archive = ArchiveFactory.Open(archivePath);
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory return FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory
&& !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty) && !Parser.Parser
&& !Path.GetFileNameWithoutExtension(entry.Key).StartsWith(Parser.Parser.MacOsMetadataFileStartsWith) .HasBlacklistedFolderInPath(
Path.GetDirectoryName(
entry.Key) ?? string.Empty)
&& !Path
.GetFileNameWithoutExtension(
entry.Key).StartsWith(Parser
.Parser
.MacOsMetadataFileStartsWith)
&& Parser.Parser.IsXml(entry.Key))); && Parser.Parser.IsXml(entry.Key)));
break;
} }
case ArchiveLibrary.NotSupported: case ArchiveLibrary.NotSupported:
_logger.LogWarning("[GetSummaryInfo] This archive cannot be read: {ArchivePath}", archivePath); _logger.LogWarning("[GetComicInfo] This archive cannot be read: {ArchivePath}", archivePath);
return summary; return null;
default: default:
_logger.LogWarning("[GetSummaryInfo] There was an exception when reading archive stream: {ArchivePath}", archivePath); _logger.LogWarning(
return summary; "[GetComicInfo] There was an exception when reading archive stream: {ArchivePath}",
} archivePath);
return null;
if (info != null)
{
return info.Summary;
} }
} }
catch (Exception ex) catch (Exception ex)
{ {
_logger.LogWarning(ex, "[GetSummaryInfo] There was an exception when reading archive stream: {Filepath}", archivePath); _logger.LogWarning(ex, "[GetComicInfo] There was an exception when reading archive stream: {Filepath}", archivePath);
} }
return summary; return null;
} }
private static void ExtractArchiveEntities(IEnumerable<IArchiveEntry> entries, string extractPath) private static void ExtractArchiveEntities(IEnumerable<IArchiveEntry> entries, string extractPath)
{ {
DirectoryService.ExistOrCreate(extractPath); DirectoryService.ExistOrCreate(extractPath);
@ -410,14 +406,12 @@ namespace API.Services
{ {
case ArchiveLibrary.Default: case ArchiveLibrary.Default:
{ {
_logger.LogDebug("Using default compression handling");
using var archive = ZipFile.OpenRead(archivePath); using var archive = ZipFile.OpenRead(archivePath);
ExtractArchiveEntries(archive, extractPath); ExtractArchiveEntries(archive, extractPath);
break; break;
} }
case ArchiveLibrary.SharpCompress: case ArchiveLibrary.SharpCompress:
{ {
_logger.LogDebug("Using SharpCompress compression handling");
using var archive = ArchiveFactory.Open(archivePath); using var archive = ArchiveFactory.Open(archivePath);
ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory
&& !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty) && !Parser.Parser.HasBlacklistedFolderInPath(Path.GetDirectoryName(entry.Key) ?? string.Empty)

View File

@ -4,12 +4,12 @@ using System.Drawing;
using System.Drawing.Imaging; using System.Drawing.Imaging;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Net;
using System.Runtime.InteropServices; using System.Runtime.InteropServices;
using System.Text; using System.Text;
using System.Text.RegularExpressions; using System.Text.RegularExpressions;
using System.Threading.Tasks; using System.Threading.Tasks;
using System.Web; using System.Web;
using API.Data.Metadata;
using API.Entities.Enums; using API.Entities.Enums;
using API.Interfaces.Services; using API.Interfaces.Services;
using API.Parser; using API.Parser;
@ -165,22 +165,43 @@ namespace API.Services
return RemoveWhiteSpaceFromStylesheets(stylesheet.ToCss()); return RemoveWhiteSpaceFromStylesheets(stylesheet.ToCss());
} }
public string GetSummaryInfo(string filePath) public ComicInfo GetComicInfo(string filePath)
{ {
if (!IsValidFile(filePath) || Parser.Parser.IsPdf(filePath)) return string.Empty; if (!IsValidFile(filePath) || Parser.Parser.IsPdf(filePath)) return null;
try try
{ {
using var epubBook = EpubReader.OpenBook(filePath); using var epubBook = EpubReader.OpenBook(filePath);
return epubBook.Schema.Package.Metadata.Description; var publicationDate =
epubBook.Schema.Package.Metadata.Dates.FirstOrDefault(date => date.Event == "publication")?.Date;
var info = new ComicInfo()
{
Summary = epubBook.Schema.Package.Metadata.Description,
Writer = string.Join(",", epubBook.Schema.Package.Metadata.Creators),
Publisher = string.Join(",", epubBook.Schema.Package.Metadata.Publishers),
Month = !string.IsNullOrEmpty(publicationDate) ? DateTime.Parse(publicationDate).Month : 0,
Year = !string.IsNullOrEmpty(publicationDate) ? DateTime.Parse(publicationDate).Year : 0,
};
// Parse tags not exposed via Library
foreach (var metadataItem in epubBook.Schema.Package.Metadata.MetaItems)
{
switch (metadataItem.Name)
{
case "calibre:rating":
info.UserRating = float.Parse(metadataItem.Content);
break;
}
}
return info;
} }
catch (Exception ex) catch (Exception ex)
{ {
_logger.LogWarning(ex, "[BookService] There was an exception getting summary, defaulting to empty string"); _logger.LogWarning(ex, "[GetComicInfo] There was an exception getting metadata");
} }
return string.Empty; return null;
} }
private bool IsValidFile(string filePath) private bool IsValidFile(string filePath)
@ -393,7 +414,7 @@ namespace API.Services
/// <returns></returns> /// <returns></returns>
public string GetCoverImage(string fileFilePath, string fileName) public string GetCoverImage(string fileFilePath, string fileName)
{ {
if (!IsValidFile(fileFilePath)) return String.Empty; if (!IsValidFile(fileFilePath)) return string.Empty;
if (Parser.Parser.IsPdf(fileFilePath)) if (Parser.Parser.IsPdf(fileFilePath))
{ {
@ -411,8 +432,8 @@ namespace API.Services
?? epubBook.Content.Images.Values.FirstOrDefault(); ?? epubBook.Content.Images.Values.FirstOrDefault();
if (coverImageContent == null) return string.Empty; if (coverImageContent == null) return string.Empty;
using var stream = coverImageContent.GetContentStream();
using var stream = StreamManager.GetStream("BookService.GetCoverImage", coverImageContent.ReadContent());
return ImageService.WriteCoverThumbnail(stream, fileName); return ImageService.WriteCoverThumbnail(stream, fileName);
} }
catch (Exception ex) catch (Exception ex)

View File

@ -1,16 +0,0 @@
namespace API.Services
{
public class ComicInfo
{
public string Summary { get; set; }
public string Title { get; set; }
public string Series { get; set; }
public string Notes { get; set; }
public string Publisher { get; set; }
public string Genre { get; set; }
public int PageCount { get; set; }
// ReSharper disable once InconsistentNaming
public string LanguageISO { get; set; }
public string Web { get; set; }
}
}

View File

@ -46,7 +46,7 @@ namespace API.Services
var firstImage = _directoryService.GetFilesWithExtension(directory, Parser.Parser.ImageFileExtensions) var firstImage = _directoryService.GetFilesWithExtension(directory, Parser.Parser.ImageFileExtensions)
.OrderBy(f => f, new NaturalSortComparer()).FirstOrDefault(); .OrderBy(f => f, new NaturalSortComparer()).FirstOrDefault();
return firstImage; return firstImage;
} }
@ -73,7 +73,7 @@ namespace API.Services
{ {
using var thumbnail = Image.Thumbnail(path, ThumbnailWidth); using var thumbnail = Image.Thumbnail(path, ThumbnailWidth);
var filename = fileName + ".png"; var filename = fileName + ".png";
thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, fileName + ".png")); thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, filename));
return filename; return filename;
} }
catch (Exception e) catch (Exception e)
@ -93,7 +93,7 @@ namespace API.Services
/// <returns>File name with extension of the file. This will always write to <see cref="DirectoryService.CoverImageDirectory"/></returns> /// <returns>File name with extension of the file. This will always write to <see cref="DirectoryService.CoverImageDirectory"/></returns>
public static string WriteCoverThumbnail(Stream stream, string fileName) public static string WriteCoverThumbnail(Stream stream, string fileName)
{ {
using var thumbnail = NetVips.Image.ThumbnailStream(stream, ThumbnailWidth); using var thumbnail = Image.ThumbnailStream(stream, ThumbnailWidth);
var filename = fileName + ".png"; var filename = fileName + ".png";
thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, fileName + ".png")); thumbnail.WriteToFile(Path.Join(DirectoryService.CoverImageDirectory, fileName + ".png"));
return filename; return filename;

View File

@ -1,13 +1,15 @@
using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics; using System.Diagnostics;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Comparators; using API.Comparators;
using API.Data.Metadata;
using API.Data.Repositories;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
using API.Extensions; using API.Extensions;
using API.Helpers;
using API.Interfaces; using API.Interfaces;
using API.Interfaces.Services; using API.Interfaces.Services;
using API.SignalR; using API.SignalR;
@ -74,7 +76,7 @@ namespace API.Services
private string GetCoverImage(MangaFile file, int volumeId, int chapterId) private string GetCoverImage(MangaFile file, int volumeId, int chapterId)
{ {
file.LastModified = DateTime.Now; file.UpdateLastModified();
switch (file.Format) switch (file.Format)
{ {
case MangaFormat.Pdf: case MangaFormat.Pdf:
@ -102,6 +104,7 @@ namespace API.Services
if (ShouldUpdateCoverImage(chapter.CoverImage, firstFile, forceUpdate, chapter.CoverImageLocked)) if (ShouldUpdateCoverImage(chapter.CoverImage, firstFile, forceUpdate, chapter.CoverImageLocked))
{ {
_logger.LogDebug("[MetadataService] Generating cover image for {File}", firstFile?.FilePath);
chapter.CoverImage = GetCoverImage(firstFile, chapter.VolumeId, chapter.Id); chapter.CoverImage = GetCoverImage(firstFile, chapter.VolumeId, chapter.Id);
return true; return true;
} }
@ -117,8 +120,7 @@ namespace API.Services
public bool UpdateMetadata(Volume volume, bool forceUpdate) public bool UpdateMetadata(Volume volume, bool forceUpdate)
{ {
// We need to check if Volume coverImage matches first chapters if forceUpdate is false // We need to check if Volume coverImage matches first chapters if forceUpdate is false
if (volume == null || !ShouldUpdateCoverImage(volume.CoverImage, null, forceUpdate if (volume == null || !ShouldUpdateCoverImage(volume.CoverImage, null, forceUpdate)) return false;
, false)) return false;
volume.Chapters ??= new List<Chapter>(); volume.Chapters ??= new List<Chapter>();
var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).FirstOrDefault(); var firstChapter = volume.Chapters.OrderBy(x => double.Parse(x.Number), _chapterSortComparerForInChapterSorting).FirstOrDefault();
@ -137,6 +139,8 @@ namespace API.Services
{ {
var madeUpdate = false; var madeUpdate = false;
if (series == null) return false; if (series == null) return false;
// NOTE: This will fail if we replace the cover of the first volume on a first scan. Because the series will already have a cover image
if (ShouldUpdateCoverImage(series.CoverImage, null, forceUpdate, series.CoverImageLocked)) if (ShouldUpdateCoverImage(series.CoverImage, null, forceUpdate, series.CoverImageLocked))
{ {
series.Volumes ??= new List<Volume>(); series.Volumes ??= new List<Volume>();
@ -167,6 +171,9 @@ namespace API.Services
private bool UpdateSeriesSummary(Series series, bool forceUpdate) private bool UpdateSeriesSummary(Series series, bool forceUpdate)
{ {
// NOTE: This can be problematic when the file changes and a summary already exists, but it is likely
// better to let the user kick off a refresh metadata on an individual Series than having overhead of
// checking File last write time.
if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return false; if (!string.IsNullOrEmpty(series.Summary) && !forceUpdate) return false;
var isBook = series.Library.Type == LibraryType.Book; var isBook = series.Library.Type == LibraryType.Book;
@ -177,18 +184,21 @@ namespace API.Services
if (firstFile == null || (!forceUpdate && !firstFile.HasFileBeenModified())) return false; if (firstFile == null || (!forceUpdate && !firstFile.HasFileBeenModified())) return false;
if (Parser.Parser.IsPdf(firstFile.FilePath)) return false; if (Parser.Parser.IsPdf(firstFile.FilePath)) return false;
if (series.Format is MangaFormat.Archive or MangaFormat.Epub) var comicInfo = GetComicInfo(series.Format, firstFile);
if (string.IsNullOrEmpty(comicInfo?.Summary)) return false;
series.Summary = comicInfo.Summary;
return true;
}
private ComicInfo GetComicInfo(MangaFormat format, MangaFile firstFile)
{
if (format is MangaFormat.Archive or MangaFormat.Epub)
{ {
var summary = Parser.Parser.IsEpub(firstFile.FilePath) ? _bookService.GetSummaryInfo(firstFile.FilePath) : _archiveService.GetSummaryInfo(firstFile.FilePath); return Parser.Parser.IsEpub(firstFile.FilePath) ? _bookService.GetComicInfo(firstFile.FilePath) : _archiveService.GetComicInfo(firstFile.FilePath);
if (!string.IsNullOrEmpty(series.Summary))
{
series.Summary = summary;
firstFile.LastModified = DateTime.Now;
return true;
}
} }
firstFile.LastModified = DateTime.Now; // NOTE: Should I put this here as well since it might not have actually been parsed?
return false; return null;
} }
@ -200,34 +210,65 @@ namespace API.Services
/// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param> /// <param name="forceUpdate">Force updating cover image even if underlying file has not been modified or chapter already has a cover image</param>
public async Task RefreshMetadata(int libraryId, bool forceUpdate = false) public async Task RefreshMetadata(int libraryId, bool forceUpdate = false)
{ {
var sw = Stopwatch.StartNew(); var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.None);
var library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId); _logger.LogInformation("[MetadataService] Beginning metadata refresh of {LibraryName}", library.Name);
// PERF: See if we can break this up into multiple threads that process 20 series at a time then save so we can reduce amount of memory used var chunkInfo = await _unitOfWork.SeriesRepository.GetChunkInfo(library.Id);
_logger.LogInformation("Beginning metadata refresh of {LibraryName}", library.Name); var stopwatch = Stopwatch.StartNew();
foreach (var series in library.Series) var totalTime = 0L;
_logger.LogDebug($"[MetadataService] Refreshing Library {library.Name}. Total Items: {chunkInfo.TotalSize}. Total Chunks: {chunkInfo.TotalChunks} with {chunkInfo.ChunkSize} size.");
// This technically does
for (var chunk = 1; chunk <= chunkInfo.TotalChunks; chunk++)
{ {
var volumeUpdated = false; totalTime += stopwatch.ElapsedMilliseconds;
foreach (var volume in series.Volumes) stopwatch.Restart();
{ _logger.LogDebug($"[MetadataService] Processing chunk {chunk} / {chunkInfo.TotalChunks} with size {chunkInfo.ChunkSize} Series ({chunk * chunkInfo.ChunkSize} - {(chunk + 1) * chunkInfo.ChunkSize}");
var chapterUpdated = false; var nonLibrarySeries = await _unitOfWork.SeriesRepository.GetFullSeriesForLibraryIdAsync(library.Id,
foreach (var chapter in volume.Chapters) new UserParams()
{ {
chapterUpdated = UpdateMetadata(chapter, forceUpdate); PageNumber = chunk,
PageSize = chunkInfo.ChunkSize
});
_logger.LogDebug($"[MetadataService] Fetched {nonLibrarySeries.Count} series for refresh");
Parallel.ForEach(nonLibrarySeries, series =>
{
_logger.LogDebug("[MetadataService] Processing series {SeriesName}", series.OriginalName);
var volumeUpdated = false;
foreach (var volume in series.Volumes)
{
var chapterUpdated = false;
foreach (var chapter in volume.Chapters)
{
chapterUpdated = UpdateMetadata(chapter, forceUpdate);
}
volumeUpdated = UpdateMetadata(volume, chapterUpdated || forceUpdate);
} }
volumeUpdated = UpdateMetadata(volume, chapterUpdated || forceUpdate); UpdateMetadata(series, volumeUpdated || forceUpdate);
});
if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
{
_logger.LogInformation(
"[MetadataService] Processed {SeriesStart} - {SeriesEnd} out of {TotalSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
chunk * chunkInfo.ChunkSize, (chunk * chunkInfo.ChunkSize) + nonLibrarySeries.Count, chunkInfo.TotalSize, stopwatch.ElapsedMilliseconds, library.Name);
foreach (var series in nonLibrarySeries)
{
await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadata, MessageFactory.RefreshMetadataEvent(library.Id, series.Id));
}
}
else
{
_logger.LogInformation(
"[MetadataService] Processed {SeriesStart} - {SeriesEnd} out of {TotalSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
chunk * chunkInfo.ChunkSize, (chunk * chunkInfo.ChunkSize) + nonLibrarySeries.Count, chunkInfo.TotalSize, stopwatch.ElapsedMilliseconds, library.Name);
} }
UpdateMetadata(series, volumeUpdated || forceUpdate);
_unitOfWork.SeriesRepository.Update(series);
} }
_logger.LogInformation("[MetadataService] Updated metadata for {SeriesNumber} series in library {LibraryName} in {ElapsedMilliseconds} milliseconds total", chunkInfo.TotalSize, library.Name, totalTime);
if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
{
_logger.LogInformation("Updated metadata for {LibraryName} in {ElapsedMilliseconds} milliseconds", library.Name, sw.ElapsedMilliseconds);
}
} }
@ -239,15 +280,13 @@ namespace API.Services
public async Task RefreshMetadataForSeries(int libraryId, int seriesId, bool forceUpdate = false) public async Task RefreshMetadataForSeries(int libraryId, int seriesId, bool forceUpdate = false)
{ {
var sw = Stopwatch.StartNew(); var sw = Stopwatch.StartNew();
var library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId); var series = await _unitOfWork.SeriesRepository.GetFullSeriesForSeriesIdAsync(seriesId);
var series = library.Series.SingleOrDefault(s => s.Id == seriesId);
if (series == null) if (series == null)
{ {
_logger.LogError("Series {SeriesId} was not found on Library {LibraryName}", seriesId, libraryId); _logger.LogError("[MetadataService] Series {SeriesId} was not found on Library {LibraryId}", seriesId, libraryId);
return; return;
} }
_logger.LogInformation("Beginning metadata refresh of {SeriesName}", series.Name); _logger.LogInformation("[MetadataService] Beginning metadata refresh of {SeriesName}", series.Name);
var volumeUpdated = false; var volumeUpdated = false;
foreach (var volume in series.Volumes) foreach (var volume in series.Volumes)
{ {
@ -261,14 +300,14 @@ namespace API.Services
} }
UpdateMetadata(series, volumeUpdated || forceUpdate); UpdateMetadata(series, volumeUpdated || forceUpdate);
_unitOfWork.SeriesRepository.Update(series);
if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync()) if (_unitOfWork.HasChanges() && await _unitOfWork.CommitAsync())
{ {
_logger.LogInformation("Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds); await _messageHub.Clients.All.SendAsync(SignalREvents.RefreshMetadata, MessageFactory.RefreshMetadataEvent(series.LibraryId, series.Id));
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.RefreshMetadataEvent(libraryId, seriesId));
} }
_logger.LogInformation("[MetadataService] Updated metadata for {SeriesName} in {ElapsedMilliseconds} milliseconds", series.Name, sw.ElapsedMilliseconds);
} }
} }
} }

View File

@ -1,4 +1,5 @@
using System.IO; using System;
using System.IO;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Entities.Enums; using API.Entities.Enums;
@ -52,27 +53,27 @@ namespace API.Services
var scanLibrarySetting = setting; var scanLibrarySetting = setting;
_logger.LogDebug("Scheduling Scan Library Task for {Setting}", scanLibrarySetting); _logger.LogDebug("Scheduling Scan Library Task for {Setting}", scanLibrarySetting);
RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(),
() => CronConverter.ConvertToCronNotation(scanLibrarySetting)); () => CronConverter.ConvertToCronNotation(scanLibrarySetting), TimeZoneInfo.Local);
} }
else else
{ {
RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), Cron.Daily); RecurringJob.AddOrUpdate("scan-libraries", () => _scannerService.ScanLibraries(), Cron.Daily, TimeZoneInfo.Local);
} }
setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Result.Value; setting = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.TaskBackup)).Result.Value;
if (setting != null) if (setting != null)
{ {
_logger.LogDebug("Scheduling Backup Task for {Setting}", setting); _logger.LogDebug("Scheduling Backup Task for {Setting}", setting);
RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting)); RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), () => CronConverter.ConvertToCronNotation(setting), TimeZoneInfo.Local);
} }
else else
{ {
RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), Cron.Weekly); RecurringJob.AddOrUpdate("backup", () => _backupService.BackupDatabase(), Cron.Weekly, TimeZoneInfo.Local);
} }
RecurringJob.AddOrUpdate("cleanup", () => _cleanupService.Cleanup(), Cron.Daily); RecurringJob.AddOrUpdate("cleanup", () => _cleanupService.Cleanup(), Cron.Daily, TimeZoneInfo.Local);
RecurringJob.AddOrUpdate("check-for-updates", () => _scannerService.ScanLibraries(), Cron.Daily); RecurringJob.AddOrUpdate("check-for-updates", () => _scannerService.ScanLibraries(), Cron.Daily, TimeZoneInfo.Local);
} }
#region StatsTasks #region StatsTasks
@ -88,7 +89,7 @@ namespace API.Services
} }
_logger.LogDebug("Scheduling stat collection daily"); _logger.LogDebug("Scheduling stat collection daily");
RecurringJob.AddOrUpdate(SendDataTask, () => _statsService.CollectAndSendStatsData(), Cron.Daily); RecurringJob.AddOrUpdate(SendDataTask, () => _statsService.CollectAndSendStatsData(), Cron.Daily, TimeZoneInfo.Local);
} }
public void CancelStatsTasks() public void CancelStatsTasks()
@ -111,7 +112,7 @@ namespace API.Services
public void ScheduleUpdaterTasks() public void ScheduleUpdaterTasks()
{ {
_logger.LogInformation("Scheduling Auto-Update tasks"); _logger.LogInformation("Scheduling Auto-Update tasks");
RecurringJob.AddOrUpdate("check-updates", () => CheckForUpdate(), Cron.Weekly); RecurringJob.AddOrUpdate("check-updates", () => CheckForUpdate(), Cron.Weekly, TimeZoneInfo.Local);
} }
#endregion #endregion
@ -119,7 +120,7 @@ namespace API.Services
public void ScanLibrary(int libraryId, bool forceUpdate = false) public void ScanLibrary(int libraryId, bool forceUpdate = false)
{ {
_logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId); _logger.LogInformation("Enqueuing library scan for: {LibraryId}", libraryId);
BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId, forceUpdate)); BackgroundJob.Enqueue(() => _scannerService.ScanLibrary(libraryId));
// When we do a scan, force cache to re-unpack in case page numbers change // When we do a scan, force cache to re-unpack in case page numbers change
BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheDirectory()); BackgroundJob.Enqueue(() => _cleanupService.CleanupCacheDirectory());
} }
@ -141,7 +142,7 @@ namespace API.Services
BackgroundJob.Enqueue(() => DirectoryService.ClearDirectory(tempDirectory)); BackgroundJob.Enqueue(() => DirectoryService.ClearDirectory(tempDirectory));
} }
public void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = false) public void RefreshSeriesMetadata(int libraryId, int seriesId, bool forceUpdate = true)
{ {
_logger.LogInformation("Enqueuing series metadata refresh for: {SeriesId}", seriesId); _logger.LogInformation("Enqueuing series metadata refresh for: {SeriesId}", seriesId);
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate)); BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate));
@ -150,7 +151,7 @@ namespace API.Services
public void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false) public void ScanSeries(int libraryId, int seriesId, bool forceUpdate = false)
{ {
_logger.LogInformation("Enqueuing series scan for: {SeriesId}", seriesId); _logger.LogInformation("Enqueuing series scan for: {SeriesId}", seriesId);
BackgroundJob.Enqueue(() => _scannerService.ScanSeries(libraryId, seriesId, forceUpdate, CancellationToken.None)); BackgroundJob.Enqueue(() => _scannerService.ScanSeries(libraryId, seriesId, CancellationToken.None));
} }
public void BackupDatabase() public void BackupDatabase()

View File

@ -125,7 +125,7 @@ namespace API.Services.Tasks
_directoryService.CopyFilesToDirectory( _directoryService.CopyFilesToDirectory(
chapterImages.Select(s => Path.Join(DirectoryService.CoverImageDirectory, s)), outputTempDir); chapterImages.Select(s => Path.Join(DirectoryService.CoverImageDirectory, s)), outputTempDir);
} }
catch (IOException e) catch (IOException)
{ {
// Swallow exception. This can be a duplicate cover being copied as chapter and volumes can share same file. // Swallow exception. This can be a duplicate cover being copied as chapter and volumes can share same file.
} }

View File

@ -1,11 +1,9 @@
using System.IO; using System.IO;
using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Interfaces; using API.Interfaces;
using API.Interfaces.Services; using API.Interfaces.Services;
using Hangfire; using Hangfire;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using NetVips;
namespace API.Services.Tasks namespace API.Services.Tasks
{ {

View File

@ -7,9 +7,11 @@ using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Comparators; using API.Comparators;
using API.Data; using API.Data;
using API.Data.Repositories;
using API.Entities; using API.Entities;
using API.Entities.Enums; using API.Entities.Enums;
using API.Extensions; using API.Extensions;
using API.Helpers;
using API.Interfaces; using API.Interfaces;
using API.Interfaces.Services; using API.Interfaces.Services;
using API.Parser; using API.Parser;
@ -46,81 +48,114 @@ namespace API.Services.Tasks
[DisableConcurrentExecution(timeoutInSeconds: 360)] [DisableConcurrentExecution(timeoutInSeconds: 360)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public async Task ScanSeries(int libraryId, int seriesId, bool forceUpdate, CancellationToken token) public async Task ScanSeries(int libraryId, int seriesId, CancellationToken token)
{ {
var sw = new Stopwatch();
var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId); var files = await _unitOfWork.SeriesRepository.GetFilesForSeries(seriesId);
var series = await _unitOfWork.SeriesRepository.GetSeriesByIdAsync(seriesId); var series = await _unitOfWork.SeriesRepository.GetFullSeriesForSeriesIdAsync(seriesId);
var library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId, seriesId); var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new[] {seriesId});
var dirs = DirectoryService.FindHighestDirectoriesFromFiles(library.Folders.Select(f => f.Path), files.Select(f => f.FilePath).ToList()); var library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders);
var chapterIds = await _unitOfWork.SeriesRepository.GetChapterIdsForSeriesAsync(new []{ seriesId }); var folderPaths = library.Folders.Select(f => f.Path).ToList();
var dirs = DirectoryService.FindHighestDirectoriesFromFiles(folderPaths, files.Select(f => f.FilePath).ToList());
_logger.LogInformation("Beginning file scan on {SeriesName}", series.Name); _logger.LogInformation("Beginning file scan on {SeriesName}", series.Name);
var scanner = new ParseScannedFiles(_bookService, _logger); var scanner = new ParseScannedFiles(_bookService, _logger);
var parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles, out var scanElapsedTime); var parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles, out var scanElapsedTime);
// If a root level folder scan occurs, then multiple series gets passed in and thus we get a unique constraint issue // Remove any parsedSeries keys that don't belong to our series. This can occur when users store 2 series in the same folder
// Hence we clear out anything but what we selected for RemoveParsedInfosNotForSeries(parsedSeries, series);
var firstSeries = library.Series.FirstOrDefault();
// If nothing was found, first validate any of the files still exist. If they don't then we have a deletion and can skip the rest of the logic flow
if (parsedSeries.Count == 0)
{
var anyFilesExist =
(await _unitOfWork.SeriesRepository.GetFilesForSeries(series.Id)).Any(m => File.Exists(m.FilePath));
if (!anyFilesExist)
{
try
{
_unitOfWork.SeriesRepository.Remove(series);
await CommitAndSend(totalFiles, parsedSeries, sw, scanElapsedTime, series);
}
catch (Exception ex)
{
_logger.LogCritical(ex, "There was an error during ScanSeries to delete the series");
await _unitOfWork.RollbackAsync();
}
}
else
{
// We need to do an additional check for an edge case: If the scan ran and the files do not match the existing Series name, then it is very likely,
// the files have crap naming and if we don't correct, the series will get deleted due to the parser not being able to fallback onto folder parsing as the root
// is the series folder.
var existingFolder = dirs.Keys.FirstOrDefault(key => key.Contains(series.OriginalName));
if (dirs.Keys.Count == 1 && !string.IsNullOrEmpty(existingFolder))
{
dirs = new Dictionary<string, string>();
var path = Directory.GetParent(existingFolder)?.FullName;
if (!folderPaths.Contains(path) || !folderPaths.Any(p => p.Contains(path ?? string.Empty)))
{
_logger.LogInformation("[ScanService] Aborted: {SeriesName} has bad naming convention and sits at root of library. Cannot scan series without deletion occuring. Correct file names to have Series Name within it or perform Scan Library", series.OriginalName);
return;
}
if (!string.IsNullOrEmpty(path))
{
dirs[path] = string.Empty;
}
}
_logger.LogInformation("{SeriesName} has bad naming convention, forcing rescan at a higher directory", series.OriginalName);
scanner = new ParseScannedFiles(_bookService, _logger);
parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles2, out var scanElapsedTime2);
totalFiles += totalFiles2;
scanElapsedTime += scanElapsedTime2;
RemoveParsedInfosNotForSeries(parsedSeries, series);
}
}
// At this point, parsedSeries will have at least one key and we can perform the update. If it still doesn't, just return and don't do anything
if (parsedSeries.Count == 0) return;
try
{
UpdateSeries(series, parsedSeries);
await CommitAndSend(totalFiles, parsedSeries, sw, scanElapsedTime, series);
}
catch (Exception ex)
{
_logger.LogCritical(ex, "There was an error during ScanSeries to update the series");
await _unitOfWork.RollbackAsync();
}
// Tell UI that this series is done
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.ScanSeriesEvent(seriesId, series.Name),
cancellationToken: token);
await CleanupDbEntities();
BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds));
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, series.Id, false));
}
private static void RemoveParsedInfosNotForSeries(Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries, Series series)
{
var keys = parsedSeries.Keys; var keys = parsedSeries.Keys;
foreach (var key in keys.Where(key => !firstSeries.NameInParserInfo(parsedSeries[key].FirstOrDefault()) || firstSeries?.Format != key.Format)) foreach (var key in keys.Where(key =>
!series.NameInParserInfo(parsedSeries[key].FirstOrDefault()) || series.Format != key.Format))
{ {
parsedSeries.Remove(key); parsedSeries.Remove(key);
} }
}
if (parsedSeries.Count == 0) private async Task CommitAndSend(int totalFiles,
Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries, Stopwatch sw, long scanElapsedTime, Series series)
{
if (_unitOfWork.HasChanges())
{ {
// We need to do an additional check for an edge case: If the scan ran and the files do not match the existing Series name, then it is very likely, await _unitOfWork.CommitAsync();
// the files have crap naming and if we don't correct, the series will get deleted due to the parser not being able to fallback onto folder parsing as the root _logger.LogInformation(
// is the series folder. "Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {SeriesName}",
var existingFolder = dirs.Keys.FirstOrDefault(key => key.Contains(series.OriginalName)); totalFiles, parsedSeries.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, series.Name);
if (dirs.Keys.Count == 1 && !string.IsNullOrEmpty(existingFolder))
{
dirs = new Dictionary<string, string>();
var path = Path.GetPathRoot(existingFolder);
if (!string.IsNullOrEmpty(path))
{
dirs[path] = string.Empty;
}
}
_logger.LogDebug("{SeriesName} has bad naming convention, forcing rescan at a higher directory.", series.OriginalName);
scanner = new ParseScannedFiles(_bookService, _logger);
parsedSeries = scanner.ScanLibrariesForSeries(library.Type, dirs.Keys, out var totalFiles2, out var scanElapsedTime2);
totalFiles += totalFiles2;
scanElapsedTime += scanElapsedTime2;
// If a root level folder scan occurs, then multiple series gets passed in and thus we get a unique constraint issue
// Hence we clear out anything but what we selected for
firstSeries = library.Series.FirstOrDefault();
keys = parsedSeries.Keys;
foreach (var key in keys.Where(key => !firstSeries.NameInParserInfo(parsedSeries[key].FirstOrDefault()) || firstSeries?.Format != key.Format))
{
parsedSeries.Remove(key);
}
} }
var sw = new Stopwatch();
UpdateLibrary(library, parsedSeries);
_unitOfWork.LibraryRepository.Update(library);
if (await _unitOfWork.CommitAsync())
{
_logger.LogInformation(
"Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {SeriesName}",
totalFiles, parsedSeries.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, series.Name);
await CleanupDbEntities();
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadataForSeries(libraryId, seriesId, forceUpdate));
BackgroundJob.Enqueue(() => _cacheService.CleanupChapters(chapterIds));
// Tell UI that this series is done
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanSeries, MessageFactory.ScanSeriesEvent(seriesId), cancellationToken: token);
}
else
{
_logger.LogCritical(
"There was a critical error that resulted in a failed scan. Please check logs and rescan");
await _unitOfWork.RollbackAsync();
}
} }
@ -132,7 +167,7 @@ namespace API.Services.Tasks
var libraries = await _unitOfWork.LibraryRepository.GetLibrariesAsync(); var libraries = await _unitOfWork.LibraryRepository.GetLibrariesAsync();
foreach (var lib in libraries) foreach (var lib in libraries)
{ {
await ScanLibrary(lib.Id, false); await ScanLibrary(lib.Id);
} }
_logger.LogInformation("Scan of All Libraries Finished"); _logger.LogInformation("Scan of All Libraries Finished");
} }
@ -144,24 +179,26 @@ namespace API.Services.Tasks
/// ie) all entities will be rechecked for new cover images and comicInfo.xml changes /// ie) all entities will be rechecked for new cover images and comicInfo.xml changes
/// </summary> /// </summary>
/// <param name="libraryId"></param> /// <param name="libraryId"></param>
/// <param name="forceUpdate"></param>
[DisableConcurrentExecution(360)] [DisableConcurrentExecution(360)]
[AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)] [AutomaticRetry(Attempts = 0, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public async Task ScanLibrary(int libraryId, bool forceUpdate) public async Task ScanLibrary(int libraryId)
{ {
Library library; Library library;
try try
{ {
library = await _unitOfWork.LibraryRepository.GetFullLibraryForIdAsync(libraryId); library = await _unitOfWork.LibraryRepository.GetLibraryForIdAsync(libraryId, LibraryIncludes.Folders);
} }
catch (Exception ex) catch (Exception ex)
{ {
// This usually only fails if user is not authenticated. // This usually only fails if user is not authenticated.
_logger.LogError(ex, "There was an issue fetching Library {LibraryId}", libraryId); _logger.LogError(ex, "[ScannerService] There was an issue fetching Library {LibraryId}", libraryId);
return; return;
} }
_logger.LogInformation("Beginning file scan on {LibraryName}", library.Name); _logger.LogInformation("[ScannerService] Beginning file scan on {LibraryName}", library.Name);
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
MessageFactory.ScanLibraryProgressEvent(libraryId, 0));
var scanner = new ParseScannedFiles(_bookService, _logger); var scanner = new ParseScannedFiles(_bookService, _logger);
var series = scanner.ScanLibrariesForSeries(library.Type, library.Folders.Select(fp => fp.Path), out var totalFiles, out var scanElapsedTime); var series = scanner.ScanLibrariesForSeries(library.Type, library.Folders.Select(fp => fp.Path), out var totalFiles, out var scanElapsedTime);
@ -171,25 +208,27 @@ namespace API.Services.Tasks
} }
var sw = Stopwatch.StartNew(); var sw = Stopwatch.StartNew();
UpdateLibrary(library, series); await UpdateLibrary(library, series);
library.LastScanned = DateTime.Now;
_unitOfWork.LibraryRepository.Update(library); _unitOfWork.LibraryRepository.Update(library);
if (await _unitOfWork.CommitAsync()) if (await _unitOfWork.CommitAsync())
{ {
_logger.LogInformation( _logger.LogInformation(
"Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}", "[ScannerService] Processed {TotalFiles} files and {ParsedSeriesCount} series in {ElapsedScanTime} milliseconds for {LibraryName}",
totalFiles, series.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, library.Name); totalFiles, series.Keys.Count, sw.ElapsedMilliseconds + scanElapsedTime, library.Name);
} }
else else
{ {
_logger.LogCritical( _logger.LogCritical(
"There was a critical error that resulted in a failed scan. Please check logs and rescan"); "[ScannerService] There was a critical error that resulted in a failed scan. Please check logs and rescan");
} }
await CleanupAbandonedChapters(); await CleanupDbEntities();
BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, forceUpdate)); BackgroundJob.Enqueue(() => _metadataService.RefreshMetadata(libraryId, false));
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibrary, MessageFactory.ScanLibraryEvent(libraryId, "complete")); await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
MessageFactory.ScanLibraryProgressEvent(libraryId, 100));
} }
/// <summary> /// <summary>
@ -212,78 +251,171 @@ namespace API.Services.Tasks
_logger.LogInformation("Removed {Count} abandoned collection tags", cleanedUp); _logger.LogInformation("Removed {Count} abandoned collection tags", cleanedUp);
} }
private void UpdateLibrary(Library library, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries) private async Task UpdateLibrary(Library library, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
{ {
if (parsedSeries == null) throw new ArgumentNullException(nameof(parsedSeries)); if (parsedSeries == null) return;
// First, remove any series that are not in parsedSeries list // Library contains no Series, so we need to fetch series in groups of ChunkSize
var missingSeries = FindSeriesNotOnDisk(library.Series, parsedSeries).ToList(); var chunkInfo = await _unitOfWork.SeriesRepository.GetChunkInfo(library.Id);
library.Series = RemoveMissingSeries(library.Series, missingSeries, out var removeCount); var stopwatch = Stopwatch.StartNew();
if (removeCount > 0) var totalTime = 0L;
// Update existing series
_logger.LogDebug("[ScannerService] Updating existing series");
for (var chunk = 1; chunk <= chunkInfo.TotalChunks; chunk++)
{ {
_logger.LogInformation("Removed {RemoveMissingSeries} series that are no longer on disk:", removeCount); if (chunkInfo.TotalChunks == 0) continue;
foreach (var s in missingSeries) totalTime += stopwatch.ElapsedMilliseconds;
{ stopwatch.Restart();
_logger.LogDebug("Removed {SeriesName} ({Format})", s.Name, s.Format); _logger.LogDebug($"[ScannerService] Processing chunk {chunk} / {chunkInfo.TotalChunks} with size {chunkInfo.ChunkSize} Series ({chunk * chunkInfo.ChunkSize} - {(chunk + 1) * chunkInfo.ChunkSize}");
} var nonLibrarySeries = await _unitOfWork.SeriesRepository.GetFullSeriesForLibraryIdAsync(library.Id, new UserParams()
{
PageNumber = chunk,
PageSize = chunkInfo.ChunkSize
});
// First, remove any series that are not in parsedSeries list
var missingSeries = FindSeriesNotOnDisk(nonLibrarySeries, parsedSeries).ToList();
foreach (var missing in missingSeries)
{
_unitOfWork.SeriesRepository.Remove(missing);
}
var cleanedSeries = RemoveMissingSeries(nonLibrarySeries, missingSeries, out var removeCount);
if (removeCount > 0)
{
_logger.LogInformation("[ScannerService] Removed {RemoveMissingSeries} series that are no longer on disk:", removeCount);
foreach (var s in missingSeries)
{
_logger.LogDebug("[ScannerService] Removed {SeriesName} ({Format})", s.Name, s.Format);
}
}
// Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series
var librarySeries = cleanedSeries.ToList();
Parallel.ForEach(librarySeries, (series) =>
{
UpdateSeries(series, parsedSeries);
});
await _unitOfWork.CommitAsync();
_logger.LogInformation(
"[ScannerService] Processed {SeriesStart} - {SeriesEnd} series in {ElapsedScanTime} milliseconds for {LibraryName}",
chunk * chunkInfo.ChunkSize, (chunk * chunkInfo.ChunkSize) + nonLibrarySeries.Count, totalTime, library.Name);
// Emit any series removed
foreach (var missing in missingSeries)
{
await _messageHub.Clients.All.SendAsync(SignalREvents.SeriesRemoved, MessageFactory.SeriesRemovedEvent(missing.Id, missing.Name, library.Id));
}
var progress = Math.Max(0, Math.Min(100, ((chunk + 1F) * chunkInfo.ChunkSize) / chunkInfo.TotalSize));
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
MessageFactory.ScanLibraryProgressEvent(library.Id, progress));
} }
// Add new series that have parsedInfos // Add new series that have parsedInfos
_logger.LogDebug("[ScannerService] Adding new series");
var newSeries = new List<Series>();
var allSeries = (await _unitOfWork.SeriesRepository.GetSeriesForLibraryIdAsync(library.Id)).ToList();
foreach (var (key, infos) in parsedSeries) foreach (var (key, infos) in parsedSeries)
{ {
// Key is normalized already // Key is normalized already
Series existingSeries; Series existingSeries;
try try
{ {
existingSeries = library.Series.SingleOrDefault(s => existingSeries = allSeries.SingleOrDefault(s =>
(s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName) (s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName)
&& (s.Format == key.Format || s.Format == MangaFormat.Unknown)); && (s.Format == key.Format || s.Format == MangaFormat.Unknown));
} }
catch (Exception e) catch (Exception e)
{ {
_logger.LogCritical(e, "There are multiple series that map to normalized key {Key}. You can manually delete the entity via UI and rescan to fix it", key.NormalizedName); _logger.LogCritical(e, "[ScannerService] There are multiple series that map to normalized key {Key}. You can manually delete the entity via UI and rescan to fix it. This will be skipped", key.NormalizedName);
var duplicateSeries = library.Series.Where(s => s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName).ToList(); var duplicateSeries = allSeries.Where(s => s.NormalizedName == key.NormalizedName || Parser.Parser.Normalize(s.OriginalName) == key.NormalizedName).ToList();
foreach (var series in duplicateSeries) foreach (var series in duplicateSeries)
{ {
_logger.LogCritical("{Key} maps with {Series}", key.Name, series.OriginalName); _logger.LogCritical("[ScannerService] Duplicate Series Found: {Key} maps with {Series}", key.Name, series.OriginalName);
} }
continue; continue;
} }
if (existingSeries == null)
{
existingSeries = DbFactory.Series(infos[0].Series);
existingSeries.Format = key.Format;
library.Series.Add(existingSeries);
}
existingSeries.NormalizedName = Parser.Parser.Normalize(existingSeries.Name); if (existingSeries != null) continue;
existingSeries.OriginalName ??= infos[0].Series;
existingSeries.Metadata ??= DbFactory.SeriesMetadata(new List<CollectionTag>()); existingSeries = DbFactory.Series(infos[0].Series);
existingSeries.Format = key.Format; existingSeries.Format = key.Format;
newSeries.Add(existingSeries);
} }
// Now, we only have to deal with series that exist on disk. Let's recalculate the volumes for each series var i = 0;
var librarySeries = library.Series.ToList(); foreach(var series in newSeries)
Parallel.ForEach(librarySeries, (series) =>
{ {
try try
{ {
_logger.LogInformation("Processing series {SeriesName}", series.OriginalName); _logger.LogDebug("[ScannerService] Processing series {SeriesName}", series.OriginalName);
UpdateVolumes(series, ParseScannedFiles.GetInfosByName(parsedSeries, series).ToArray()); UpdateVolumes(series, ParseScannedFiles.GetInfosByName(parsedSeries, series).ToArray());
series.Pages = series.Volumes.Sum(v => v.Pages); series.Pages = series.Volumes.Sum(v => v.Pages);
} series.LibraryId = library.Id; // We have to manually set this since we aren't adding the series to the Library's series.
catch (Exception ex) _unitOfWork.SeriesRepository.Attach(series);
{ if (await _unitOfWork.CommitAsync())
_logger.LogError(ex, "There was an exception updating volumes for {SeriesName}", series.Name); {
} _logger.LogInformation(
}); "[ScannerService] Added {NewSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
newSeries.Count, stopwatch.ElapsedMilliseconds, library.Name);
// Last step, remove any series that have no pages // Inform UI of new series added
library.Series = library.Series.Where(s => s.Pages > 0).ToList(); await _messageHub.Clients.All.SendAsync(SignalREvents.SeriesAdded, MessageFactory.SeriesAddedEvent(series.Id, series.Name, library.Id));
var progress = Math.Max(0F, Math.Min(100F, i * 1F / newSeries.Count));
await _messageHub.Clients.All.SendAsync(SignalREvents.ScanLibraryProgress,
MessageFactory.ScanLibraryProgressEvent(library.Id, progress));
}
else
{
// This is probably not needed. Better to catch the exception.
_logger.LogCritical(
"[ScannerService] There was a critical error that resulted in a failed scan. Please check logs and rescan");
}
i++;
}
catch (Exception ex)
{
_logger.LogError(ex, "[ScannerService] There was an exception updating volumes for {SeriesName}", series.Name);
}
}
_logger.LogDebug(
"[ScannerService] Added {NewSeries} series in {ElapsedScanTime} milliseconds for {LibraryName}",
newSeries.Count, stopwatch.ElapsedMilliseconds, library.Name);
} }
public IEnumerable<Series> FindSeriesNotOnDisk(ICollection<Series> existingSeries, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries) private void UpdateSeries(Series series, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
{
try
{
_logger.LogInformation("[ScannerService] Processing series {SeriesName}", series.OriginalName);
var parsedInfos = ParseScannedFiles.GetInfosByName(parsedSeries, series).ToArray();
UpdateVolumes(series, parsedInfos);
series.Pages = series.Volumes.Sum(v => v.Pages);
series.NormalizedName = Parser.Parser.Normalize(series.Name);
series.Metadata ??= DbFactory.SeriesMetadata(new List<CollectionTag>());
if (series.Format == MangaFormat.Unknown)
{
series.Format = parsedInfos[0].Format;
}
series.OriginalName ??= parsedInfos[0].Series;
}
catch (Exception ex)
{
_logger.LogError(ex, "[ScannerService] There was an exception updating volumes for {SeriesName}", series.Name);
}
}
public static IEnumerable<Series> FindSeriesNotOnDisk(IEnumerable<Series> existingSeries, Dictionary<ParsedSeries, List<ParserInfo>> parsedSeries)
{ {
var foundSeries = parsedSeries.Select(s => s.Key.Name).ToList(); var foundSeries = parsedSeries.Select(s => s.Key.Name).ToList();
return existingSeries.Where(es => !es.NameInList(foundSeries) && !SeriesHasMatchingParserInfoFormat(es, parsedSeries)); return existingSeries.Where(es => !es.NameInList(foundSeries) && !SeriesHasMatchingParserInfoFormat(es, parsedSeries));
@ -332,7 +464,7 @@ namespace API.Services.Tasks
/// <param name="missingSeries">Series not found on disk or can't be parsed</param> /// <param name="missingSeries">Series not found on disk or can't be parsed</param>
/// <param name="removeCount"></param> /// <param name="removeCount"></param>
/// <returns>the updated existingSeries</returns> /// <returns>the updated existingSeries</returns>
public static ICollection<Series> RemoveMissingSeries(ICollection<Series> existingSeries, IEnumerable<Series> missingSeries, out int removeCount) public static IList<Series> RemoveMissingSeries(IList<Series> existingSeries, IEnumerable<Series> missingSeries, out int removeCount)
{ {
var existingCount = existingSeries.Count; var existingCount = existingSeries.Count;
var missingList = missingSeries.ToList(); var missingList = missingSeries.ToList();
@ -351,7 +483,7 @@ namespace API.Services.Tasks
var startingVolumeCount = series.Volumes.Count; var startingVolumeCount = series.Volumes.Count;
// Add new volumes and update chapters per volume // Add new volumes and update chapters per volume
var distinctVolumes = parsedInfos.DistinctVolumes(); var distinctVolumes = parsedInfos.DistinctVolumes();
_logger.LogDebug("Updating {DistinctVolumes} volumes on {SeriesName}", distinctVolumes.Count, series.Name); _logger.LogDebug("[ScannerService] Updating {DistinctVolumes} volumes on {SeriesName}", distinctVolumes.Count, series.Name);
foreach (var volumeNumber in distinctVolumes) foreach (var volumeNumber in distinctVolumes)
{ {
var volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber); var volume = series.Volumes.SingleOrDefault(s => s.Name == volumeNumber);
@ -359,9 +491,10 @@ namespace API.Services.Tasks
{ {
volume = DbFactory.Volume(volumeNumber); volume = DbFactory.Volume(volumeNumber);
series.Volumes.Add(volume); series.Volumes.Add(volume);
_unitOfWork.VolumeRepository.Add(volume);
} }
_logger.LogDebug("Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name); _logger.LogDebug("[ScannerService] Parsing {SeriesName} - Volume {VolumeNumber}", series.Name, volume.Name);
var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray(); var infos = parsedInfos.Where(p => p.Volumes == volumeNumber).ToArray();
UpdateChapters(volume, infos); UpdateChapters(volume, infos);
volume.Pages = volume.Chapters.Sum(c => c.Pages); volume.Pages = volume.Chapters.Sum(c => c.Pages);
@ -371,23 +504,26 @@ namespace API.Services.Tasks
var nonDeletedVolumes = series.Volumes.Where(v => parsedInfos.Select(p => p.Volumes).Contains(v.Name)).ToList(); var nonDeletedVolumes = series.Volumes.Where(v => parsedInfos.Select(p => p.Volumes).Contains(v.Name)).ToList();
if (series.Volumes.Count != nonDeletedVolumes.Count) if (series.Volumes.Count != nonDeletedVolumes.Count)
{ {
_logger.LogDebug("Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name", _logger.LogDebug("[ScannerService] Removed {Count} volumes from {SeriesName} where parsed infos were not mapping with volume name",
(series.Volumes.Count - nonDeletedVolumes.Count), series.Name); (series.Volumes.Count - nonDeletedVolumes.Count), series.Name);
var deletedVolumes = series.Volumes.Except(nonDeletedVolumes); var deletedVolumes = series.Volumes.Except(nonDeletedVolumes);
foreach (var volume in deletedVolumes) foreach (var volume in deletedVolumes)
{ {
var file = volume.Chapters.FirstOrDefault()?.Files.FirstOrDefault()?.FilePath ?? "no files"; var file = volume.Chapters.FirstOrDefault()?.Files?.FirstOrDefault()?.FilePath ?? "";
if (new FileInfo(file).Exists) if (!string.IsNullOrEmpty(file) && File.Exists(file))
{ {
_logger.LogError("Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}", file); _logger.LogError(
} "[ScannerService] Volume cleanup code was trying to remove a volume with a file still existing on disk. File: {File}",
_logger.LogDebug("Removed {SeriesName} - Volume {Volume}: {File}", series.Name, volume.Name, file); file);
}
_logger.LogDebug("[ScannerService] Removed {SeriesName} - Volume {Volume}: {File}", series.Name, volume.Name, file);
} }
series.Volumes = nonDeletedVolumes; series.Volumes = nonDeletedVolumes;
} }
_logger.LogDebug("Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}", _logger.LogDebug("[ScannerService] Updated {SeriesName} volumes from {StartingVolumeCount} to {VolumeCount}",
series.Name, startingVolumeCount, series.Volumes.Count); series.Name, startingVolumeCount, series.Volumes.Count);
} }
@ -417,7 +553,7 @@ namespace API.Services.Tasks
if (chapter == null) if (chapter == null)
{ {
_logger.LogDebug( _logger.LogDebug(
"Adding new chapter, {Series} - Vol {Volume} Ch {Chapter}", info.Series, info.Volumes, info.Chapters); "[ScannerService] Adding new chapter, {Series} - Vol {Volume} Ch {Chapter}", info.Series, info.Volumes, info.Chapters);
volume.Chapters.Add(DbFactory.Chapter(info)); volume.Chapters.Add(DbFactory.Chapter(info));
} }
else else
@ -454,7 +590,7 @@ namespace API.Services.Tasks
{ {
if (existingChapter.Files.Count == 0 || !parsedInfos.HasInfo(existingChapter)) if (existingChapter.Files.Count == 0 || !parsedInfos.HasInfo(existingChapter))
{ {
_logger.LogDebug("Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series); _logger.LogDebug("[ScannerService] Removed chapter {Chapter} for Volume {VolumeNumber} on {SeriesName}", existingChapter.Range, volume.Name, parsedInfos[0].Series);
volume.Chapters.Remove(existingChapter); volume.Chapters.Remove(existingChapter);
} }
else else
@ -470,42 +606,47 @@ namespace API.Services.Tasks
private MangaFile CreateMangaFile(ParserInfo info) private MangaFile CreateMangaFile(ParserInfo info)
{ {
switch (info.Format) MangaFile mangaFile = null;
switch (info.Format)
{ {
case MangaFormat.Archive: case MangaFormat.Archive:
{ {
return new MangaFile() mangaFile = new MangaFile()
{ {
FilePath = info.FullFilePath, FilePath = info.FullFilePath,
Format = info.Format, Format = info.Format,
Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath) Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath)
}; };
break;
} }
case MangaFormat.Pdf: case MangaFormat.Pdf:
case MangaFormat.Epub: case MangaFormat.Epub:
{ {
return new MangaFile() mangaFile = new MangaFile()
{ {
FilePath = info.FullFilePath, FilePath = info.FullFilePath,
Format = info.Format, Format = info.Format,
Pages = _bookService.GetNumberOfPages(info.FullFilePath) Pages = _bookService.GetNumberOfPages(info.FullFilePath)
}; };
break;
} }
case MangaFormat.Image: case MangaFormat.Image:
{ {
return new MangaFile() mangaFile = new MangaFile()
{ {
FilePath = info.FullFilePath, FilePath = info.FullFilePath,
Format = info.Format, Format = info.Format,
Pages = 1 Pages = 1
}; };
break;
} }
default: default:
_logger.LogWarning("[Scanner] Ignoring {Filename}. File type is not supported", info.Filename); _logger.LogWarning("[Scanner] Ignoring {Filename}. File type is not supported", info.Filename);
break; break;
} }
return null; mangaFile?.UpdateLastModified();
return mangaFile;
} }
private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info) private void AddOrUpdateFileForChapter(Chapter chapter, ParserInfo info)
@ -515,20 +656,31 @@ namespace API.Services.Tasks
if (existingFile != null) if (existingFile != null)
{ {
existingFile.Format = info.Format; existingFile.Format = info.Format;
if (existingFile.HasFileBeenModified() || existingFile.Pages == 0) if (!existingFile.HasFileBeenModified() && existingFile.Pages != 0) return;
switch (existingFile.Format)
{ {
existingFile.Pages = (existingFile.Format == MangaFormat.Epub || existingFile.Format == MangaFormat.Pdf) case MangaFormat.Epub:
? _bookService.GetNumberOfPages(info.FullFilePath) case MangaFormat.Pdf:
: _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath); existingFile.Pages = _bookService.GetNumberOfPages(info.FullFilePath);
break;
case MangaFormat.Image:
existingFile.Pages = 1;
break;
case MangaFormat.Unknown:
existingFile.Pages = 0;
break;
case MangaFormat.Archive:
existingFile.Pages = _archiveService.GetNumberOfPagesFromArchive(info.FullFilePath);
break;
} }
existingFile.LastModified = File.GetLastWriteTime(info.FullFilePath);
} }
else else
{ {
var file = CreateMangaFile(info); var file = CreateMangaFile(info);
if (file != null) if (file == null) return;
{
chapter.Files.Add(file); chapter.Files.Add(file);
}
} }
} }
} }

View File

@ -1,23 +1,52 @@
using System.Threading; using System;
using API.DTOs.Update; using API.DTOs.Update;
namespace API.SignalR namespace API.SignalR
{ {
public static class MessageFactory public static class MessageFactory
{ {
public static SignalRMessage ScanSeriesEvent(int seriesId) public static SignalRMessage ScanSeriesEvent(int seriesId, string seriesName)
{ {
return new SignalRMessage() return new SignalRMessage()
{ {
Name = SignalREvents.ScanSeries, Name = SignalREvents.ScanSeries,
Body = new Body = new
{ {
SeriesId = seriesId SeriesId = seriesId,
SeriesName = seriesName
} }
}; };
} }
public static SignalRMessage ScanLibraryEvent(int libraryId, string stage) public static SignalRMessage SeriesAddedEvent(int seriesId, string seriesName, int libraryId)
{
return new SignalRMessage()
{
Name = SignalREvents.SeriesAdded,
Body = new
{
SeriesId = seriesId,
SeriesName = seriesName,
LibraryId = libraryId
}
};
}
public static SignalRMessage SeriesRemovedEvent(int seriesId, string seriesName, int libraryId)
{
return new SignalRMessage()
{
Name = SignalREvents.SeriesRemoved,
Body = new
{
SeriesId = seriesId,
SeriesName = seriesName,
LibraryId = libraryId
}
};
}
public static SignalRMessage ScanLibraryProgressEvent(int libraryId, float progress)
{ {
return new SignalRMessage() return new SignalRMessage()
{ {
@ -25,11 +54,14 @@ namespace API.SignalR
Body = new Body = new
{ {
LibraryId = libraryId, LibraryId = libraryId,
Stage = stage Progress = progress,
EventTime = DateTime.Now
} }
}; };
} }
public static SignalRMessage RefreshMetadataEvent(int libraryId, int seriesId) public static SignalRMessage RefreshMetadataEvent(int libraryId, int seriesId)
{ {
return new SignalRMessage() return new SignalRMessage()
@ -52,5 +84,17 @@ namespace API.SignalR
}; };
} }
public static SignalRMessage SeriesAddedToCollection(int tagId, int seriesId)
{
return new SignalRMessage
{
Name = SignalREvents.UpdateVersion,
Body = new
{
TagId = tagId,
SeriesId = seriesId
}
};
}
} }
} }

View File

@ -1,6 +1,8 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Threading.Tasks; using System.Threading.Tasks;
using API.Extensions;
using API.SignalR.Presence;
using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.SignalR; using Microsoft.AspNetCore.SignalR;
@ -13,8 +15,14 @@ namespace API.SignalR
[Authorize] [Authorize]
public class MessageHub : Hub public class MessageHub : Hub
{ {
private readonly IPresenceTracker _tracker;
private static readonly HashSet<string> Connections = new HashSet<string>(); private static readonly HashSet<string> Connections = new HashSet<string>();
public MessageHub(IPresenceTracker tracker)
{
_tracker = tracker;
}
public static bool IsConnected public static bool IsConnected
{ {
get get
@ -33,6 +41,12 @@ namespace API.SignalR
Connections.Add(Context.ConnectionId); Connections.Add(Context.ConnectionId);
} }
await _tracker.UserConnected(Context.User.GetUsername(), Context.ConnectionId);
var currentUsers = await PresenceTracker.GetOnlineUsers();
await Clients.All.SendAsync(SignalREvents.OnlineUsers, currentUsers);
await base.OnConnectedAsync(); await base.OnConnectedAsync();
} }
@ -43,6 +57,12 @@ namespace API.SignalR
Connections.Remove(Context.ConnectionId); Connections.Remove(Context.ConnectionId);
} }
await _tracker.UserDisconnected(Context.User.GetUsername(), Context.ConnectionId);
var currentUsers = await PresenceTracker.GetOnlineUsers();
await Clients.All.SendAsync(SignalREvents.OnlineUsers, currentUsers);
await base.OnDisconnectedAsync(exception); await base.OnDisconnectedAsync(exception);
} }
} }

View File

@ -1,41 +0,0 @@
using System;
using System.Threading.Tasks;
using API.Extensions;
using API.SignalR.Presence;
using Microsoft.AspNetCore.SignalR;
namespace API.SignalR
{
/// <summary>
/// Keeps track of who is logged into the app
/// </summary>
public class PresenceHub : Hub
{
private readonly IPresenceTracker _tracker;
public PresenceHub(IPresenceTracker tracker)
{
_tracker = tracker;
}
public override async Task OnConnectedAsync()
{
await _tracker.UserConnected(Context.User.GetUsername(), Context.ConnectionId);
var currentUsers = await PresenceTracker.GetOnlineUsers();
await Clients.All.SendAsync("GetOnlineUsers", currentUsers);
}
public override async Task OnDisconnectedAsync(Exception exception)
{
await _tracker.UserDisconnected(Context.User.GetUsername(), Context.ConnectionId);
var currentUsers = await PresenceTracker.GetOnlineUsers();
await Clients.All.SendAsync("GetOnlineUsers", currentUsers);
await base.OnDisconnectedAsync(exception);
}
}
}

View File

@ -6,6 +6,10 @@
public const string ScanSeries = "ScanSeries"; public const string ScanSeries = "ScanSeries";
public const string RefreshMetadata = "RefreshMetadata"; public const string RefreshMetadata = "RefreshMetadata";
public const string ScanLibrary = "ScanLibrary"; public const string ScanLibrary = "ScanLibrary";
public const string SeriesAdded = "SeriesAdded";
public const string SeriesRemoved = "SeriesRemoved";
public const string ScanLibraryProgress = "ScanLibraryProgress";
public const string OnlineUsers = "OnlineUsers";
public const string SeriesAddedToCollection = "SeriesAddedToCollection";
} }
} }

View File

@ -5,6 +5,8 @@ using System.Linq;
using System.Net; using System.Net;
using System.Net.Sockets; using System.Net.Sockets;
using API.Extensions; using API.Extensions;
using API.Interfaces;
using API.Interfaces.Repositories;
using API.Middleware; using API.Middleware;
using API.Services; using API.Services;
using API.Services.HostedServices; using API.Services.HostedServices;
@ -52,8 +54,41 @@ namespace API
services.AddSwaggerGen(c => services.AddSwaggerGen(c =>
{ {
c.SwaggerDoc("v1", new OpenApiInfo { Title = "Kavita API", Version = "v1" }); c.SwaggerDoc("v1", new OpenApiInfo { Title = "Kavita API", Version = "v1" });
c.SwaggerDoc("Kavita API", new OpenApiInfo()
{
Description = "Kavita provides a set of APIs that are authenticated by JWT. JWT token can be copied from local storage.",
Title = "Kavita API",
Version = "v1",
});
var filePath = Path.Combine(AppContext.BaseDirectory, "API.xml"); var filePath = Path.Combine(AppContext.BaseDirectory, "API.xml");
c.IncludeXmlComments(filePath); c.IncludeXmlComments(filePath);
c.AddSecurityDefinition("Bearer", new OpenApiSecurityScheme {
In = ParameterLocation.Header,
Description = "Please insert JWT with Bearer into field",
Name = "Authorization",
Type = SecuritySchemeType.ApiKey
});
c.AddSecurityRequirement(new OpenApiSecurityRequirement {
{
new OpenApiSecurityScheme
{
Reference = new OpenApiReference
{
Type = ReferenceType.SecurityScheme,
Id = "Bearer"
}
},
Array.Empty<string>()
}
});
c.AddServer(new OpenApiServer()
{
Description = "Local Server",
Url = "http://localhost:5000/",
});
}); });
services.AddResponseCompression(options => services.AddResponseCompression(options =>
{ {
@ -88,14 +123,17 @@ namespace API
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline. // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IBackgroundJobClient backgroundJobs, IWebHostEnvironment env, public void Configure(IApplicationBuilder app, IBackgroundJobClient backgroundJobs, IWebHostEnvironment env,
IHostApplicationLifetime applicationLifetime) IHostApplicationLifetime applicationLifetime, IServiceProvider serviceProvider)
{ {
app.UseMiddleware<ExceptionMiddleware>(); app.UseMiddleware<ExceptionMiddleware>();
if (env.IsDevelopment()) if (env.IsDevelopment())
{ {
app.UseSwagger(); app.UseSwagger();
app.UseSwaggerUI(c => c.SwaggerEndpoint("/swagger/v1/swagger.json", "API v1")); app.UseSwaggerUI(c =>
{
c.SwaggerEndpoint("/swagger/v1/swagger.json", "Kavita API " + BuildInfo.Version);
});
app.UseHangfireDashboard(); app.UseHangfireDashboard();
} }
@ -124,11 +162,29 @@ namespace API
app.UseDefaultFiles(); app.UseDefaultFiles();
// This is not implemented completely. Commenting out until implemented
// var service = serviceProvider.GetRequiredService<IUnitOfWork>();
// var settings = service.SettingsRepository.GetSettingsDto();
// if (!string.IsNullOrEmpty(settings.BaseUrl) && !settings.BaseUrl.Equals("/"))
// {
// var path = !settings.BaseUrl.StartsWith("/")
// ? $"/{settings.BaseUrl}"
// : settings.BaseUrl;
// path = !path.EndsWith("/")
// ? $"{path}/"
// : path;
// app.UsePathBase(path);
// Console.WriteLine("Starting with base url as " + path);
// }
app.UseStaticFiles(new StaticFileOptions app.UseStaticFiles(new StaticFileOptions
{ {
ContentTypeProvider = new FileExtensionContentTypeProvider() ContentTypeProvider = new FileExtensionContentTypeProvider()
}); });
app.Use(async (context, next) => app.Use(async (context, next) =>
{ {
context.Response.GetTypedHeaders().CacheControl = context.Response.GetTypedHeaders().CacheControl =
@ -147,7 +203,6 @@ namespace API
{ {
endpoints.MapControllers(); endpoints.MapControllers();
endpoints.MapHub<MessageHub>("hubs/messages"); endpoints.MapHub<MessageHub>("hubs/messages");
endpoints.MapHub<PresenceHub>("hubs/presence");
endpoints.MapHangfireDashboard(); endpoints.MapHangfireDashboard();
endpoints.MapFallbackToController("Index", "Fallback"); endpoints.MapFallbackToController("Index", "Fallback");
}); });

View File

@ -4,15 +4,14 @@
<TargetFramework>net5.0</TargetFramework> <TargetFramework>net5.0</TargetFramework>
<Company>kavitareader.com</Company> <Company>kavitareader.com</Company>
<Product>Kavita</Product> <Product>Kavita</Product>
<AssemblyVersion>0.4.6.1</AssemblyVersion> <AssemblyVersion>0.4.7.0</AssemblyVersion>
<NeutralLanguage>en</NeutralLanguage> <NeutralLanguage>en</NeutralLanguage>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="5.0.0" /> <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="5.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="5.0.0" /> <PackageReference Include="Microsoft.Extensions.Hosting" Version="5.0.0" />
<PackageReference Include="Sentry" Version="3.8.3" /> <PackageReference Include="SonarAnalyzer.CSharp" Version="8.29.0.36737">
<PackageReference Include="SonarAnalyzer.CSharp" Version="8.27.0.35380">
<PrivateAssets>all</PrivateAssets> <PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets> <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference> </PackageReference>

View File

@ -49,7 +49,7 @@ Password: Demouser64
- Linux users must ensure the directory & kavita.db is writable by Kavita (might require starting server once) - Linux users must ensure the directory & kavita.db is writable by Kavita (might require starting server once)
- Run Kavita - Run Kavita
- If you are updating, do not copy appsettings.json from the new version over. It will override your TokenKey and you will have to reauthenticate on your devices. - If you are updating, do not copy appsettings.json from the new version over. It will override your TokenKey and you will have to reauthenticate on your devices.
- Open localhost:5000 and set up your account and libraries in the UI.
### Docker ### Docker
Running your Kavita server in docker is super easy! Barely an inconvenience. You can run it with this command: Running your Kavita server in docker is super easy! Barely an inconvenience. You can run it with this command:

150
UI/Web/package-lock.json generated
View File

@ -2679,135 +2679,6 @@
} }
} }
}, },
"@sentry/angular": {
"version": "6.10.0",
"resolved": "https://registry.npmjs.org/@sentry/angular/-/angular-6.10.0.tgz",
"integrity": "sha512-SSnsz4sVu9LJh7RM+z9FopWytl2yYNZQ2nK/zv/6iQKIBOqvnCqUIPjVjq1rFYXOe0jOJKsn0QlQLKp4MajYMg==",
"requires": {
"@sentry/browser": "6.10.0",
"@sentry/types": "6.10.0",
"@sentry/utils": "6.10.0",
"rxjs": "^6.6.0",
"tslib": "^1.9.3"
},
"dependencies": {
"tslib": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
}
}
},
"@sentry/browser": {
"version": "6.10.0",
"resolved": "https://registry.npmjs.org/@sentry/browser/-/browser-6.10.0.tgz",
"integrity": "sha512-H0Blgp8f8bomebkkGWIgxHVjabtQAlsKJDiFXBg7gIc75YcarRxwH0R3hMog1/h8mmv4CGGUsy5ljYW6jsNnvA==",
"requires": {
"@sentry/core": "6.10.0",
"@sentry/types": "6.10.0",
"@sentry/utils": "6.10.0",
"tslib": "^1.9.3"
},
"dependencies": {
"tslib": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
}
}
},
"@sentry/core": {
"version": "6.10.0",
"resolved": "https://registry.npmjs.org/@sentry/core/-/core-6.10.0.tgz",
"integrity": "sha512-5KlxHJlbD7AMo+b9pMGkjxUOfMILtsqCtGgI7DMvZNfEkdohO8QgUY+hPqr540kmwArFS91ipQYWhqzGaOhM3Q==",
"requires": {
"@sentry/hub": "6.10.0",
"@sentry/minimal": "6.10.0",
"@sentry/types": "6.10.0",
"@sentry/utils": "6.10.0",
"tslib": "^1.9.3"
},
"dependencies": {
"tslib": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
}
}
},
"@sentry/hub": {
"version": "6.10.0",
"resolved": "https://registry.npmjs.org/@sentry/hub/-/hub-6.10.0.tgz",
"integrity": "sha512-MV8wjhWiFAXZAhmj7Ef5QdBr2IF93u8xXiIo2J+dRZ7eVa4/ZszoUiDbhUcl/TPxczaw4oW2a6tINBNFLzXiig==",
"requires": {
"@sentry/types": "6.10.0",
"@sentry/utils": "6.10.0",
"tslib": "^1.9.3"
},
"dependencies": {
"tslib": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
}
}
},
"@sentry/integrations": {
"version": "6.10.0",
"resolved": "https://registry.npmjs.org/@sentry/integrations/-/integrations-6.10.0.tgz",
"integrity": "sha512-NMtB0jjFYFZRxyjYu2dWLThk9YPIwqhi4hYywmWkbv4/ILzi5Rwnh+aqNW6yrj8qG4b9itNMh3YvEzmf0aqauw==",
"requires": {
"@sentry/types": "6.10.0",
"@sentry/utils": "6.10.0",
"localforage": "^1.8.1",
"tslib": "^1.9.3"
},
"dependencies": {
"tslib": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
}
}
},
"@sentry/minimal": {
"version": "6.10.0",
"resolved": "https://registry.npmjs.org/@sentry/minimal/-/minimal-6.10.0.tgz",
"integrity": "sha512-yarm046UgUFIBoxqnBan2+BEgaO9KZCrLzsIsmALiQvpfW92K1lHurSawl5W6SR7wCYBnNn7CPvPE/BHFdy4YA==",
"requires": {
"@sentry/hub": "6.10.0",
"@sentry/types": "6.10.0",
"tslib": "^1.9.3"
},
"dependencies": {
"tslib": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
}
}
},
"@sentry/types": {
"version": "6.10.0",
"resolved": "https://registry.npmjs.org/@sentry/types/-/types-6.10.0.tgz",
"integrity": "sha512-M7s0JFgG7/6/yNVYoPUbxzaXDhnzyIQYRRJJKRaTD77YO4MHvi4Ke8alBWqD5fer0cPIfcSkBqa9BLdqRqcMWw=="
},
"@sentry/utils": {
"version": "6.10.0",
"resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-6.10.0.tgz",
"integrity": "sha512-F9OczOcZMFtazYVZ6LfRIe65/eOfQbiAedIKS0li4npuMz0jKYRbxrjd/U7oLiNQkPAp4/BujU4m1ZIwq6a+tg==",
"requires": {
"@sentry/types": "6.10.0",
"tslib": "^1.9.3"
},
"dependencies": {
"tslib": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
}
}
},
"@sinonjs/commons": { "@sinonjs/commons": {
"version": "1.8.2", "version": "1.8.2",
"resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.2.tgz", "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.2.tgz",
@ -7521,7 +7392,8 @@
"immediate": { "immediate": {
"version": "3.0.6", "version": "3.0.6",
"resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz",
"integrity": "sha1-nbHb0Pr43m++D13V5Wu2BigN5ps=" "integrity": "sha1-nbHb0Pr43m++D13V5Wu2BigN5ps=",
"dev": true
}, },
"import-fresh": { "import-fresh": {
"version": "2.0.0", "version": "2.0.0",
@ -10032,24 +9904,6 @@
"json5": "^2.1.2" "json5": "^2.1.2"
} }
}, },
"localforage": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/localforage/-/localforage-1.9.0.tgz",
"integrity": "sha512-rR1oyNrKulpe+VM9cYmcFn6tsHuokyVHFaCM3+osEmxaHTbEk8oQu6eGDfS6DQLWi/N67XRmB8ECG37OES368g==",
"requires": {
"lie": "3.1.1"
},
"dependencies": {
"lie": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/lie/-/lie-3.1.1.tgz",
"integrity": "sha1-mkNrLMd0bKWd56QfpGmz77dr2H4=",
"requires": {
"immediate": "~3.0.5"
}
}
}
},
"locate-path": { "locate-path": {
"version": "3.0.0", "version": "3.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz",

View File

@ -31,8 +31,6 @@
"@ng-bootstrap/ng-bootstrap": "^9.1.0", "@ng-bootstrap/ng-bootstrap": "^9.1.0",
"@ngx-lite/nav-drawer": "^0.4.6", "@ngx-lite/nav-drawer": "^0.4.6",
"@ngx-lite/util": "0.0.0", "@ngx-lite/util": "0.0.0",
"@sentry/angular": "^6.10.0",
"@sentry/integrations": "^6.10.0",
"@types/file-saver": "^2.0.1", "@types/file-saver": "^2.0.1",
"angular-ng-autocomplete": "^2.0.5", "angular-ng-autocomplete": "^2.0.5",
"bootstrap": "^4.5.0", "bootstrap": "^4.5.0",

View File

@ -0,0 +1,10 @@
/**
 * Holds the base url only. Not to be used by the application itself —
 * only for loading and bootstrapping the app.
 */
export class ConfigData {
    /** Base url the app is served from. Defaults to '/'. */
    baseUrl: string = '/';

    /**
     * @param baseUrl Base url to bootstrap with; defaults to '/' when omitted.
     */
    constructor(baseUrl: string = '/') {
        this.baseUrl = baseUrl;
    }
}

View File

@ -0,0 +1,4 @@
/**
 * Event payload for a metadata refresh on a single series.
 * NOTE(review): presumably delivered over the message hub — confirm against MessageHubService.
 */
export interface RefreshMetadataEvent {
    /** Id of the library the series belongs to. */
    libraryId: number;
    /** Id of the series whose metadata was refreshed. */
    seriesId: number;
}

View File

@ -0,0 +1,5 @@
/**
 * Event payload reporting progress of a library scan.
 */
export interface ScanLibraryProgressEvent {
    /** Id of the library being scanned. */
    libraryId: number;
    /** Scan progress — assumed to be a fraction or percentage; TODO confirm range with the server emitter. */
    progress: number;
    /** Timestamp of the event as a string — NOTE(review): format (ISO 8601?) not visible here, verify. */
    eventTime: string;
}

View File

@ -1,3 +1,4 @@
export interface ScanSeriesEvent { export interface ScanSeriesEvent {
seriesId: number; seriesId: number;
seriesName: string;
} }

View File

@ -0,0 +1,5 @@
/**
 * Event payload for a series being added to a library.
 */
export interface SeriesAddedEvent {
    /** Id of the library the series was added to. */
    libraryId: number;
    /** Id of the newly added series. */
    seriesId: number;
    /** Display name of the newly added series. */
    seriesName: string;
}

View File

@ -0,0 +1,4 @@
/**
 * Event payload for a series being added to a collection tag.
 */
export interface SeriesAddedToCollectionEvent {
    /** Id of the collection tag the series was added to. */
    tagId: number;
    /** Id of the series that was added. */
    seriesId: number;
}

View File

@ -7,7 +7,7 @@ export enum LibraryType {
export interface Library { export interface Library {
id: number; id: number;
name: string; name: string;
coverImage: string; lastScanned: string;
type: LibraryType; type: LibraryType;
folders: string[]; folders: string[];
} }

View File

@ -5,10 +5,8 @@ import { map, takeUntil } from 'rxjs/operators';
import { environment } from 'src/environments/environment'; import { environment } from 'src/environments/environment';
import { Preferences } from '../_models/preferences/preferences'; import { Preferences } from '../_models/preferences/preferences';
import { User } from '../_models/user'; import { User } from '../_models/user';
import * as Sentry from "@sentry/angular";
import { Router } from '@angular/router'; import { Router } from '@angular/router';
import { MessageHubService } from './message-hub.service'; import { MessageHubService } from './message-hub.service';
import { PresenceHubService } from './presence-hub.service';
@Injectable({ @Injectable({
providedIn: 'root' providedIn: 'root'
@ -17,6 +15,7 @@ export class AccountService implements OnDestroy {
baseUrl = environment.apiUrl; baseUrl = environment.apiUrl;
userKey = 'kavita-user'; userKey = 'kavita-user';
public lastLoginKey = 'kavita-lastlogin';
currentUser: User | undefined; currentUser: User | undefined;
// Stores values, when someone subscribes gives (1) of last values seen. // Stores values, when someone subscribes gives (1) of last values seen.
@ -26,7 +25,7 @@ export class AccountService implements OnDestroy {
private readonly onDestroy = new Subject<void>(); private readonly onDestroy = new Subject<void>();
constructor(private httpClient: HttpClient, private router: Router, constructor(private httpClient: HttpClient, private router: Router,
private messageHub: MessageHubService, private presenceHub: PresenceHubService) {} private messageHub: MessageHubService) {}
ngOnDestroy(): void { ngOnDestroy(): void {
this.onDestroy.next(); this.onDestroy.next();
@ -51,8 +50,7 @@ export class AccountService implements OnDestroy {
const user = response; const user = response;
if (user) { if (user) {
this.setCurrentUser(user); this.setCurrentUser(user);
this.messageHub.createHubConnection(user); this.messageHub.createHubConnection(user, this.hasAdminRole(user));
this.presenceHub.createHubConnection(user);
} }
}), }),
takeUntil(this.onDestroy) takeUntil(this.onDestroy)
@ -64,14 +62,9 @@ export class AccountService implements OnDestroy {
user.roles = []; user.roles = [];
const roles = this.getDecodedToken(user.token).role; const roles = this.getDecodedToken(user.token).role;
Array.isArray(roles) ? user.roles = roles : user.roles.push(roles); Array.isArray(roles) ? user.roles = roles : user.roles.push(roles);
Sentry.setContext('admin', {'admin': this.hasAdminRole(user)});
Sentry.configureScope(scope => {
scope.setUser({
username: user.username
});
});
localStorage.setItem(this.userKey, JSON.stringify(user)); localStorage.setItem(this.userKey, JSON.stringify(user));
localStorage.setItem(this.lastLoginKey, user.username);
} }
this.currentUserSource.next(user); this.currentUserSource.next(user);
@ -85,7 +78,6 @@ export class AccountService implements OnDestroy {
// Upon logout, perform redirection // Upon logout, perform redirection
this.router.navigateByUrl('/login'); this.router.navigateByUrl('/login');
this.messageHub.stopHubConnection(); this.messageHub.stopHubConnection();
this.presenceHub.stopHubConnection();
} }
register(model: {username: string, password: string, isAdmin?: boolean}) { register(model: {username: string, password: string, isAdmin?: boolean}) {

View File

@ -6,6 +6,7 @@ import { take } from 'rxjs/operators';
import { BookmarksModalComponent } from '../cards/_modals/bookmarks-modal/bookmarks-modal.component'; import { BookmarksModalComponent } from '../cards/_modals/bookmarks-modal/bookmarks-modal.component';
import { AddToListModalComponent, ADD_FLOW } from '../reading-list/_modals/add-to-list-modal/add-to-list-modal.component'; import { AddToListModalComponent, ADD_FLOW } from '../reading-list/_modals/add-to-list-modal/add-to-list-modal.component';
import { EditReadingListModalComponent } from '../reading-list/_modals/edit-reading-list-modal/edit-reading-list-modal.component'; import { EditReadingListModalComponent } from '../reading-list/_modals/edit-reading-list-modal/edit-reading-list-modal.component';
import { ConfirmService } from '../shared/confirm.service';
import { Chapter } from '../_models/chapter'; import { Chapter } from '../_models/chapter';
import { Library } from '../_models/library'; import { Library } from '../_models/library';
import { ReadingList } from '../_models/reading-list'; import { ReadingList } from '../_models/reading-list';
@ -35,7 +36,8 @@ export class ActionService implements OnDestroy {
private readingListModalRef: NgbModalRef | null = null; private readingListModalRef: NgbModalRef | null = null;
constructor(private libraryService: LibraryService, private seriesService: SeriesService, constructor(private libraryService: LibraryService, private seriesService: SeriesService,
private readerService: ReaderService, private toastr: ToastrService, private modalService: NgbModal) { } private readerService: ReaderService, private toastr: ToastrService, private modalService: NgbModal,
private confirmService: ConfirmService) { }
ngOnDestroy() { ngOnDestroy() {
this.onDestroy.next(); this.onDestroy.next();
@ -66,11 +68,15 @@ export class ActionService implements OnDestroy {
* @param callback Optional callback to perform actions after API completes * @param callback Optional callback to perform actions after API completes
* @returns * @returns
*/ */
refreshMetadata(library: Partial<Library>, callback?: LibraryActionCallback) { async refreshMetadata(library: Partial<Library>, callback?: LibraryActionCallback) {
if (!library.hasOwnProperty('id') || library.id === undefined) { if (!library.hasOwnProperty('id') || library.id === undefined) {
return; return;
} }
if (!await this.confirmService.confirm('Refresh metadata will force all cover images and metadata to be recalculated. This is a heavy operation. Are you sure you don\'t want to perform a Scan instead?')) {
return;
}
this.libraryService.refreshMetadata(library?.id).pipe(take(1)).subscribe((res: any) => { this.libraryService.refreshMetadata(library?.id).pipe(take(1)).subscribe((res: any) => {
this.toastr.success('Scan started for ' + library.name); this.toastr.success('Scan started for ' + library.name);
if (callback) { if (callback) {
@ -128,7 +134,11 @@ export class ActionService implements OnDestroy {
* @param series Series, must have libraryId, id and name populated * @param series Series, must have libraryId, id and name populated
* @param callback Optional callback to perform actions after API completes * @param callback Optional callback to perform actions after API completes
*/ */
refreshMetdata(series: Series, callback?: SeriesActionCallback) { async refreshMetdata(series: Series, callback?: SeriesActionCallback) {
if (!await this.confirmService.confirm('Refresh metadata will force all cover images and metadata to be recalculated. This is a heavy operation. Are you sure you don\'t want to perform a Scan instead?')) {
return;
}
this.seriesService.refreshMetadata(series).pipe(take(1)).subscribe((res: any) => { this.seriesService.refreshMetadata(series).pipe(take(1)).subscribe((res: any) => {
this.toastr.success('Refresh started for ' + series.name); this.toastr.success('Refresh started for ' + series.name);
if (callback) { if (callback) {
@ -235,10 +245,10 @@ export class ActionService implements OnDestroy {
markMultipleAsUnread(seriesId: number, volumes: Array<Volume>, chapters?: Array<Chapter>, callback?: VoidActionCallback) { markMultipleAsUnread(seriesId: number, volumes: Array<Volume>, chapters?: Array<Chapter>, callback?: VoidActionCallback) {
this.readerService.markMultipleUnread(seriesId, volumes.map(v => v.id), chapters?.map(c => c.id)).pipe(take(1)).subscribe(() => { this.readerService.markMultipleUnread(seriesId, volumes.map(v => v.id), chapters?.map(c => c.id)).pipe(take(1)).subscribe(() => {
volumes.forEach(volume => { volumes.forEach(volume => {
volume.pagesRead = volume.pages; volume.pagesRead = 0;
volume.chapters?.forEach(c => c.pagesRead = c.pages); volume.chapters?.forEach(c => c.pagesRead = 0);
}); });
chapters?.forEach(c => c.pagesRead = c.pages); chapters?.forEach(c => c.pagesRead = 0);
this.toastr.success('Marked as Read'); this.toastr.success('Marked as Read');
if (callback) { if (callback) {

View File

@ -16,6 +16,10 @@ export class MemberService {
return this.httpClient.get<Member[]>(this.baseUrl + 'users'); return this.httpClient.get<Member[]>(this.baseUrl + 'users');
} }
/** Fetches the usernames of all members (GET <baseUrl>users/names). */
getMemberNames() {
return this.httpClient.get<string[]>(this.baseUrl + 'users/names');
}
adminExists() { adminExists() {
return this.httpClient.get<boolean>(this.baseUrl + 'admin/exists'); return this.httpClient.get<boolean>(this.baseUrl + 'admin/exists');
} }

Some files were not shown because too many files have changed in this diff Show More