Fixed some bad test cases that were causing problems across my codebase.

Joseph Milazzo 2021-03-22 16:02:07 -05:00
parent b66c6b5714
commit 585e965a85
11 changed files with 97 additions and 93 deletions

@@ -63,7 +63,6 @@ namespace API.Tests.Services
[InlineData("not supported 1.zip", 1)]
[InlineData("not supported 2.cbz", 0)]
[InlineData("not supported 3.cbz", 0)]
[InlineData("mangadex_131.zip", 577)]
public void GetNumberOfPagesFromArchiveTest(string archivePath, int expected)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
@@ -119,7 +118,6 @@ namespace API.Tests.Services
[InlineData("not supported 1.zip", 1)]
[InlineData("not supported 2.cbz", 169)]
[InlineData("not supported 3.cbz", 1)]
[InlineData("mangadex_131.zip", 577)]
public void CanExtractArchive(string archivePath, int expectedFileCount)
{
@@ -128,17 +126,10 @@ namespace API.Tests.Services
DirectoryService.ClearAndDeleteDirectory(extractDirectory);
Stopwatch sw = Stopwatch.StartNew();
try
{
_archiveService.ExtractArchive(Path.Join(testDirectory, archivePath), extractDirectory);
var di1 = new DirectoryInfo(extractDirectory);
Assert.Equal(expectedFileCount, di1.GetFiles().Length);
_testOutputHelper.WriteLine($"Processed Original in {sw.ElapsedMilliseconds} ms");
}
catch (Exception e)
{
_testOutputHelper.WriteLine("Could not process");
}
_archiveService.ExtractArchive(Path.Join(testDirectory, archivePath), extractDirectory);
var di1 = new DirectoryInfo(extractDirectory);
Assert.Equal(expectedFileCount, di1.Exists ? di1.GetFiles().Length : 0);
_testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms");
DirectoryService.ClearAndDeleteDirectory(extractDirectory);
}
@@ -159,6 +150,15 @@ namespace API.Tests.Services
_testOutputHelper.WriteLine($"Processed in {sw.ElapsedMilliseconds} ms");
}
[Theory]
[InlineData("not supported 2.cbz")]
[InlineData("06_v01[DMM].zip")]
public void CanParseCoverImage(string inputFile)
{
var testDirectory = Path.Join(Directory.GetCurrentDirectory(), "../../../Services/Test Data/ArchiveService/Archives");
Assert.NotEmpty(_archiveService.GetCoverImage(Path.Join(testDirectory, inputFile)));
}
[Fact]
public void ShouldHaveComicInfo()
{

Binary image file changed (53 KiB); content not shown.

@@ -52,7 +52,7 @@ namespace API.Controllers
var dateString = DateTime.Now.ToShortDateString().Replace("/", "_");
var tempLocation = Path.Join(tempDirectory, "logs_" + dateString);
_directoryService.ExistOrCreate(tempLocation);
DirectoryService.ExistOrCreate(tempLocation);
if (!_directoryService.CopyFilesToDirectory(files, tempLocation))
{
return BadRequest("Unable to copy files to temp directory for log download.");

@@ -20,12 +20,7 @@ namespace API.Interfaces.Services
/// <param name="searchPatternExpression"></param>
/// <returns></returns>
string[] GetFilesWithExtension(string path, string searchPatternExpression = "");
/// <summary>
/// Returns true if the path exists and is a directory. If path does not exist, this will create it. Returns false in all fail cases.
/// </summary>
/// <param name="directoryPath"></param>
/// <returns></returns>
bool ExistOrCreate(string directoryPath);
//bool ExistOrCreate(string directoryPath);
Task<byte[]> ReadFileAsync(string path);

@@ -9,7 +9,7 @@ namespace API.Parser
public static class Parser
{
public static readonly string MangaFileExtensions = @"\.cbz|\.zip|\.rar|\.cbr|.tar.gz|.7zip";
public static readonly string ImageFileExtensions = @"\.png|\.jpeg|\.jpg|\.gif";
public static readonly string ImageFileExtensions = @"\.png|\.jpeg|\.jpg";
private static readonly string XmlRegexExtensions = @"\.xml";
private static readonly Regex ImageRegex = new Regex(ImageFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex MangaFileRegex = new Regex(MangaFileExtensions, RegexOptions.IgnoreCase | RegexOptions.Compiled);
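Note: the ArchiveService changes further down filter archive entries with Parser.Parser.IsImage and Parser.Parser.IsXml. Those helpers are not shown in this diff; the sketch below is a guess at their shape, built on the same compiled-regex pattern as the fields above. The XmlRegex field and the use of Path.GetExtension are assumptions, not part of the commit.

using System.IO;
using System.Text.RegularExpressions;

public static class ParserSketch
{
    // Sketch only: mirrors the patterns above; the real helpers live on API.Parser.Parser.
    private static readonly Regex ImageRegex =
        new Regex(@"\.png|\.jpeg|\.jpg", RegexOptions.IgnoreCase | RegexOptions.Compiled);
    private static readonly Regex XmlRegex =
        new Regex(@"\.xml", RegexOptions.IgnoreCase | RegexOptions.Compiled);

    // Presumed shape of IsImage/IsXml: match on the extension of an archive entry key.
    public static bool IsImage(string filePath) => ImageRegex.IsMatch(Path.GetExtension(filePath));

    public static bool IsXml(string filePath) => XmlRegex.IsMatch(Path.GetExtension(filePath));
}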

@@ -87,39 +87,38 @@ namespace API.Services
{
if (!IsValidArchive(filepath)) return Array.Empty<byte>();
if (SharpCompress.Archives.Zip.ZipArchive.IsZipFile(filepath))
{
using var archive = SharpCompress.Archives.Zip.ZipArchive.Open(filepath);
return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory), createThumbnail);
}
if (GZipArchive.IsGZipFile(filepath))
{
using var archive = GZipArchive.Open(filepath);
return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory), createThumbnail);
}
if (RarArchive.IsRarFile(filepath))
{
using var archive = RarArchive.Open(filepath);
return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory), createThumbnail);
}
if (SevenZipArchive.IsSevenZipFile(filepath))
{
using var archive = SevenZipArchive.Open(filepath);
return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory), createThumbnail);
}
if (TarArchive.IsTarFile(filepath))
{
using var archive = TarArchive.Open(filepath);
return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory), createThumbnail);
}
_logger.LogError("Could not parse archive file");
return Array.Empty<byte>();
// if (SharpCompress.Archives.Zip.ZipArchive.IsZipFile(filepath))
// {
// using var archive = SharpCompress.Archives.Zip.ZipArchive.Open(filepath);
// return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), createThumbnail);
// }
//
// if (GZipArchive.IsGZipFile(filepath))
// {
// using var archive = GZipArchive.Open(filepath);
// return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), createThumbnail);
// }
//
// if (RarArchive.IsRarFile(filepath))
// {
// using var archive = RarArchive.Open(filepath);
// return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), createThumbnail);
// }
//
// if (SevenZipArchive.IsSevenZipFile(filepath))
// {
// using var archive = SevenZipArchive.Open(filepath);
// return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), createThumbnail);
// }
//
// if (TarArchive.IsTarFile(filepath))
// {
// using var archive = TarArchive.Open(filepath);
// return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), createThumbnail);
// }
using var archive = ArchiveFactory.Open(filepath);
return FindCoverImage(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), createThumbnail);
}
catch (Exception ex)
{
@@ -163,6 +162,7 @@ namespace API.Services
{
formatExtension = "." + formatExtension;
}
// TODO: Validate if jpeg is same as jpg
try
{
using var thumbnail = Image.ThumbnailBuffer(entry, ThumbnailWidth);
@@ -230,27 +230,27 @@ namespace API.Services
if (SharpCompress.Archives.Zip.ZipArchive.IsZipFile(archivePath))
{
using var archive = SharpCompress.Archives.Zip.ZipArchive.Open(archivePath);
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory));
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsXml(entry.Key)));
}
else if (GZipArchive.IsGZipFile(archivePath))
{
using var archive = GZipArchive.Open(archivePath);
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory));
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsXml(entry.Key)));
}
else if (RarArchive.IsRarFile(archivePath))
{
using var archive = RarArchive.Open(archivePath);
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory));
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsXml(entry.Key)));
}
else if (SevenZipArchive.IsSevenZipFile(archivePath))
{
using var archive = SevenZipArchive.Open(archivePath);
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory));
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsXml(entry.Key)));
}
else if (TarArchive.IsTarFile(archivePath))
{
using var archive = TarArchive.Open(archivePath);
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory));
info = FindComicInfoXml(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsXml(entry.Key)));
}
if (info != null)
@@ -270,6 +270,7 @@ namespace API.Services
private void ExtractArchiveEntities(IEnumerable<IArchiveEntry> entries, string extractPath)
{
DirectoryService.ExistOrCreate(extractPath);
foreach (var entry in entries)
{
entry.WriteToDirectory(extractPath, new ExtractionOptions()
@@ -293,34 +294,38 @@ namespace API.Services
if (!File.Exists(archivePath)) return;
var sw = Stopwatch.StartNew();
if (SharpCompress.Archives.Zip.ZipArchive.IsZipFile(archivePath))
{
using var archive = SharpCompress.Archives.Zip.ZipArchive.Open(archivePath);
ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory), extractPath);
}
else if (GZipArchive.IsGZipFile(archivePath))
{
using var archive = GZipArchive.Open(archivePath);
ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory), extractPath);
} else if (RarArchive.IsRarFile(archivePath))
{
using var archive = RarArchive.Open(archivePath);
ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory), extractPath);
} else if (SevenZipArchive.IsSevenZipFile(archivePath))
{
using var archive = SevenZipArchive.Open(archivePath);
ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory), extractPath);
}
else if (TarArchive.IsTarFile(archivePath))
{
using var archive = TarArchive.Open(archivePath);
ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory), extractPath);
}
else
{
_logger.LogError("Could not parse archive file");
return;
}
// if (SharpCompress.Archives.Zip.ZipArchive.IsZipFile(archivePath))
// {
//
// //using var archive = SharpCompress.Archives.Zip.ZipArchive.Open(archivePath);
// using var archive = ArchiveFactory.Open(archivePath);
// ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath);
// }
// else if (GZipArchive.IsGZipFile(archivePath))
// {
// using var archive = GZipArchive.Open(archivePath);
// ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath);
// } else if (RarArchive.IsRarFile(archivePath))
// {
// using var archive = RarArchive.Open(archivePath);
// ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath);
// } else if (SevenZipArchive.IsSevenZipFile(archivePath))
// {
// using var archive = SevenZipArchive.Open(archivePath);
// ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath);
// }
// else if (TarArchive.IsTarFile(archivePath))
// {
// using var archive = TarArchive.Open(archivePath);
// ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath);
// }
// else
// {
// _logger.LogError("Could not parse archive file");
// return;
// }
using var archive = ArchiveFactory.Open(archivePath);
ExtractArchiveEntities(archive.Entries.Where(entry => !entry.IsDirectory && Parser.Parser.IsImage(entry.Key)), extractPath);
_logger.LogDebug("[Fallback] Extracted archive to {ExtractPath} in {ElapsedMilliseconds} milliseconds", extractPath, sw.ElapsedMilliseconds);
}
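Net effect of this file's changes: instead of branching on IsZipFile/IsGZipFile/IsRarFile/IsSevenZipFile/IsTarFile, both GetCoverImage and ExtractArchive open the file through SharpCompress's format-agnostic ArchiveFactory.Open and filter entries with Parser.Parser.IsImage (or IsXml for ComicInfo), so non-image entries are never extracted. A minimal sketch of that extraction path follows, assuming a SharpCompress reference; the ExtractionOptions values and the helper class name are illustrative, since the options object is truncated in the hunk above.

using System.IO;
using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Common;

public static class ExtractionSketch
{
    // Sketch of the ArchiveFactory-based path shown in this diff; the real
    // ArchiveService wraps this with validation, logging, and timing.
    public static void ExtractImages(string archivePath, string extractPath)
    {
        if (!File.Exists(archivePath)) return;
        Directory.CreateDirectory(extractPath);

        using var archive = ArchiveFactory.Open(archivePath);
        // ParserSketch.IsImage stands in for the project's Parser.Parser.IsImage.
        var imageEntries = archive.Entries
            .Where(entry => !entry.IsDirectory && ParserSketch.IsImage(entry.Key));

        foreach (var entry in imageEntries)
        {
            // Option values here are assumptions; the diff truncates the real ones.
            entry.WriteToDirectory(extractPath, new ExtractionOptions
            {
                ExtractFullPath = false,
                Overwrite = true
            });
        }
    }
}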

@@ -32,7 +32,7 @@ namespace API.Services
public void EnsureCacheDirectory()
{
_logger.LogDebug("Checking if valid Cache directory: {CacheDirectory}", CacheDirectory);
if (!_directoryService.ExistOrCreate(CacheDirectory))
if (!DirectoryService.ExistOrCreate(CacheDirectory))
{
_logger.LogError("Cache directory {CacheDirectory} is not accessible or does not exist. Creating...", CacheDirectory);
}

@@ -71,7 +71,12 @@ namespace API.Services
return !Directory.Exists(path) ? Array.Empty<string>() : Directory.GetFiles(path);
}
public bool ExistOrCreate(string directoryPath)
/// <summary>
/// Returns true if the path exists and is a directory. If path does not exist, this will create it. Returns false in all fail cases.
/// </summary>
/// <param name="directoryPath"></param>
/// <returns></returns>
public static bool ExistOrCreate(string directoryPath)
{
var di = new DirectoryInfo(directoryPath);
if (di.Exists) return true;
@@ -81,7 +86,6 @@ namespace API.Services
}
catch (Exception ex)
{
_logger.LogError(ex, "There was an issue creating directory: {Directory}", directoryPath);
return false;
}
return true;
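ExistOrCreate is now a static helper on DirectoryService (it only touches the file system and, with the logger line removed above, no longer needs injected state), so the callers in this commit (LogsController, CacheService, BackupService) switch from the injected _directoryService to the static call. A short usage sketch, mirroring the LogsController hunk earlier; the wrapper class and method name are illustrative:

using System;
using System.IO;
using API.Services; // DirectoryService.ExistOrCreate is static as of this commit

public static class ExistOrCreateUsageSketch
{
    // Callers no longer need IDirectoryService for this helper; a false return
    // means the directory could not be created and the caller should log or abort.
    public static bool PrepareTempLogsDirectory(string tempDirectory)
    {
        var dateString = DateTime.Now.ToShortDateString().Replace("/", "_");
        var tempLocation = Path.Join(tempDirectory, "logs_" + dateString);
        return DirectoryService.ExistOrCreate(tempLocation);
    }
}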

@@ -66,7 +66,7 @@ namespace API.Services.Tasks
var backupDirectory = Task.Run(() => _unitOfWork.SettingsRepository.GetSettingAsync(ServerSettingKey.BackupDirectory)).Result.Value;
_logger.LogDebug("Backing up to {BackupDirectory}", backupDirectory);
if (!_directoryService.ExistOrCreate(backupDirectory))
if (!DirectoryService.ExistOrCreate(backupDirectory))
{
_logger.LogError("Could not write to {BackupDirectory}; aborting backup", backupDirectory);
return;
@@ -82,7 +82,7 @@ namespace API.Services.Tasks
}
var tempDirectory = Path.Join(_tempDirectory, dateString);
_directoryService.ExistOrCreate(tempDirectory);
DirectoryService.ExistOrCreate(tempDirectory);
_directoryService.ClearDirectory(tempDirectory);
_directoryService.CopyFilesToDirectory(

@@ -33,7 +33,7 @@ namespace API.Services.Tasks
_metadataService = metadataService;
}
//[DisableConcurrentExecution(timeoutInSeconds: 5)]
[DisableConcurrentExecution(timeoutInSeconds: 5)]
[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public void ScanLibraries()
{
@@ -64,7 +64,7 @@ namespace API.Services.Tasks
_scannedSeries = null;
}
//[DisableConcurrentExecution(5)]
[DisableConcurrentExecution(5)]
[AutomaticRetry(Attempts = 0, LogEvents = false, OnAttemptsExceeded = AttemptsExceededAction.Delete)]
public void ScanLibrary(int libraryId, bool forceUpdate)
{