Mirror of https://github.com/mealie-recipes/mealie.git (synced 2025-05-24 01:12:54 -04:00)
Feature/migration-rewrite (#278)
* start
* migration rewrite
* update name
* convert chowdown to new methods
* refactor/remove duplicate code
* refactor to unify logger + log to file
* remove toolbox
* Display report on UI

Co-authored-by: hay-kot <hay-kot@pm.me>
Parent: ed49eb7e80
Commit: 42deb5ad10
File diff suppressed because one or more lines are too long
@@ -45,7 +45,7 @@
</template>

<script>
import DataTable from "./DataTable";
import DataTable from "@/components/Admin/Backup/ImportSummaryDialog/DataTable";
export default {
  components: {
    DataTable,
@@ -145,4 +145,4 @@ export default {
</script>

<style>
</style>
</style>
@@ -1,5 +1,6 @@
<template>
  <v-card outlined class="my-2" :loading="loading">
    <MigrationDialog ref="migrationDialog" />
    <v-card-title>
      {{ title }}
      <v-spacer></v-spacer>
@@ -67,6 +68,7 @@
import UploadBtn from "../../UI/UploadBtn";
import utils from "@/utils";
import { api } from "@/api";
import MigrationDialog from "@/components/Admin/Migration/MigrationDialog.vue";
export default {
  props: {
    folder: String,
@@ -76,6 +78,7 @@ export default {
  },
  components: {
    UploadBtn,
    MigrationDialog,
  },
  data() {
    return {
@@ -90,7 +93,8 @@ export default {
    async importMigration(file_name) {
      this.loading = true;
      let response = await api.migrations.import(this.folder, file_name);
      this.$emit("imported", response.successful, response.failed);
      this.$refs.migrationDialog.open(response);
      // this.$emit("imported", response.successful, response.failed);
      this.loading = false;
    },
    readableTime(timestamp) {
frontend/src/components/Admin/Migration/MigrationDialog.vue (new file, 109 lines)
@@ -0,0 +1,109 @@
<template>
  <div class="text-center">
    <v-dialog v-model="dialog" width="70%">
      <v-card>
        <v-app-bar dark color="primary mb-2">
          <v-icon large left>
            mdi-import
          </v-icon>
          <v-toolbar-title class="headline">
            Migration Summary
          </v-toolbar-title>
          <v-spacer></v-spacer>
        </v-app-bar>
        <v-card-text class="mb-n4">
          <v-row>
            <div v-for="values in allNumbers" :key="values.title">
              <v-card-text>
                <div>
                  <h3>{{ values.title }}</h3>
                </div>
                <div class="success--text">Success: {{ values.success }}</div>
                <div class="error--text">Failed: {{ values.failure }}</div>
              </v-card-text>
            </div>
          </v-row>
        </v-card-text>
        <v-tabs v-model="tab">
          <v-tab>{{ $t("general.recipes") }}</v-tab>
        </v-tabs>
        <v-tabs-items v-model="tab">
          <v-tab-item v-for="(table, index) in allTables" :key="index">
            <v-card flat>
              <DataTable :data-headers="importHeaders" :data-set="table" />
            </v-card>
          </v-tab-item>
        </v-tabs-items>
      </v-card>
    </v-dialog>
  </div>
</template>

<script>
import DataTable from "@/components/Admin/Backup/ImportSummaryDialog/DataTable";
export default {
  components: {
    DataTable,
  },
  data: () => ({
    tab: null,
    dialog: false,
    recipeData: [],
    themeData: [],
    settingsData: [],
    userData: [],
    groupData: [],
    pageData: [],
    importHeaders: [
      {
        text: "Status",
        value: "status",
      },
      {
        text: "Name",
        align: "start",
        sortable: true,
        value: "name",
      },

      { text: "Exception", value: "data-table-expand", align: "center" },
    ],
    allDataTables: [],
  }),

  computed: {
    recipeNumbers() {
      return this.calculateNumbers(this.$t("general.recipes"), this.recipeData);
    },
    allNumbers() {
      return [this.recipeNumbers];
    },
    allTables() {
      return [this.recipeData];
    },
  },

  methods: {
    calculateNumbers(title, list_array) {
      if (!list_array) return;
      let numbers = { title: title, success: 0, failure: 0 };
      list_array.forEach(element => {
        if (element.status) {
          numbers.success++;
        } else numbers.failure++;
      });
      return numbers;
    },
    open(importData) {
      this.recipeData = importData;

      this.dialog = true;
    },
  },
};
</script>

<style>
</style>
@@ -1,6 +1,7 @@
import uvicorn
from fastapi import FastAPI
from fastapi.logger import logger
from mealie.core import root_logger

# import utils.startup as startup
from mealie.core.config import APP_VERSION, settings
@@ -11,6 +12,8 @@ from mealie.routes.recipe import all_recipe_routes, category_routes, recipe_crud
from mealie.routes.site_settings import all_settings
from mealie.routes.users import users

logger = root_logger.get_logger()

app = FastAPI(
    title="Mealie",
    description="A place for all your recipes",
@@ -50,8 +53,15 @@ api_routers()
start_scheduler()


@app.on_event("startup")
def system_startup():
    logger.info("-----SYSTEM STARTUP----- \n")
    logger.info("------APP SETTINGS------")
    logger.info(settings.json(indent=4, exclude={"SECRET", "DEFAULT_PASSWORD", "SFTP_PASSWORD", "SFTP_USERNAME"}))


def main():

    uvicorn.run(
        "app:app",
        host="0.0.0.0",
@@ -60,11 +70,11 @@ def main():
        reload_dirs=["mealie"],
        debug=True,
        log_level="info",
        log_config=None,
        workers=1,
        forwarded_allow_ips="*",
    )


if __name__ == "__main__":
    logger.info("-----SYSTEM STARTUP-----")
    main()
@@ -40,7 +40,6 @@ def determine_secrets(data_dir: Path, production: bool) -> str:

# General
DATA_DIR = determine_data_dir(PRODUCTION)
LOGGER_FILE = DATA_DIR.joinpath("mealie.log")


class AppDirectories:
mealie/core/root_logger.py (new file, 43 lines)
@@ -0,0 +1,43 @@
import logging
import sys

from mealie.core.config import DATA_DIR

LOGGER_FILE = DATA_DIR.joinpath("mealie.log")
LOGGER_FORMAT = "%(levelname)s: \t%(message)s"
DATE_FORMAT = "%d-%b-%y %H:%M:%S"

logging.basicConfig(level=logging.INFO, format=LOGGER_FORMAT, datefmt="%d-%b-%y %H:%M:%S")


def logger_init() -> logging.Logger:
    """ Returns the Root Loggin Object for Mealie """
    logger = logging.getLogger("mealie")
    logger.propagate = False

    # File Handler
    output_file_handler = logging.FileHandler(LOGGER_FILE)
    handler_format = logging.Formatter(LOGGER_FORMAT, datefmt=DATE_FORMAT)
    output_file_handler.setFormatter(handler_format)

    # Stdout
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setFormatter(handler_format)

    logger.addHandler(output_file_handler)
    logger.addHandler(stdout_handler)

    return logger


def get_logger(module=None) -> logging.Logger:
    """ Returns a child logger for mealie """
    global root_logger

    if module is None:
        return root_logger

    return root_logger.getChild(module)


root_logger = logger_init()
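Usage sketch (not part of this commit): modules now ask this module for a child of the "mealie" root logger instead of importing fastapi's logger; the root logger writes to stdout and to DATA_DIR/mealie.log. A minimal sketch, assuming mealie is importable and DATA_DIR is writable:

    # Illustrative only; the module name "my_module" is hypothetical.
    from mealie.core import root_logger

    logger = root_logger.get_logger("my_module")  # child logger named "mealie.my_module"
    logger.info("this line goes to stdout and to mealie.log")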
@@ -1,4 +1,4 @@
from fastapi.logger import logger
from mealie.core import root_logger
from mealie.core.config import settings
from mealie.core.security import get_password_hash
from mealie.db.database import db
@@ -7,6 +7,8 @@ from mealie.schema.settings import SiteSettings
from mealie.schema.theme import SiteTheme
from sqlalchemy.orm import Session

logger = root_logger.get_logger("init_db")


def init_db(db: Session = None) -> None:
    if not db:
@@ -47,6 +49,7 @@ def default_user_init(session: Session):
    logger.info("Generating Default User")
    db.users.create(session, default_user)


def main():
    if sql_exists:
        print("Database Exists")
@@ -54,5 +57,6 @@ def main():
        print("Database Doesn't Exists, Initializing...")
        init_db()


if __name__ == "__main__":
    main()
    main()
@@ -1,10 +1,12 @@
import sqlalchemy as sa
import sqlalchemy.orm as orm
from fastapi.logger import logger
from mealie.core import root_logger
from mealie.db.models.model_base import SqlAlchemyBase
from slugify import slugify
from sqlalchemy.orm import validates

logger = root_logger.get_logger()

site_settings2categories = sa.Table(
    "site_settings2categoories",
    SqlAlchemyBase.metadata,
@@ -1,10 +1,12 @@
import sqlalchemy as sa
import sqlalchemy.orm as orm
from fastapi.logger import logger
from mealie.core import root_logger
from mealie.db.models.model_base import SqlAlchemyBase
from slugify import slugify
from sqlalchemy.orm import validates

logger = root_logger.get_logger()

recipes2tags = sa.Table(
    "recipes2tags",
    SqlAlchemyBase.metadata,
@@ -1,7 +1,8 @@
import json

from fastapi import APIRouter, Depends
from mealie.core.config import APP_VERSION, LOGGER_FILE, app_dirs, settings
from mealie.core.config import APP_VERSION, app_dirs, settings
from mealie.core.root_logger import LOGGER_FILE
from mealie.routes.deps import get_current_user
from mealie.schema.debug import AppInfo, DebugInfo
@@ -8,15 +8,14 @@ from mealie.db.db_setup import generate_session
from mealie.routes.deps import get_current_user
from mealie.schema.migration import MigrationFile, Migrations
from mealie.schema.snackbar import SnackResponse
from mealie.services.migrations.chowdown import chowdown_migrate as chowdow_migrate
from mealie.services.migrations.nextcloud import migrate as nextcloud_migrate
from mealie.services.migrations import migration
from sqlalchemy.orm.session import Session

router = APIRouter(prefix="/api/migrations", tags=["Migration"], dependencies=[Depends(get_current_user)])


@router.get("", response_model=List[Migrations])
def get_avaiable_nextcloud_imports():
def get_all_migration_options():
    """ Returns a list of avaiable directories that can be imported into Mealie """
    response_data = []
    migration_dirs = [
@@ -36,23 +35,18 @@ def get_avaiable_nextcloud_imports():
    return response_data


@router.post("/{type}/{file_name}/import")
def import_nextcloud_directory(type: str, file_name: str, session: Session = Depends(generate_session)):
@router.post("/{import_type}/{file_name}/import")
def import_migration(import_type: migration.Migration, file_name: str, session: Session = Depends(generate_session)):
    """ Imports all the recipes in a given directory """
    file_path = app_dirs.MIGRATION_DIR.joinpath(type, file_name)
    if type == "nextcloud":
        return nextcloud_migrate(session, file_path)
    elif type == "chowdown":
        return chowdow_migrate(session, file_path)
    else:
        return SnackResponse.error("Incorrect Migration Type Selected")
    file_path = app_dirs.MIGRATION_DIR.joinpath(import_type.value, file_name)
    return migration.migrate(import_type, file_path, session)


@router.delete("/{type}/{file_name}/delete")
def delete_migration_data(type: str, file_name: str):
@router.delete("/{import_type}/{file_name}/delete")
def delete_migration_data(import_type: migration.Migration, file_name: str):
    """ Removes migration data from the file system """

    remove_path = app_dirs.MIGRATION_DIR.joinpath(type, file_name)
    remove_path = app_dirs.MIGRATION_DIR.joinpath(import_type.value, file_name)

    if remove_path.is_file():
        remove_path.unlink()
@@ -64,10 +58,10 @@ def delete_migration_data(type: str, file_name: str):
    return SnackResponse.error(f"Migration Data Remove: {remove_path.absolute()}")


@router.post("/{type}/upload")
def upload_nextcloud_zipfile(type: str, archive: UploadFile = File(...)):
@router.post("/{import_type}/upload")
def upload_nextcloud_zipfile(import_type: migration.Migration, archive: UploadFile = File(...)):
    """ Upload a .zip File to later be imported into Mealie """
    dir = app_dirs.MIGRATION_DIR.joinpath(type)
    dir = app_dirs.MIGRATION_DIR.joinpath(import_type.value)
    dir.mkdir(parents=True, exist_ok=True)
    dest = dir.joinpath(archive.filename)
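A hedged client sketch for the reworked import endpoint; only the route shape comes from this diff, while the base URL, port, auth scheme, and archive name below are assumptions:

    # Illustrative only, not part of the commit.
    import requests

    token = "..."  # a valid Mealie access token
    resp = requests.post(
        "http://localhost:9000/api/migrations/chowdown/my-export.zip/import",  # hypothetical file name
        headers={"Authorization": f"Bearer {token}"},
    )
    for entry in resp.json():  # list of MigrationImport records
        print(entry["name"], entry["status"], entry["exception"])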
@@ -19,6 +19,7 @@ async def get_all_recipe_tags(session: Session = Depends(generate_session)):
    """ Returns a list of available tags in the database """
    return db.tags.get_all_limit_columns(session, ["slug", "name"])


@router.post("")
async def create_recipe_tag(
    tag: TagIn, session: Session = Depends(generate_session), current_user=Depends(get_current_user)
@@ -7,9 +7,10 @@ class AppInfo(CamelModel):
    version: str
    demo_status: bool


class DebugInfo(AppInfo):
    api_port: int
    api_docs: bool
    db_type: str
    sqlite_file: Path
    default_group: str
    default_group: str
@@ -1,6 +1,7 @@
from datetime import datetime
from typing import List

from mealie.schema.restore import RecipeImport
from pydantic.main import BaseModel


@@ -23,3 +24,7 @@ class MigrationFile(BaseModel):
class Migrations(BaseModel):
    type: str
    files: List[MigrationFile] = []


class MigrationImport(RecipeImport):
    pass
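MigrationImport adds no fields of its own; judging from how it is constructed in _migration_base.py later in this diff, each record carries the recipe name, slug, import status, and exception text. A hedged construction sketch, with sample values taken from the tests below:

    # Illustrative only; field names come from the service code later in this diff.
    from mealie.schema.migration import MigrationImport

    entry = MigrationImport(name="Roasted Okra", slug="roasted-okra", status=True, exception="")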
@@ -4,13 +4,15 @@ from datetime import datetime
from pathlib import Path
from typing import Union

from fastapi.logger import logger
from mealie.core import root_logger
from jinja2 import Template
from mealie.core.config import app_dirs
from mealie.db.database import db
from mealie.db.db_setup import create_session
from pydantic.main import BaseModel

logger = root_logger.get_logger()


class ExportDatabase:
    def __init__(self, tag=None, templates=None) -> None:
@@ -1,13 +1,14 @@
import shutil
from dataclasses import dataclass
from pathlib import Path
from typing import Union

import requests
from fastapi.logger import logger
from mealie.core import root_logger
from mealie.core.config import app_dirs
from mealie.services.image import minify

logger = root_logger.get_logger()


@dataclass
class ImageOptions:
@@ -57,7 +58,7 @@ def write_image(recipe_slug: str, file_data: bytes, extension: str) -> Path.name
        pass

    image_dir = Path(app_dirs.IMG_DIR.joinpath(f"{recipe_slug}"))
    image_dir.mkdir()
    image_dir.mkdir(exist_ok=True, parents=True)
    extension = extension.replace(".", "")
    image_path = image_dir.joinpath(f"original.{extension}")

@@ -65,8 +66,7 @@ def write_image(recipe_slug: str, file_data: bytes, extension: str) -> Path.name
        with open(image_path, "ab") as f:
            f.write(file_data)
    else:
        with open(image_path, "ab") as f:
            shutil.copyfileobj(file_data, f)
        shutil.copy2(file_data, image_path)

    minify.migrate_images()
@@ -1,13 +1,15 @@
import shutil
from pathlib import Path

from fastapi.logger import logger
from mealie.core import root_logger
from mealie.core.config import app_dirs
from mealie.db.database import db
from mealie.db.db_setup import create_session
from PIL import Image
from sqlalchemy.orm.session import Session

logger = root_logger.get_logger()


def minify_image(image_file: Path, min_dest: Path, tiny_dest: Path):
    """Minifies an image in it's original file format. Quality is lost
mealie/services/migrations/_migration_base.py (new file, 174 lines)
@@ -0,0 +1,174 @@
import json
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Any, Callable, Optional

import yaml
from mealie.core import root_logger
from mealie.db.database import db
from mealie.schema.migration import MigrationImport
from mealie.schema.recipe import Recipe
from mealie.services.image import image, minify
from mealie.services.scraper.cleaner import Cleaner
from mealie.utils.unzip import unpack_zip
from pydantic import BaseModel

logger = root_logger.get_logger()


class MigrationAlias(BaseModel):
    """A datatype used by MigrationBase to pre-process a recipe dictionary to rewrite
    the alias key in the dictionary, if it exists, to the key. If set a `func` attribute
    will be called on the value before assigning the value to the new key
    """

    key: str
    alias: str
    func: Optional[Callable] = None


class MigrationBase(BaseModel):
    migration_report: list[MigrationImport] = []
    migration_file: Path
    session: Optional[Any]
    key_aliases: Optional[list[MigrationAlias]]

    @property
    def temp_dir(self) -> TemporaryDirectory:
        """unpacks the migration_file into a temporary directory
        that can be used as a context manager.

        Returns:
            TemporaryDirectory:
        """
        return unpack_zip(self.migration_file)

    @staticmethod
    def json_reader(json_file: Path) -> dict:
        print(json_file)
        with open(json_file, "r") as f:
            return json.loads(f.read())

    @staticmethod
    def yaml_reader(yaml_file: Path) -> dict:
        """A helper function to read in a yaml file from a Path. This assumes that the
        first yaml document is the recipe data and the second, if exists, is the description.

        Args:
            yaml_file (Path): Path to yaml file

        Returns:
            dict: representing the yaml file as a dictionary
        """
        with open(yaml_file, "r") as f:
            contents = f.read().split("---")
            recipe_data = {}
            for x, document in enumerate(contents):

                # Check if None or Empty String
                if document is None or document == "":
                    continue

                # Check if 'title:' present
                elif "title:" in document:
                    recipe_data.update(yaml.safe_load(document))

                else:
                    recipe_data["description"] = document

        return recipe_data

    @staticmethod
    def glob_walker(directory: Path, glob_str: str, return_parent=True) -> list[Path]:  # TODO:
        """A Helper function that will return the glob matches for the temporary directotry
        that was unpacked and passed in as the `directory` parameter. If `return_parent` is
        True the return Paths will be the parent directory for the file that was matched. If
        false the file itself will be returned.

        Args:
            directory (Path): Path to search directory
            glob_str ([type]): glob style match string
            return_parent (bool, optional): To return parent directory of match. Defaults to True.

        Returns:
            list[Path]:
        """
        directory = directory if isinstance(directory, Path) else Path(directory)
        matches = []
        for match in directory.glob(glob_str):
            if return_parent:
                matches.append(match.parent)
            else:
                matches.append(match)

        return matches

    @staticmethod
    def import_image(src: Path, dest_slug: str):
        """Read the successful migrations attribute and for each import the image
        appropriately into the image directory. Minification is done in mass
        after the migration occurs.
        """
        image.write_image(dest_slug, src, extension=src.suffix)
        minify.migrate_images()  # TODO: Refactor to support single file minification that doesn't suck

    def rewrite_alias(self, recipe_dict: dict) -> dict:
        """A helper function to reassign attributes by an alias using a list
        of MigrationAlias objects to rewrite the alias attribute found in the recipe_dict
        to a

        Args:
            recipe_dict (dict): [description]
            key_aliases (list[MigrationAlias]): [description]

        Returns:
            dict: [description]
        """
        if not self.key_aliases:
            return recipe_dict

        for alias in self.key_aliases:
            try:
                prop_value = recipe_dict.pop(alias.alias)
            except KeyError:
                logger.info(f"Key {alias.alias} Not Found. Skipping...")
                continue

            if alias.func:
                prop_value = alias.func(prop_value)

            recipe_dict[alias.key] = prop_value

        return recipe_dict

    def clean_recipe_dictionary(self, recipe_dict) -> Recipe:
        """Calls the rewrite_alias function and the Cleaner.clean function on a
        dictionary and returns the result unpacked into a Recipe object"""
        recipe_dict = self.rewrite_alias(recipe_dict)
        recipe_dict = Cleaner.clean(recipe_dict, url=recipe_dict.get("orgURL", None))

        return Recipe(**recipe_dict)

    def import_recipes_to_database(self, validated_recipes: list[Recipe]) -> None:
        """Used as a single access point to process a list of Recipe objects into the
        database in a predictable way. If an error occurs the session is rolled back
        and the process will continue. All import information is appended to the
        'migration_report' attribute to be returned to the frontend for display.

        Args:
            validated_recipes (list[Recipe]):
        """

        for recipe in validated_recipes:
            exception = ""
            status = False
            try:
                db.recipes.create(self.session, recipe.dict())
                status = True

            except Exception as inst:
                exception = inst
                self.session.rollback()

            import_status = MigrationImport(slug=recipe.slug, name=recipe.name, status=status, exception=str(exception))
            self.migration_report.append(import_status)
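A minimal sketch of what rewrite_alias does to an incoming dictionary, assuming the ChowdownMigration key_aliases defined later in this diff; the sample values are made up:

    # Before: keys as found in a Chowdown recipe file (hypothetical values).
    before = {"title": "Roasted Okra", "ingredients": ["okra"], "directions": ["Roast."], "tags": "side, vegan"}
    # After rewrite_alias: "title" -> "name", "ingredients" -> "recipeIngredient",
    # "directions" -> "recipeInstructions", and "tags" is run through helpers.split_by_comma.
    after = {"name": "Roasted Okra", "recipeIngredient": ["okra"], "recipeInstructions": ["Roast."], "tags": ["Side", "Vegan"]}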
@@ -1,94 +1,46 @@
import shutil
from pathlib import Path
from typing import Optional

import yaml
from fastapi.logger import logger
from mealie.core.config import app_dirs
from mealie.db.database import db
from mealie.schema.recipe import Recipe
from mealie.services.image.minify import migrate_images
from mealie.utils.unzip import unpack_zip
from mealie.schema.migration import MigrationImport
from mealie.services.migrations import helpers
from mealie.services.migrations._migration_base import MigrationAlias, MigrationBase
from sqlalchemy.orm.session import Session

try:
    from yaml import CLoader as Loader
except ImportError:
    from yaml import Loader

class ChowdownMigration(MigrationBase):
    key_aliases: Optional[list[MigrationAlias]] = [
        MigrationAlias(key="name", alias="title", func=None),
        MigrationAlias(key="recipeIngredient", alias="ingredients", func=None),
        MigrationAlias(key="recipeInstructions", alias="directions", func=None),
        MigrationAlias(key="tags", alias="tags", func=helpers.split_by_comma),
    ]


def read_chowdown_file(recipe_file: Path) -> Recipe:
    """Parse through the yaml file to try and pull out the relavent information.
    Some issues occur when ":" are used in the text. I have no put a lot of effort
    into this so there may be better ways of going about it. Currently, I get about 80-90%
    of recipes from repos I've tried.
def migrate(session: Session, zip_path: Path) -> list[MigrationImport]:
    cd_migration = ChowdownMigration(migration_file=zip_path, session=session)

    Args:
        recipe_file (Path): Path to the .yml file

    Returns:
        Recipe: Recipe class object
    """

    with open(recipe_file, "r") as stream:
        recipe_description: str = str
        recipe_data: dict = {}
        try:
            for x, item in enumerate(yaml.load_all(stream, Loader=Loader)):
                if x == 0:
                    recipe_data = item

                elif x == 1:
                    recipe_description = str(item)

        except yaml.YAMLError:
            return

    reformat_data = {
        "name": recipe_data.get("title"),
        "description": recipe_description,
        "image": recipe_data.get("image", ""),
        "recipeIngredient": recipe_data.get("ingredients"),
        "recipeInstructions": recipe_data.get("directions"),
        "tags": recipe_data.get("tags").split(","),
    }

    reformated_list = [{"text": instruction} for instruction in reformat_data["recipeInstructions"]]

    reformat_data["recipeInstructions"] = reformated_list

    return Recipe(**reformat_data)


def chowdown_migrate(session: Session, zip_file: Path):

    temp_dir = unpack_zip(zip_file)

    with temp_dir as dir:
    with cd_migration.temp_dir as dir:
        chow_dir = next(Path(dir).iterdir())
        image_dir = app_dirs.TEMP_DIR.joinpath(chow_dir, "images")
        recipe_dir = app_dirs.TEMP_DIR.joinpath(chow_dir, "_recipes")

        failed_recipes = []
        successful_recipes = []
        for recipe in recipe_dir.glob("*.md"):
            try:
                new_recipe = read_chowdown_file(recipe)
                db.recipes.create(session, new_recipe.dict())
                successful_recipes.append(new_recipe.name)
            except Exception as inst:
                session.rollback()
                logger.error(inst)
                failed_recipes.append(recipe.stem)
        recipes_as_dicts = [y for x in recipe_dir.glob("*.md") if (y := ChowdownMigration.yaml_reader(x)) is not None]

        failed_images = []
        for image in image_dir.iterdir():
            try:
                if image.stem not in failed_recipes:
                    shutil.copy(image, app_dirs.IMG_DIR.joinpath(image.name))
            except Exception as inst:
                logger.error(inst)
                failed_images.append(image.name)
        report = {"successful": successful_recipes, "failed": failed_recipes}
    recipes = [cd_migration.clean_recipe_dictionary(x) for x in recipes_as_dicts]

    migrate_images()
    return report
    cd_migration.import_recipes_to_database(recipes)

    recipe_lookup = {r.slug: r for r in recipes}

    for report in cd_migration.migration_report:
        if report.status:
            try:
                original_image = recipe_lookup.get(report.slug).image
                cd_image = image_dir.joinpath(original_image)
            except StopIteration:
                continue
            if cd_image:
                ChowdownMigration.import_image(cd_image, report.slug)

    return cd_migration.migration_report
mealie/services/migrations/helpers.py (new file, 12 lines)
@@ -0,0 +1,12 @@
def split_by_comma(tag_string: str):
    """Splits a single string by ',' performs a line strip and then title cases the resulting string

    Args:
        tag_string (str): [description]

    Returns:
        [type]: [description]
    """
    if not isinstance(tag_string, str):
        return None
    return [x.title().lstrip() for x in tag_string.split(",") if x != ""]
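Expected behavior of the helper, shown as an illustrative check rather than as part of the commit:

    from mealie.services.migrations.helpers import split_by_comma

    assert split_by_comma("dinner, quick, vegetarian") == ["Dinner", "Quick", "Vegetarian"]
    assert split_by_comma(None) is None  # non-strings fall through to None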
mealie/services/migrations/migration.py (new file, 49 lines)
@@ -0,0 +1,49 @@
from enum import Enum
from pathlib import Path

from mealie.core import root_logger
from mealie.schema.migration import MigrationImport
from mealie.services.migrations import chowdown, nextcloud
from sqlalchemy.orm.session import Session

logger = root_logger.get_logger()


class Migration(str, Enum):
    """The class defining the supported types of migrations for Mealie. Pass the
    class attribute of the class instead of the string when using.
    """

    nextcloud = "nextcloud"
    chowdown = "chowdown"


def migrate(migration_type: str, file_path: Path, session: Session) -> list[MigrationImport]:
    """The new entry point for accessing migrations within the 'migrations' service.
    Using the 'Migrations' enum class as a selector for migration_type to direct which function
    to call. All migrations will return a MigrationImport object that is built for displaying
    detailed information on the frontend. This will provide a single point of access

    Args:
        migration_type (str): a string option representing the migration type. See Migration attributes for options
        file_path (Path): Path to the zip file containing the data
        session (Session): a SqlAlchemy Session

    Returns:
        list[MigrationImport]: [description]
    """

    logger.info(f"Starting Migration from {migration_type}")

    if migration_type == Migration.nextcloud.value:
        migration_imports = nextcloud.migrate(session, file_path)

    elif migration_type == Migration.chowdown.value:
        migration_imports = chowdown.migrate(session, file_path)

    else:
        return []

    logger.info(f"Finishing Migration from {migration_type}")

    return migration_imports
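A sketch of driving the new entry point directly, mirroring how migration_routes.py calls it earlier in this diff; it assumes a configured database, and the archive name is hypothetical:

    # Illustrative only, not part of the commit.
    from mealie.core.config import app_dirs
    from mealie.db.db_setup import create_session
    from mealie.services.migrations import migration

    session = create_session()
    zip_path = app_dirs.MIGRATION_DIR.joinpath("chowdown", "my-export.zip")  # hypothetical file
    reports = migration.migrate(migration.Migration.chowdown.value, zip_path, session)
    for report in reports:
        print(report.name, report.status, report.exception)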
@@ -1,97 +1,69 @@
import json
import logging
import shutil
import zipfile
from dataclasses import dataclass
from pathlib import Path
from typing import Optional

from mealie.core.config import app_dirs
from mealie.db.database import db
from mealie.schema.recipe import Recipe
from mealie.services.image import minify
from mealie.services.scraper.cleaner import Cleaner
from mealie.schema.migration import MigrationImport
from mealie.services.migrations import helpers
from mealie.services.migrations._migration_base import MigrationAlias, MigrationBase
from slugify import slugify
from sqlalchemy.orm.session import Session


def process_selection(selection: Path) -> Path:
    if selection.is_dir():
        return selection
    elif selection.suffix == ".zip":
        with zipfile.ZipFile(selection, "r") as zip_ref:
            nextcloud_dir = app_dirs.TEMP_DIR.joinpath("nextcloud")
            nextcloud_dir.mkdir(exist_ok=False, parents=True)
            zip_ref.extractall(nextcloud_dir)
        return nextcloud_dir
    else:
        return None
@dataclass
class NextcloudDir:
    name: str
    recipe: dict
    image: Optional[Path]

    @property
    def slug(self):
        return slugify(self.recipe.get("name"))

    @classmethod
    def from_dir(cls, dir: Path):
        try:
            json_file = next(dir.glob("*.json"))
        except StopIteration:
            return None

        try:  # TODO: There's got to be a better way to do this.
            image_file = next(dir.glob("full.*"))
        except StopIteration:
            image_file = None

        return cls(name=dir.name, recipe=NextcloudMigration.json_reader(json_file), image=image_file)


def clean_nextcloud_tags(nextcloud_tags: str):
    if not isinstance(nextcloud_tags, str):
        return None

    return [x.title().lstrip() for x in nextcloud_tags.split(",") if x != ""]
class NextcloudMigration(MigrationBase):
    key_aliases: Optional[list[MigrationAlias]] = [
        MigrationAlias(key="tags", alias="keywords", func=helpers.split_by_comma),
        MigrationAlias(key="orgURL", alias="url", func=None),
    ]


def import_recipes(recipe_dir: Path) -> Recipe:
    image = False
def migrate(session: Session, zip_path: Path) -> list[MigrationImport]:

    for file in recipe_dir.glob("full.*"):
        image = file
        break
    nc_migration = NextcloudMigration(migration_file=zip_path, session=session)

    for file in recipe_dir.glob("*.json"):
        recipe_file = file
        break
    with nc_migration.temp_dir as dir:
        potential_recipe_dirs = NextcloudMigration.glob_walker(dir, glob_str="**/[!.]*.json", return_parent=True)

    with open(recipe_file, "r") as f:
        recipe_dict = json.loads(f.read())
        # nextcloud_dirs = [NextcloudDir.from_dir(x) for x in potential_recipe_dirs]
        nextcloud_dirs = {y.slug: y for x in potential_recipe_dirs if (y := NextcloudDir.from_dir(x))}
        # nextcloud_dirs = {x.slug: x for x in nextcloud_dirs}

    recipe_data = Cleaner.clean(recipe_dict)
        all_recipes = []
        for _, nc_dir in nextcloud_dirs.items():
            recipe = nc_migration.clean_recipe_dictionary(nc_dir.recipe)
            all_recipes.append(recipe)

    image_name = recipe_data["slug"]
    recipe_data["image"] = recipe_data["slug"]
    recipe_data["tags"] = clean_nextcloud_tags(recipe_data.get("keywords"))
        nc_migration.import_recipes_to_database(all_recipes)

    recipe = Recipe(**recipe_data)
        for report in nc_migration.migration_report:

    if image:
        shutil.copy(image, app_dirs.IMG_DIR.joinpath(image_name + image.suffix))
            if report.status:
                nc_dir: NextcloudDir = nextcloud_dirs[report.slug]
                if nc_dir.image:
                    NextcloudMigration.import_image(nc_dir.image, nc_dir.slug)

    return recipe


def prep():
    shutil.rmtree(app_dirs.TEMP_DIR, ignore_errors=True)
    app_dirs.TEMP_DIR.mkdir(exist_ok=True, parents=True)


def cleanup():
    shutil.rmtree(app_dirs.TEMP_DIR)


def migrate(session, selection: str):
    prep()
    app_dirs.MIGRATION_DIR.mkdir(exist_ok=True)
    selection = app_dirs.MIGRATION_DIR.joinpath(selection)

    nextcloud_dir = process_selection(selection)

    successful_imports = []
    failed_imports = []
    for dir in nextcloud_dir.iterdir():
        if dir.is_dir():

            try:
                recipe = import_recipes(dir)
                db.recipes.create(session, recipe.dict())

                successful_imports.append(recipe.name)
            except Exception:
                session.rollback()
                logging.error(f"Failed Nextcloud Import: {dir.name}")
                logging.exception("")
                failed_imports.append(dir.name)

    cleanup()
    minify.migrate_images()

    return {"successful": successful_imports, "failed": failed_imports}
    return nc_migration.migration_report
@@ -1,15 +1,18 @@
from apscheduler.schedulers.background import BackgroundScheduler
from mealie.core import root_logger
from mealie.db.database import db
from mealie.db.db_setup import create_session
from fastapi.logger import logger
from mealie.schema.user import GroupInDB
from mealie.services.backups.exports import auto_backup_job
from mealie.services.scheduler.global_scheduler import scheduler
from mealie.services.scheduler.scheduler_utils import Cron, cron_parser
from mealie.utils.post_webhooks import post_webhooks

logger = root_logger.get_logger()

# TODO Fix Scheduler


@scheduler.scheduled_job(trigger="interval", minutes=30)
def update_webhook_schedule():
    """
@@ -128,8 +128,10 @@ class Cleaner:

    @staticmethod
    def ingredient(ingredients: list) -> str:

        return [Cleaner.html(html.unescape(ing)) for ing in ingredients]
        if ingredients:
            return [Cleaner.html(html.unescape(ing)) for ing in ingredients]
        else:
            return []

    @staticmethod
    def yield_amount(yld) -> str:
@@ -3,15 +3,17 @@ from typing import List

import requests
import scrape_schema_recipe
from mealie.core import root_logger
from mealie.core.config import app_dirs
from fastapi.logger import logger
from mealie.services.image.image import scrape_image
from mealie.schema.recipe import Recipe
from mealie.services.image.image import scrape_image
from mealie.services.scraper import open_graph
from mealie.services.scraper.cleaner import Cleaner

LAST_JSON = app_dirs.DEBUG_DIR.joinpath("last_recipe.json")

logger = root_logger.get_logger()


def create_from_url(url: str) -> Recipe:
    """Main entry point for generating a recipe from a URL. Pass in a URL and
@@ -9,7 +9,6 @@ from tests.app_routes import AppRoutes
from tests.test_config import TEST_CHOWDOWN_DIR, TEST_NEXTCLOUD_DIR


# Chowdown
@pytest.fixture(scope="session")
def chowdown_zip():
    zip = TEST_CHOWDOWN_DIR.joinpath("test_chowdown-gh-pages.zip")
@@ -42,14 +41,10 @@ def test_import_chowdown_directory(api_client: TestClient, api_routes: AppRoutes

    assert response.status_code == 200

    report = json.loads(response.content)
    assert report["failed"] == []
    reports = json.loads(response.content)

    expected_slug = "roasted-okra"

    recipe_url = api_routes.recipes_recipe_slug(expected_slug)
    response = api_client.get(recipe_url)
    assert response.status_code == 200
    for report in reports:
        assert report.get("status") is True


def test_delete_chowdown_migration_data(api_client: TestClient, api_routes: AppRoutes, chowdown_zip: Path, token):
@@ -91,13 +86,9 @@ def test_import_nextcloud_directory(api_client: TestClient, api_routes: AppRoute

    assert response.status_code == 200

    report = json.loads(response.content)
    assert report["failed"] == []

    expected_slug = "air-fryer-shrimp"
    recipe_url = api_routes.recipes_recipe_slug(expected_slug)
    response = api_client.get(recipe_url)
    assert response.status_code == 200
    reports = json.loads(response.content)
    for report in reports:
        assert report.get("status") is True


def test_delete__nextcloud_migration_data(api_client: TestClient, api_routes: AppRoutes, nextcloud_zip: Path, token):
@@ -80,7 +80,7 @@ def test_cleaner_instructions(instructions):
def test_html_with_recipe_data():
    path = TEST_RAW_HTML.joinpath("healthy_pasta_bake_60759.html")
    url = "https://www.bbc.co.uk/food/recipes/healthy_pasta_bake_60759"
    recipe_data = extract_recipe_from_html(open(path,encoding="utf8").read(), url)
    recipe_data = extract_recipe_from_html(open(path, encoding="utf8").read(), url)

    assert len(recipe_data["name"]) > 10
    assert len(recipe_data["slug"]) > 10
@@ -29,7 +29,7 @@ def test_non_default_settings(monkeypatch):
    monkeypatch.setenv("DEFAULT_GROUP", "Test Group")
    monkeypatch.setenv("DEFAULT_PASSWORD", "Test Password")
    monkeypatch.setenv("API_PORT", "8000")
    monkeypatch.setenv("API_DOCS", 'False')
    monkeypatch.setenv("API_DOCS", "False")

    app_settings = AppSettings()
@@ -1,40 +1,39 @@
import shutil
from pathlib import Path
# import shutil
# from pathlib import Path

import pytest
from mealie.core.config import app_dirs
from mealie.schema.recipe import Recipe
from mealie.services.migrations.nextcloud import cleanup, import_recipes, prep, process_selection
from tests.test_config import TEST_NEXTCLOUD_DIR
# import pytest
# from mealie.core.config import app_dirs
# from mealie.schema.recipe import Recipe
# from tests.test_config import TEST_NEXTCLOUD_DIR

CWD = Path(__file__).parent
TEST_NEXTCLOUD_DIR
TEMP_NEXTCLOUD = app_dirs.TEMP_DIR.joinpath("nextcloud")
# CWD = Path(__file__).parent
# TEST_NEXTCLOUD_DIR
# TEMP_NEXTCLOUD = app_dirs.TEMP_DIR.joinpath("nextcloud")


@pytest.mark.parametrize(
    "file_name,final_path",
    [("nextcloud.zip", TEMP_NEXTCLOUD)],
)
def test_zip_extraction(file_name: str, final_path: Path):
    prep()
    zip = TEST_NEXTCLOUD_DIR.joinpath(file_name)
    dir = process_selection(zip)
# @pytest.mark.parametrize(
#     "file_name,final_path",
#     [("nextcloud.zip", TEMP_NEXTCLOUD)],
# )
# def test_zip_extraction(file_name: str, final_path: Path):
#     prep()
#     zip = TEST_NEXTCLOUD_DIR.joinpath(file_name)
#     dir = process_selection(zip)

    assert dir == final_path
    cleanup()
    assert dir.exists() is False
#     assert dir == final_path
#     cleanup()
#     assert dir.exists() is False


@pytest.mark.parametrize(
    "recipe_dir",
    [
        TEST_NEXTCLOUD_DIR.joinpath("Air Fryer Shrimp"),
        TEST_NEXTCLOUD_DIR.joinpath("Chicken Parmigiana"),
        TEST_NEXTCLOUD_DIR.joinpath("Skillet Shepherd's Pie"),
    ],
)
def test_nextcloud_migration(recipe_dir: Path):
    recipe = import_recipes(recipe_dir)
    assert isinstance(recipe, Recipe)
    shutil.rmtree(app_dirs.IMG_DIR.joinpath(recipe.image), ignore_errors=True)
# @pytest.mark.parametrize(
#     "recipe_dir",
#     [
#         TEST_NEXTCLOUD_DIR.joinpath("Air Fryer Shrimp"),
#         TEST_NEXTCLOUD_DIR.joinpath("Chicken Parmigiana"),
#         TEST_NEXTCLOUD_DIR.joinpath("Skillet Shepherd's Pie"),
#     ],
# )
# def test_nextcloud_migration(recipe_dir: Path):
#     recipe = import_recipes(recipe_dir)
#     assert isinstance(recipe, Recipe)
#     shutil.rmtree(app_dirs.IMG_DIR.joinpath(recipe.image), ignore_errors=True)