feat: Add recipekeeper migration (#3642)
parent a7fcb6c84d
commit f4f2b863e0
@@ -379,6 +379,10 @@
     "myrecipebox": {
       "title": "My Recipe Box",
       "description-long": "Mealie can import recipes from My Recipe Box. Export your recipes in CSV format, then upload the .csv file below."
+    },
+    "recipekeeper": {
+      "title": "Recipe Keeper",
+      "description-long": "Mealie can import recipes from Recipe Keeper. Export your recipes in zip format, then upload the .zip file below."
     }
   },
   "new-recipe": {
@@ -16,7 +16,8 @@ export type SupportedMigrations =
   | "paprika"
   | "mealie_alpha"
   | "tandoor"
-  | "plantoeat";
+  | "plantoeat"
+  | "recipekeeper";
 
 export interface CreateGroupPreferences {
   privateGroup?: boolean;
@@ -82,6 +82,7 @@ const MIGRATIONS = {
   nextcloud: "nextcloud",
   paprika: "paprika",
   plantoeat: "plantoeat",
+  recipekeeper: "recipekeeper",
   tandoor: "tandoor",
 };
 
@@ -135,6 +136,10 @@ export default defineComponent({
       text: i18n.tc("migration.tandoor.title"),
       value: MIGRATIONS.tandoor,
     },
+    {
+      text: i18n.tc("migration.recipekeeper.title"),
+      value: MIGRATIONS.recipekeeper,
+    },
   ];
   const _content = {
     [MIGRATIONS.mealie]: {
@@ -347,6 +352,26 @@ export default defineComponent({
         }
       ],
     },
+    [MIGRATIONS.recipekeeper]: {
+      text: i18n.tc("migration.recipekeeper.description-long"),
+      acceptedFileType: ".zip",
+      tree: [
+        {
+          id: 1,
+          icon: $globals.icons.zip,
+          name: "recipekeeperhtml.zip",
+          children: [
+            { id: 2, name: "recipes.html", icon: $globals.icons.codeJson },
+            { id: 3, name: "images", icon: $globals.icons.folderOutline,
+              children: [
+                { id: 4, name: "image1.jpeg", icon: $globals.icons.fileImage },
+                { id: 5, name: "image2.jpeg", icon: $globals.icons.fileImage },
+              ]
+            },
+          ],
+        }
+      ],
+    },
   };
 
   function setFileObject(fileObject: File) {
@@ -17,6 +17,7 @@ from mealie.services.migrations import (
     NextcloudMigrator,
     PaprikaMigrator,
     PlanToEatMigrator,
+    RecipeKeeperMigrator,
     TandoorMigrator,
 )
 
@@ -56,6 +57,7 @@ class GroupMigrationController(BaseUserController):
             SupportedMigrations.tandoor: TandoorMigrator,
             SupportedMigrations.plantoeat: PlanToEatMigrator,
             SupportedMigrations.myrecipebox: MyRecipeBoxMigrator,
+            SupportedMigrations.recipekeeper: RecipeKeeperMigrator,
         }
 
         constructor = table.get(migration_type, None)
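The controller resolves the migrator class from this table and instantiates it with the uploaded archive. Below is a minimal, self-contained sketch of that dispatch pattern; the *Sketch classes and the simplified constructor/migrate signatures are stand-ins for illustration, not Mealie's actual API.

import enum


class SupportedMigrations(str, enum.Enum):
    nextcloud = "nextcloud"
    recipekeeper = "recipekeeper"


class BaseMigratorSketch:
    """Stand-in for Mealie's BaseMigrator; the real constructor takes more context."""

    def __init__(self, archive: str):
        self.archive = archive

    def migrate(self, report_name: str) -> str:
        return f"{report_name}: migrated {self.archive}"


class NextcloudMigratorSketch(BaseMigratorSketch):
    pass


class RecipeKeeperMigratorSketch(BaseMigratorSketch):
    pass


# Enum member -> migrator class, mirroring the controller's lookup table.
MIGRATOR_TABLE = {
    SupportedMigrations.nextcloud: NextcloudMigratorSketch,
    SupportedMigrations.recipekeeper: RecipeKeeperMigratorSketch,
}


def start_migration(migration_type: SupportedMigrations, archive: str) -> str:
    constructor = MIGRATOR_TABLE.get(migration_type, None)
    if constructor is None:
        raise ValueError(f"Unsupported migration type: {migration_type}")
    # Simplified call; the real controller passes user, group, and report metadata.
    return constructor(archive=archive).migrate(f"{migration_type.value} migration")


print(start_migration(SupportedMigrations.recipekeeper, "recipekeeperhtml.zip"))
# recipekeeper migration: migrated recipekeeperhtml.zip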
@@ -12,6 +12,7 @@ class SupportedMigrations(str, enum.Enum):
     tandoor = "tandoor"
     plantoeat = "plantoeat"
     myrecipebox = "myrecipebox"
+    recipekeeper = "recipekeeper"
 
 
 class DataMigrationCreate(MealieModel):
@@ -5,4 +5,5 @@ from .myrecipebox import *
 from .nextcloud import *
 from .paprika import *
 from .plantoeat import *
+from .recipekeeper import *
 from .tandoor import *
@@ -1,19 +1,21 @@
 import tempfile
 import zipfile
 from dataclasses import dataclass
-from datetime import timedelta
 from pathlib import Path
-from typing import cast
 
-import isodate
-from isodate.isoerror import ISO8601Error
 from slugify import slugify
 
 from mealie.schema.reports.reports import ReportEntryCreate
 
 from ._migration_base import BaseMigrator
 from .utils.migration_alias import MigrationAlias
-from .utils.migration_helpers import MigrationReaders, glob_walker, import_image, split_by_comma
+from .utils.migration_helpers import (
+    MigrationReaders,
+    glob_walker,
+    import_image,
+    parse_iso8601_duration,
+    split_by_comma,
+)
 
 
 @dataclass
@@ -50,9 +52,9 @@ class NextcloudMigrator(BaseMigrator):
         self.key_aliases = [
             MigrationAlias(key="tags", alias="keywords", func=split_by_comma),
             MigrationAlias(key="orgURL", alias="url", func=None),
-            MigrationAlias(key="totalTime", alias="totalTime", func=parse_time),
-            MigrationAlias(key="prepTime", alias="prepTime", func=parse_time),
-            MigrationAlias(key="performTime", alias="cookTime", func=parse_time),
+            MigrationAlias(key="totalTime", alias="totalTime", func=parse_iso8601_duration),
+            MigrationAlias(key="prepTime", alias="prepTime", func=parse_iso8601_duration),
+            MigrationAlias(key="performTime", alias="cookTime", func=parse_iso8601_duration),
         ]
 
     def _migrate(self) -> None:
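Each MigrationAlias pairs a Mealie field name (key) with the field name used by the export (alias) and an optional transform (func). The following is a rough, self-contained illustration of how such a mapping could be applied to a raw exported record; AliasSketch and apply_aliases are simplifications written for this note, not Mealie's actual MigrationAlias or BaseMigrator logic.

from dataclasses import dataclass
from typing import Any, Callable


@dataclass
class AliasSketch:
    """Stand-in for mealie's MigrationAlias; fields inferred from the usage above."""

    key: str
    alias: str
    func: Callable[[Any], Any] | None = None


def apply_aliases(raw: dict, aliases: list[AliasSketch]) -> dict:
    """Copy exported fields onto Mealie's field names, applying any transform."""
    out = {}
    for a in aliases:
        if a.alias in raw:
            value = raw[a.alias]
            out[a.key] = a.func(value) if a.func else value
    return out


raw = {"keywords": "breakfast, quick", "url": "https://example.com/pancakes"}
aliases = [
    AliasSketch(key="tags", alias="keywords", func=lambda s: [t.strip() for t in s.split(",")]),
    AliasSketch(key="orgURL", alias="url", func=None),
]
print(apply_aliases(raw, aliases))
# {'tags': ['breakfast', 'quick'], 'orgURL': 'https://example.com/pancakes'}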
@@ -89,45 +91,3 @@ class NextcloudMigrator(BaseMigrator):
                 nc_dir = nextcloud_dirs[slug]
                 if nc_dir.image:
                     import_image(nc_dir.image, recipe_id)
-
-
-def parse_time(time: str | None) -> str:
-    """
-    Parses an ISO8601 duration string
-
-    https://en.wikipedia.org/wiki/ISO_8601#Durations
-    """
-
-    if not time:
-        return ""
-    if time[0] == "P":
-        try:
-            delta = isodate.parse_duration(time)
-            if not isinstance(delta, timedelta):
-                return time
-        except ISO8601Error:
-            return time
-
-    # TODO: make singular and plural translatable
-    time_part_map = {
-        "days": {"singular": "day", "plural": "days"},
-        "hours": {"singular": "hour", "plural": "hours"},
-        "minutes": {"singular": "minute", "plural": "minutes"},
-        "seconds": {"singular": "second", "plural": "seconds"},
-    }
-
-    delta = cast(timedelta, delta)
-    time_part_map["days"]["value"] = delta.days
-    time_part_map["hours"]["value"] = delta.seconds // 3600
-    time_part_map["minutes"]["value"] = (delta.seconds // 60) % 60
-    time_part_map["seconds"]["value"] = delta.seconds % 60
-
-    return_strings: list[str] = []
-    for value_map in time_part_map.values():
-        if not (value := value_map["value"]):
-            continue
-
-        unit_key = "singular" if value == 1 else "plural"
-        return_strings.append(f"{value} {value_map[unit_key]}")
-
-    return " ".join(return_strings) if return_strings else time
mealie/services/migrations/recipekeeper.py (new file, 99 lines)
@@ -0,0 +1,99 @@
import tempfile
import zipfile
from pathlib import Path

from bs4 import BeautifulSoup

from mealie.services.scraper import cleaner

from ._migration_base import BaseMigrator
from .utils.migration_alias import MigrationAlias
from .utils.migration_helpers import import_image, parse_iso8601_duration


def parse_recipe_div(recipe, image_path):
    meta = {}
    for item in recipe.find_all(lambda x: x.has_attr("itemprop")):
        if item.name == "meta":
            meta[item["itemprop"]] = item["content"]
        elif item.name == "div":
            meta[item["itemprop"]] = list(item.stripped_strings)
        elif item.name == "img":
            meta[item["itemprop"]] = str(image_path / item["src"])
        else:
            meta[item["itemprop"]] = item.string
    # merge nutrition keys into their own dict.
    nutrition = {}
    for k in meta:
        if k.startswith("recipeNut"):
            nutrition[k.removeprefix("recipeNut")] = meta[k].strip()
    meta["nutrition"] = nutrition
    return meta


def get_value_as_string_or_none(dictionary: dict, key: str):
    value = dictionary.get(key)
    if value is not None:
        try:
            return str(value)
        except Exception:
            return None
    else:
        return None


def to_list(x: list[str] | str) -> list[str]:
    if isinstance(x, str):
        return [x]
    return x


class RecipeKeeperMigrator(BaseMigrator):
    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        self.name = "recipekeeper"

        self.key_aliases = [
            MigrationAlias(
                key="recipeIngredient",
                alias="recipeIngredients",
            ),
            MigrationAlias(key="recipeInstructions", alias="recipeDirections"),
            MigrationAlias(key="performTime", alias="cookTime", func=parse_iso8601_duration),
            MigrationAlias(key="prepTime", alias="prepTime", func=parse_iso8601_duration),
            MigrationAlias(key="image", alias="photo0"),
            MigrationAlias(key="tags", alias="recipeCourse", func=to_list),
            MigrationAlias(key="recipe_category", alias="recipeCategory", func=to_list),
            MigrationAlias(key="notes", alias="recipeNotes"),
            MigrationAlias(key="nutrition", alias="nutrition", func=cleaner.clean_nutrition),
            MigrationAlias(key="rating", alias="recipeRating"),
            MigrationAlias(key="orgURL", alias="recipeSource"),
            MigrationAlias(key="recipe_yield", alias="recipeYield"),
        ]

    def _migrate(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            with zipfile.ZipFile(self.archive) as zip_file:
                zip_file.extractall(tmpdir)

            source_dir = Path(tmpdir) / "recipekeeperhtml"

            recipes_as_dicts: list[dict] = []
            with open(source_dir / "recipes.html") as fp:
                soup = BeautifulSoup(fp, "lxml")
                for recipe_div in soup.body.find_all("div", "recipe-details"):
                    recipes_as_dicts.append(parse_recipe_div(recipe_div, source_dir))

            recipes = [self.clean_recipe_dictionary(x) for x in recipes_as_dicts]
            results = self.import_recipes_to_database(recipes)
            for (_, recipe_id, status), recipe in zip(results, recipes, strict=False):
                if status:
                    try:
                        if not recipe or not recipe.image:
                            continue

                    except StopIteration:
                        continue

                    import_image(recipe.image, recipe_id)
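parse_recipe_div relies only on elements carrying an itemprop attribute. The snippet below uses a hypothetical fragment written in the style of a Recipe Keeper HTML export (the real recipes.html markup may differ) to show the kind of dictionary the parser produces before clean_recipe_dictionary maps it through the key aliases; it assumes parse_recipe_div from the new module above is in scope.

from pathlib import Path

from bs4 import BeautifulSoup

# Hypothetical fragment approximating a Recipe Keeper export.
html = """
<div class="recipe-details">
  <h2 itemprop="name">Pancakes</h2>
  <meta itemprop="prepTime" content="PT10M">
  <meta itemprop="cookTime" content="PT15M">
  <div itemprop="recipeIngredients"><p>2 eggs</p><p>1 cup flour</p></div>
  <img itemprop="photo0" src="images/pancakes.jpg">
</div>
"""

soup = BeautifulSoup(html, "lxml")
recipe_div = soup.find("div", "recipe-details")
print(parse_recipe_div(recipe_div, Path("/tmp/recipekeeperhtml")))
# {'name': 'Pancakes', 'prepTime': 'PT10M', 'cookTime': 'PT15M',
#  'recipeIngredients': ['2 eggs', '1 cup flour'],
#  'photo0': '/tmp/recipekeeperhtml/images/pancakes.jpg', 'nutrition': {}}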
@@ -1,6 +1,9 @@
 import json
+from datetime import timedelta
 from pathlib import Path
+from typing import cast
 
+import isodate
 import yaml
 from PIL import UnidentifiedImageError
 from pydantic import UUID4
@@ -132,3 +135,45 @@ async def scrape_image(image_url: str, recipe_id: UUID4):
         await data_service.scrape_image(image_url)
     except UnidentifiedImageError:
         return
+
+
+def parse_iso8601_duration(time: str | None) -> str:
+    """
+    Parses an ISO8601 duration string
+
+    https://en.wikipedia.org/wiki/ISO_8601#Durations
+    """
+
+    if not time:
+        return ""
+    if time[0] == "P":
+        try:
+            delta = isodate.parse_duration(time)
+            if not isinstance(delta, timedelta):
+                return time
+        except isodate.ISO8601Error:
+            return time
+
+    # TODO: make singular and plural translatable
+    time_part_map = {
+        "days": {"singular": "day", "plural": "days"},
+        "hours": {"singular": "hour", "plural": "hours"},
+        "minutes": {"singular": "minute", "plural": "minutes"},
+        "seconds": {"singular": "second", "plural": "seconds"},
+    }
+
+    delta = cast(timedelta, delta)
+    time_part_map["days"]["value"] = delta.days
+    time_part_map["hours"]["value"] = delta.seconds // 3600
+    time_part_map["minutes"]["value"] = (delta.seconds // 60) % 60
+    time_part_map["seconds"]["value"] = delta.seconds % 60
+
+    return_strings: list[str] = []
+    for value_map in time_part_map.values():
+        if not (value := value_map["value"]):
+            continue
+
+        unit_key = "singular" if value == 1 else "plural"
+        return_strings.append(f"{value} {value_map[unit_key]}")
+
+    return " ".join(return_strings) if return_strings else time
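Illustrative expectations for the helper above, derived from reading the code rather than from the project's test suite:

parse_iso8601_duration("PT1H30M")  # -> "1 hour 30 minutes"
parse_iso8601_duration("P1DT2H")   # -> "1 day 2 hours"
parse_iso8601_duration("PT1M")     # -> "1 minute"
parse_iso8601_duration("PT0S")     # -> "PT0S"  (every component is zero, so the original string is returned)
parse_iso8601_duration("Pbogus")   # -> "Pbogus"  (unparsable, returned unchanged)
parse_iso8601_duration(None)       # -> ""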
@@ -38,6 +38,8 @@ migrations_plantoeat = CWD / "migrations/plantoeat.zip"
 
 migrations_myrecipebox = CWD / "migrations/myrecipebox.csv"
 
+migrations_recipekeeper = CWD / "migrations/recipekeeper.zip"
+
 images_test_image_1 = CWD / "images/test-image-1.jpg"
 
 images_test_image_2 = CWD / "images/test-image-2.png"
tests/data/migrations/recipekeeper.zip (new binary file, not shown)
@@ -30,6 +30,7 @@ test_cases = [
     MigrationTestData(typ=SupportedMigrations.tandoor, archive=test_data.migrations_tandoor),
     MigrationTestData(typ=SupportedMigrations.plantoeat, archive=test_data.migrations_plantoeat),
     MigrationTestData(typ=SupportedMigrations.myrecipebox, archive=test_data.migrations_myrecipebox),
+    MigrationTestData(typ=SupportedMigrations.recipekeeper, archive=test_data.migrations_recipekeeper),
 ]
 
 test_ids = [
@@ -41,6 +42,7 @@ test_ids = [
     "tandoor_archive",
     "plantoeat_archive",
     "myrecipebox_csv",
+    "recipekeeper_archive",
 ]
 
 
@@ -55,7 +57,10 @@ def test_recipe_migration(api_client: TestClient, unique_user: TestUser, mig: Mi
     }
 
     response = api_client.post(
-        api_routes.groups_migrations, data=payload, files=file_payload, headers=unique_user.token
+        api_routes.groups_migrations,
+        data=payload,
+        files=file_payload,
+        headers=unique_user.token,
     )
 
     assert response.status_code == 200
@@ -117,7 +122,10 @@ def test_bad_mealie_alpha_data_is_ignored(api_client: TestClient, unique_user: T
     }
 
     response = api_client.post(
-        api_routes.groups_migrations, data=payload, files=file_payload, headers=unique_user.token
+        api_routes.groups_migrations,
+        data=payload,
+        files=file_payload,
+        headers=unique_user.token,
     )
 
     assert response.status_code == 200