diff --git a/frontend/api/class-interfaces/recipes.ts b/frontend/api/class-interfaces/recipes.ts index aa6441cfcd37..175ca78b1b2e 100644 --- a/frontend/api/class-interfaces/recipes.ts +++ b/frontend/api/class-interfaces/recipes.ts @@ -1,4 +1,6 @@ import { BaseCRUDAPI } from "../_base"; +import { Category } from "./categories"; +import { Tag } from "./tags"; import { Recipe, CreateRecipe } from "~/types/api-types/recipe"; const prefix = "/api"; @@ -8,6 +10,7 @@ const routes = { recipesBase: `${prefix}/recipes`, recipesTestScrapeUrl: `${prefix}/recipes/test-scrape-url`, recipesCreateUrl: `${prefix}/recipes/create-url`, + recipesCreateUrlBulk: `${prefix}/recipes/create-url/bulk`, recipesCreateFromZip: `${prefix}/recipes/create-from-zip`, recipesCategory: `${prefix}/recipes/category`, recipesParseIngredient: `${prefix}/parser/ingredient`, @@ -59,6 +62,16 @@ export interface ParsedIngredient { ingredient: Ingredient; } +export interface BulkCreateRecipe { + url: string; + categories: Category[]; + tags: Tag[]; +} + +export interface BulkCreatePayload { + imports: BulkCreateRecipe[]; +} + export class RecipeAPI extends BaseCRUDAPI { baseRoute: string = routes.recipesBase; itemRoute = routes.recipesRecipeSlug; @@ -90,6 +103,10 @@ export class RecipeAPI extends BaseCRUDAPI { return await this.requests.post(routes.recipesCreateUrl, { url }); } + async createManyByUrl(payload: BulkCreatePayload) { + return await this.requests.post(routes.recipesCreateUrlBulk, payload); + } + // Recipe Comments // Methods to Generate reference urls for assets/images * diff --git a/frontend/components/Domain/Recipe/RecipeCategoryTagSelector.vue b/frontend/components/Domain/Recipe/RecipeCategoryTagSelector.vue index 35faeddd5ce3..c7874b4a8669 100644 --- a/frontend/components/Domain/Recipe/RecipeCategoryTagSelector.vue +++ b/frontend/components/Domain/Recipe/RecipeCategoryTagSelector.vue @@ -1,3 +1,5 @@ +//TODO: Prevent fetching Categories/Tags multiple time when selector is on page multiple 
times + @@ -204,10 +283,12 @@ import { defineComponent, reactive, toRefs, ref, useRouter, useContext } from "@ // @ts-ignore No Types for v-jsoneditor import VJsoneditor from "v-jsoneditor"; import { useApiSingleton } from "~/composables/use-api"; +import RecipeCategoryTagSelector from "~/components/Domain/Recipe/RecipeCategoryTagSelector.vue"; import { validators } from "~/composables/use-validators"; import { Recipe } from "~/types/api-types/recipe"; +import { alert } from "~/composables/use-toast"; export default defineComponent({ - components: { VJsoneditor }, + components: { VJsoneditor, RecipeCategoryTagSelector }, setup() { const state = reactive({ error: false, @@ -233,6 +314,11 @@ export default defineComponent({ text: "Import with .zip", value: "zip", }, + { + icon: $globals.icons.link, + text: "Bulk URL Import", + value: "bulk", + }, { icon: $globals.icons.robot, text: "Debug Scraper", @@ -249,7 +335,6 @@ export default defineComponent({ state.loading = false; return; } - console.log(response); router.push(`/recipe/${response.data}`); } @@ -300,7 +385,6 @@ export default defineComponent({ return; } const { response } = await api.recipes.createOne({ name }); - console.log("Create By Name Func", response); handleResponse(response); } @@ -318,11 +402,31 @@ export default defineComponent({ formData.append(newRecipeZipFileName, newRecipeZip.value); const { response } = await api.upload.file("/api/recipes/create-from-zip", formData); - console.log(response); handleResponse(response); } + // =================================================== + // Bulk Importer + + const bulkUrls = ref([{ url: "", categories: [], tags: [] }]); + + async function bulkCreate() { + if (bulkUrls.value.length === 0) { + return; + } + + const { response } = await api.recipes.createManyByUrl({ imports: bulkUrls.value }); + + if (response?.status === 202) { + alert.success("Bulk Import process has started"); + } else { + alert.error("Bulk import process has failed"); + } + } + return { 
+ bulkCreate, + bulkUrls, debugTreeView, tabs, domCreateByName, diff --git a/frontend/pages/recipes/all.vue b/frontend/pages/recipes/all.vue index 0e25d1c8645c..d035725ced4c 100644 --- a/frontend/pages/recipes/all.vue +++ b/frontend/pages/recipes/all.vue @@ -21,7 +21,7 @@ import { useLazyRecipes } from "~/composables/use-recipes"; export default defineComponent({ components: { RecipeCardSection }, setup() { - const start = ref(1); + const start = ref(0); const limit = ref(30); const increment = ref(30); const ready = ref(false); diff --git a/mealie/db/data_initialization/init_units_foods.py b/mealie/db/data_initialization/init_units_foods.py index 38821b52d25e..be37c58a9572 100644 --- a/mealie/db/data_initialization/init_units_foods.py +++ b/mealie/db/data_initialization/init_units_foods.py @@ -1,9 +1,11 @@ import json from pathlib import Path +from mealie.core.root_logger import get_logger from mealie.db.data_access_layer.access_model_factory import Database CWD = Path(__file__).parent +logger = get_logger(__name__) def get_default_foods(): @@ -23,10 +25,10 @@ def default_recipe_unit_init(db: Database) -> None: try: db.ingredient_units.create(unit) except Exception as e: - print(e) + logger.error(e) for food in get_default_foods(): try: db.ingredient_foods.create(food) except Exception as e: - print(e) + logger.error(e) diff --git a/mealie/routes/admin/admin_email.py b/mealie/routes/admin/admin_email.py index b3fe20f8c475..921e1c1e2c13 100644 --- a/mealie/routes/admin/admin_email.py +++ b/mealie/routes/admin/admin_email.py @@ -33,7 +33,6 @@ async def check_email_config(): @router.post("", response_model=EmailSuccess) async def send_test_email(data: EmailTest): - print(data) service = EmailService() status = False error = None diff --git a/mealie/routes/recipe/image_and_assets.py b/mealie/routes/recipe/image_and_assets.py index eb3d21e63b88..f76ae784054d 100644 --- a/mealie/routes/recipe/image_and_assets.py +++ b/mealie/routes/recipe/image_and_assets.py @@ -8,17 
+8,14 @@ from sqlalchemy.orm.session import Session from mealie.db.database import get_database from mealie.db.db_setup import generate_session from mealie.routes.routers import UserAPIRouter -from mealie.schema.recipe import CreateRecipeByURL, Recipe, RecipeAsset +from mealie.schema.recipe import CreateRecipeByUrl, Recipe, RecipeAsset from mealie.services.image.image import scrape_image, write_image user_router = UserAPIRouter() @user_router.post("/{slug}/image") -def scrape_image_url( - slug: str, - url: CreateRecipeByURL, -): +def scrape_image_url(slug: str, url: CreateRecipeByUrl): """ Removes an existing image and replaces it with the incoming file. """ scrape_image(url.url, slug) diff --git a/mealie/routes/recipe/recipe_crud_routes.py b/mealie/routes/recipe/recipe_crud_routes.py index e18778346ec8..2491ba1a58c3 100644 --- a/mealie/routes/recipe/recipe_crud_routes.py +++ b/mealie/routes/recipe/recipe_crud_routes.py @@ -12,10 +12,12 @@ from mealie.core.root_logger import get_logger from mealie.db.database import get_database from mealie.db.db_setup import generate_session from mealie.routes.routers import UserAPIRouter -from mealie.schema.recipe import CreateRecipeByURL, Recipe, RecipeImageTypes -from mealie.schema.recipe.recipe import CreateRecipe, RecipeSummary +from mealie.schema.recipe import CreateRecipeByUrl, Recipe, RecipeImageTypes +from mealie.schema.recipe.recipe import CreateRecipe, CreateRecipeByUrlBulk, RecipeSummary +from mealie.schema.server.tasks import ServerTaskNames from mealie.services.recipe.recipe_service import RecipeService from mealie.services.scraper.scraper import create_from_url, scrape_from_url +from mealie.services.server_tasks.background_executory import BackgroundExecutor user_router = UserAPIRouter() logger = get_logger() @@ -34,15 +36,55 @@ def create_from_name(data: CreateRecipe, recipe_service: RecipeService = Depends @user_router.post("/create-url", status_code=201, response_model=str) -def parse_recipe_url(url: 
CreateRecipeByURL, recipe_service: RecipeService = Depends(RecipeService.private)): +def parse_recipe_url(url: CreateRecipeByUrl, recipe_service: RecipeService = Depends(RecipeService.private)): """ Takes in a URL and attempts to scrape data and load it into the database """ - recipe = create_from_url(url.url) return recipe_service.create_one(recipe).slug +@user_router.post("/create-url/bulk", status_code=202) +def parse_recipe_url_bulk( + bulk: CreateRecipeByUrlBulk, + recipe_service: RecipeService = Depends(RecipeService.private), + bg_service: BackgroundExecutor = Depends(BackgroundExecutor.private), +): + """ Takes in a list of URLs and attempts to scrape each one and load it into the database as a background task """ + + def bulk_import_func(task_id: int, session: Session) -> None: + database = get_database(session) + task = database.server_tasks.get_one(task_id) + + task.append_log("Bulk recipe import task has started") + + for b in bulk.imports: + try: + recipe = create_from_url(b.url) + + if b.tags: + recipe.tags = b.tags + + if b.categories: + recipe.recipe_category = b.categories + + recipe_service.create_one(recipe) + task.append_log(f"INFO: Created recipe from url: {b.url}") + except Exception as e: + task.append_log(f"Error: Failed to create recipe from url: {b.url}") + task.append_log(f"Error: {e}") + logger.error(f"Failed to create recipe from url: {b.url}") + logger.error(e) + database.server_tasks.update(task.id, task) + + task.set_finished() + database.server_tasks.update(task.id, task) + + bg_service.dispatch(ServerTaskNames.bulk_recipe_import, bulk_import_func) + + return {"details": "task has been started"} + + @user_router.post("/test-scrape-url") -def test_parse_recipe_url(url: CreateRecipeByURL): +def test_parse_recipe_url(url: CreateRecipeByUrl): # Debugger should produce the same result as the scraper sees before cleaning scraped_data = scrape_from_url(url.url) if scraped_data: @@ -73,11 +115,8 @@ async def get_recipe_as_zip( ): """ Get a Recipe and It's Original Image as a Zip File """
db = get_database(session) - recipe: Recipe = db.recipes.get(slug) - image_asset = recipe.image_dir.joinpath(RecipeImageTypes.original.value) - with ZipFile(temp_path, "w") as myzip: myzip.writestr(f"{slug}.json", recipe.json()) diff --git a/mealie/schema/meal_plan/new_meal.py b/mealie/schema/meal_plan/new_meal.py index 43bc5e1e5af5..1bbac905917a 100644 --- a/mealie/schema/meal_plan/new_meal.py +++ b/mealie/schema/meal_plan/new_meal.py @@ -25,7 +25,6 @@ class CreatePlanEntry(CamelModel): @validator("recipe_id", always=True) @classmethod def id_or_title(cls, value, values): - print(value, values) if bool(value) is False and bool(values["title"]) is False: raise ValueError(f"`recipe_id={value}` or `title={values['title']}` must be provided") diff --git a/mealie/schema/recipe/recipe.py b/mealie/schema/recipe/recipe.py index 05d980e9fd0c..a867819afd52 100644 --- a/mealie/schema/recipe/recipe.py +++ b/mealie/schema/recipe/recipe.py @@ -21,17 +21,6 @@ from .recipe_step import RecipeStep app_dirs = get_app_dirs() -class CreateRecipeByURL(BaseModel): - url: str - - class Config: - schema_extra = {"example": {"url": "https://myfavoriterecipes.com/recipes"}} - - -class CreateRecipe(CamelModel): - name: str - - class RecipeTag(CamelModel): name: str slug: str @@ -44,6 +33,27 @@ class RecipeCategory(RecipeTag): pass +class CreateRecipeByUrl(BaseModel): + url: str + + class Config: + schema_extra = {"example": {"url": "https://myfavoriterecipes.com/recipes"}} + + +class CreateRecipeBulk(BaseModel): + url: str + categories: list[RecipeCategory] = None + tags: list[RecipeTag] = None + + +class CreateRecipeByUrlBulk(BaseModel): + imports: list[CreateRecipeBulk] + + +class CreateRecipe(CamelModel): + name: str + + class RecipeSummary(CamelModel): id: Optional[int] diff --git a/mealie/schema/server/tasks.py b/mealie/schema/server/tasks.py index 6b617bd27ec0..10d7c7e6522e 100644 --- a/mealie/schema/server/tasks.py +++ b/mealie/schema/server/tasks.py @@ -8,6 +8,7 @@ from pydantic 
import Field class ServerTaskNames(str, enum.Enum): default = "Background Task" backup_task = "Database Backup" + bulk_recipe_import = "Bulk Recipe Import" class ServerTaskStatus(str, enum.Enum): diff --git a/mealie/services/_base_http_service/crud_http_mixins.py b/mealie/services/_base_http_service/crud_http_mixins.py index 45198f819af2..5b6b8d4114f2 100644 --- a/mealie/services/_base_http_service/crud_http_mixins.py +++ b/mealie/services/_base_http_service/crud_http_mixins.py @@ -37,6 +37,7 @@ class CrudHttpMixins(Generic[C, R, U], ABC): self.item = self.dal.create(data) except Exception as ex: logger.exception(ex) + self.session.rollback() msg = default_msg if exception_msgs: diff --git a/mealie/services/email/email_service.py b/mealie/services/email/email_service.py index 6bf13e7d2c12..ccbb297b3873 100644 --- a/mealie/services/email/email_service.py +++ b/mealie/services/email/email_service.py @@ -73,14 +73,3 @@ class EmailService(BaseService): button_text="Test Email", ) return self.send_email(address, test_email) - - -def main(): - print("Starting...") - service = EmailService() - service.send_test_email("hay-kot@pm.me") - print("Finished...") - - -if __name__ == "__main__": - main() diff --git a/mealie/services/parser_services/crfpp/pre_processor.py b/mealie/services/parser_services/crfpp/pre_processor.py index e8a8dc60cc45..c3151b700738 100644 --- a/mealie/services/parser_services/crfpp/pre_processor.py +++ b/mealie/services/parser_services/crfpp/pre_processor.py @@ -19,7 +19,7 @@ replace_abbreviations = { def replace_common_abbreviations(string: str) -> str: for k, v in replace_abbreviations.items(): - regex = rf"(?<=\d)\s?({k}s?)" + regex = rf"(?<=\d)\s?({k}s?\b)" string = re.sub(regex, v, string) return string diff --git a/mealie/services/scraper/cleaner.py b/mealie/services/scraper/cleaner.py index 35e84d7c0af6..af3a68654c65 100644 --- a/mealie/services/scraper/cleaner.py +++ b/mealie/services/scraper/cleaner.py @@ -43,13 +43,9 @@ def clean_string(text:
str) -> str: if isinstance(text, list): text = text[0] - print(type(text)) - if text == "" or text is None: return "" - print(text) - cleaned_text = html.unescape(text) cleaned_text = re.sub("<[^<]+?>", "", cleaned_text) cleaned_text = re.sub(" +", " ", cleaned_text) @@ -201,9 +197,10 @@ def clean_time(time_entry): if time_entry is None: return None elif isinstance(time_entry, timedelta): - pretty_print_timedelta(time_entry) + return pretty_print_timedelta(time_entry) elif isinstance(time_entry, datetime): - print(time_entry) + pass + # print(time_entry) elif isinstance(time_entry, str): try: time_delta_object = parse_duration(time_entry) diff --git a/poetry.lock b/poetry.lock index 42b140e1f1f3..8ccdf36004f4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -372,6 +372,19 @@ mccabe = ">=0.6.0,<0.7.0" pycodestyle = ">=2.7.0,<2.8.0" pyflakes = ">=2.3.0,<2.4.0" +[[package]] +name = "flake8-print" +version = "4.0.0" +description = "print statement checker plugin for flake8" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +flake8 = ">=3.0" +pycodestyle = "*" +six = "*" + [[package]] name = "ghp-import" version = "2.0.2" @@ -1381,7 +1394,7 @@ pgsql = ["psycopg2-binary"] [metadata] lock-version = "1.1" python-versions = "^3.9" -content-hash = "89271346f576de3d209ae69639ab7227c03bb8512a1671905a48407d76371ba9" +content-hash = "31d3ee104998ad61b18322584c0cc84de32dbad0dc7657c9f7b7ae8214dae9c3" [metadata.files] aiofiles = [ @@ -1619,6 +1632,10 @@ flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] +flake8-print = [ + {file = "flake8-print-4.0.0.tar.gz", hash = "sha256:5afac374b7dc49aac2c36d04b5eb1d746d72e6f5df75a6ecaecd99e9f79c6516"}, + {file = "flake8_print-4.0.0-py3-none-any.whl", hash = 
"sha256:6c0efce658513169f96d7a24cf136c434dc711eb00ebd0a985eb1120103fe584"}, +] ghp-import = [ {file = "ghp-import-2.0.2.tar.gz", hash = "sha256:947b3771f11be850c852c64b561c600fdddf794bab363060854c1ee7ad05e071"}, {file = "ghp_import-2.0.2-py3-none-any.whl", hash = "sha256:5f8962b30b20652cdffa9c5a9812f7de6bcb56ec475acac579807719bf242c46"}, diff --git a/pyproject.toml b/pyproject.toml index a610e042cc6a..194b3e216e35 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,6 +50,7 @@ pydantic-to-typescript = "^1.0.7" rich = "^10.7.0" isort = "^5.9.3" regex = "2021.9.30" # TODO: Remove during Upgrade -> https://github.com/psf/black/issues/2524 +flake8-print = "^4.0.0" [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/tests/integration_tests/user_group_tests/test_group_invitation.py b/tests/integration_tests/user_group_tests/test_group_invitation.py index 371275f73a85..4c6186a71258 100644 --- a/tests/integration_tests/user_group_tests/test_group_invitation.py +++ b/tests/integration_tests/user_group_tests/test_group_invitation.py @@ -47,7 +47,6 @@ def register_user(api_client, invite): registration.group_token = invite response = api_client.post(Routes.register, json=registration.dict(by_alias=True)) - print(response.json()) return registration, response diff --git a/tests/integration_tests/user_group_tests/test_group_webhooks.py b/tests/integration_tests/user_group_tests/test_group_webhooks.py index 2daf0b34b4bf..239e795f031d 100644 --- a/tests/integration_tests/user_group_tests/test_group_webhooks.py +++ b/tests/integration_tests/user_group_tests/test_group_webhooks.py @@ -28,8 +28,6 @@ def test_read_webhook(api_client: TestClient, unique_user: TestUser, webhook_dat webhook = response.json() - print(webhook) - assert webhook["id"] assert webhook["name"] == webhook_data["name"] assert webhook["url"] == webhook_data["url"] diff --git a/tests/integration_tests/user_recipe_tests/test_recipe_bulk_import.py 
b/tests/integration_tests/user_recipe_tests/test_recipe_bulk_import.py new file mode 100644 index 000000000000..22567a00ac07 --- /dev/null +++ b/tests/integration_tests/user_recipe_tests/test_recipe_bulk_import.py @@ -0,0 +1,35 @@ +import pytest +from fastapi.testclient import TestClient + +from tests.utils.fixture_schemas import TestUser + + +class Routes: + base = "/api/recipes" + bulk = "/api/recipes/create-url/bulk" + + def item(item_id: str) -> str: + return f"{Routes.base}/{item_id}" + + +@pytest.mark.skip("Long Running Scraper") +def test_bulk_import(api_client: TestClient, unique_user: TestUser): + recipes = { + "imports": [ + {"url": "https://www.bonappetit.com/recipe/caramel-crunch-chocolate-chunklet-cookies"}, + {"url": "https://www.allrecipes.com/recipe/10813/best-chocolate-chip-cookies/"}, + ] + } + + slugs = [ + "caramel-crunch-chocolate-chunklet-cookies", + "best-chocolate-chip-cookies", + ] + + response = api_client.post(Routes.bulk, json=recipes, headers=unique_user.token) + + assert response.status_code == 202 + + for slug in slugs: + response = api_client.get(Routes.item(slug), headers=unique_user.token) + assert response.status_code == 200 diff --git a/tests/integration_tests/user_recipe_tests/test_recipe_foods.py b/tests/integration_tests/user_recipe_tests/test_recipe_foods.py index 0965c2cc2df8..c45e7e49275c 100644 --- a/tests/integration_tests/user_recipe_tests/test_recipe_foods.py +++ b/tests/integration_tests/user_recipe_tests/test_recipe_foods.py @@ -73,5 +73,4 @@ def test_delete_food(api_client: TestClient, food: dict, unique_user: TestUser): assert response.status_code == 200 response = api_client.get(Routes.item(id), headers=unique_user.token) - print(response.json()) assert response.status_code == 404