[file header and template hunk for the frontend recipe page were garbled in extraction; only the context line "{{ recipe.name }}" survives]
@@ -273,8 +273,6 @@ import {
useMeta,
useRoute,
} from "@nuxtjs/composition-api";
-// @ts-ignore vue-markdown has no types
-import VueMarkdown from "@adapttive/vue-markdown";
// import { useRecipeMeta } from "~/composables/recipes";
import { useStaticRoutes, useUserApi } from "~/composables/api";
import RecipeChips from "~/components/Domain/Recipe/RecipeChips.vue";
@@ -296,7 +294,6 @@ export default defineComponent({
RecipePrintView,
RecipeRating,
RecipeTimeCard,
- VueMarkdown,
},
layout: "basic",
setup() {
diff --git a/frontend/types/components.d.ts b/frontend/types/components.d.ts
index 2c8e360a4fcd..0d9e4c134e74 100644
--- a/frontend/types/components.d.ts
+++ b/frontend/types/components.d.ts
@@ -21,6 +21,7 @@ import ToggleState from "@/components/global/ToggleState.vue";
import ContextMenu from "@/components/global/ContextMenu.vue";
import AppButtonCopy from "@/components/global/AppButtonCopy.vue";
import CrudTable from "@/components/global/CrudTable.vue";
+import SafeMarkdown from "@/components/global/SafeMarkdown.vue";
import InputColor from "@/components/global/InputColor.vue";
import BaseDivider from "@/components/global/BaseDivider.vue";
import AutoForm from "@/components/global/AutoForm.vue";
@@ -59,6 +60,7 @@ declare module "vue" {
ContextMenu: typeof ContextMenu;
AppButtonCopy: typeof AppButtonCopy;
CrudTable: typeof CrudTable;
+ SafeMarkdown: typeof SafeMarkdown;
InputColor: typeof InputColor;
BaseDivider: typeof BaseDivider;
AutoForm: typeof AutoForm;
diff --git a/mealie/core/settings/static.py b/mealie/core/settings/static.py
index 74439963d4ae..b5d36ee9522f 100644
--- a/mealie/core/settings/static.py
+++ b/mealie/core/settings/static.py
@@ -1,6 +1,6 @@
from pathlib import Path
-APP_VERSION = "v1.0.0beta-3"
+APP_VERSION = "v1.0.0beta-4"
CWD = Path(__file__).parent
BASE_DIR = CWD.parent.parent.parent
diff --git a/mealie/routes/recipe/recipe_crud_routes.py b/mealie/routes/recipe/recipe_crud_routes.py
index 170fc76913f6..c25b71c77793 100644
--- a/mealie/routes/recipe/recipe_crud_routes.py
+++ b/mealie/routes/recipe/recipe_crud_routes.py
@@ -29,13 +29,13 @@ from mealie.schema.response.responses import ErrorResponse
from mealie.services import urls
from mealie.services.event_bus_service.event_bus_service import EventBusService, EventSource
from mealie.services.event_bus_service.message_types import EventTypes
-from mealie.services.recipe.recipe_data_service import RecipeDataService
+from mealie.services.recipe.recipe_data_service import InvalidDomainError, NotAnImageError, RecipeDataService
from mealie.services.recipe.recipe_service import RecipeService
from mealie.services.recipe.template_service import TemplateService
from mealie.services.scraper.recipe_bulk_scraper import RecipeBulkScraperService
from mealie.services.scraper.scraped_extras import ScraperContext
from mealie.services.scraper.scraper import create_from_url
-from mealie.services.scraper.scraper_strategies import RecipeScraperPackage
+from mealie.services.scraper.scraper_strategies import ForceTimeoutException, RecipeScraperPackage
class BaseRecipeController(BaseUserController):
@@ -139,7 +139,12 @@ class RecipeController(BaseRecipeController):
@router.post("/create-url", status_code=201, response_model=str)
def parse_recipe_url(self, req: ScrapeRecipe):
"""Takes in a URL and attempts to scrape data and load it into the database"""
- recipe, extras = create_from_url(req.url)
+ try:
+ recipe, extras = create_from_url(req.url)
+ except ForceTimeoutException as e:
+ raise HTTPException(
+ status_code=408, detail=ErrorResponse.respond(message="Recipe Scraping Timed Out")
+ ) from e
if req.include_tags:
ctx = ScraperContext(self.user.id, self.group_id, self.repos)
@@ -176,8 +181,13 @@ class RecipeController(BaseRecipeController):
@router.post("/test-scrape-url")
def test_parse_recipe_url(self, url: ScrapeRecipeTest):
# Debugger should produce the same result as the scraper sees before cleaning
- if scraped_data := RecipeScraperPackage(url.url).scrape_url():
- return scraped_data.schema.data
+ try:
+ if scraped_data := RecipeScraperPackage(url.url).scrape_url():
+ return scraped_data.schema.data
+ except ForceTimeoutException as e:
+ raise HTTPException(
+ status_code=408, detail=ErrorResponse.respond(message="Recipe Scraping Timed Out")
+ ) from e
return "recipe_scrapers was unable to scrape this URL"
@@ -314,7 +324,19 @@ class RecipeController(BaseRecipeController):
def scrape_image_url(self, slug: str, url: ScrapeRecipe):
recipe = self.mixins.get_one(slug)
data_service = RecipeDataService(recipe.id)
- data_service.scrape_image(url.url)
+
+ try:
+ data_service.scrape_image(url.url)
+ except NotAnImageError as e:
+ raise HTTPException(
+ status_code=400,
+ detail=ErrorResponse.respond("Url is not an image"),
+ ) from e
+ except InvalidDomainError as e:
+ raise HTTPException(
+ status_code=400,
+ detail=ErrorResponse.respond("Url is not from an allowed domain"),
+ ) from e
recipe.image = cache.cache_key.new_key()
self.service.update_one(recipe.slug, recipe)
@@ -338,13 +360,27 @@ class RecipeController(BaseRecipeController):
file: UploadFile = File(...),
):
"""Upload a file to store as a recipe asset"""
- file_name = f"{slugify(name)}.{extension}"
+ if "." in extension:
+ extension = extension.split(".")[-1]
+
+ file_slug = slugify(name)
+ if not extension or not file_slug:
+ raise HTTPException(status_code=400, detail="Missing required fields")
+
+ file_name = f"{file_slug}.{extension}"
asset_in = RecipeAsset(name=name, icon=icon, file_name=file_name)
recipe = self.mixins.get_one(slug)
dest = recipe.asset_dir / file_name
+ # Ensure the destination path resolves inside the recipe's asset directory
+ if dest.absolute().parent != recipe.asset_dir:
+ raise HTTPException(
+ status_code=400,
+ detail=f"File name {file_name} or extension {extension} not valid",
+ )
+
with dest.open("wb") as buffer:
copyfileobj(file.file, buffer)
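
Why the parent-directory check works: with the old code, `file_name = f"{slugify(name)}.{extension}"` turns `name="$"` and `extension="./test.txt"` into `"../test.txt"` (slugify of "$" is empty), which escapes the asset directory. A minimal standalone sketch of the new validation — `asset_dir` and `is_safe` are illustrative names, not Mealie code:

```python
from pathlib import Path

from slugify import slugify  # python-slugify, already a Mealie dependency

asset_dir = Path("/app/data/recipes/example-recipe/assets")  # illustrative path


def is_safe(name: str, extension: str) -> bool:
    # Mirror the route's sanitization: keep only the last dot-separated token
    if "." in extension:
        extension = extension.split(".")[-1]
    file_slug = slugify(name)
    if not extension or not file_slug:
        return False  # the route responds 400 here
    dest = asset_dir / f"{file_slug}.{extension}"
    # A traversal attempt changes the destination's parent directory
    return dest.absolute().parent == asset_dir


assert is_safe("my photo", "jpg")
# The huntr payload: slugify("$") is "", so the empty-slug check rejects it
assert not is_safe("$", "./test.txt")
```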
diff --git a/mealie/services/recipe/recipe_data_service.py b/mealie/services/recipe/recipe_data_service.py
index 912454848e43..a26fbb4680f0 100644
--- a/mealie/services/recipe/recipe_data_service.py
+++ b/mealie/services/recipe/recipe_data_service.py
@@ -11,6 +11,14 @@ from mealie.services._base_service import BaseService
_FIREFOX_UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:86.0) Gecko/20100101 Firefox/86.0"
+class NotAnImageError(Exception):
+ pass
+
+
+class InvalidDomainError(Exception):
+ pass
+
+
class RecipeDataService(BaseService):
minifier: img.ABCMinifier
@@ -56,8 +64,26 @@ class RecipeDataService(BaseService):
return image_path
+ @staticmethod
+ def _validate_image_url(url: str) -> bool:
+ # sourcery skip: invert-any-all, use-any
+ """
+ Validates that the URL comes from an allowed source, rejecting known-bad
+ hosts (via a simple substring check) so images cannot be fetched from internal services.
+ """
+ invalid_domains = {"127.0.0.1", "localhost"}
+ for domain in invalid_domains:
+ if domain in url:
+ return False
+
+ return True
+
def scrape_image(self, image_url) -> None:
- self.logger.info(f"Image URL: {image_url}")
+ self.logger.debug(f"Image URL: {image_url}")
+
+ if not self._validate_image_url(image_url):
+ self.logger.error(f"Invalid image URL: {image_url}")
+ raise InvalidDomainError(f"Invalid domain: {image_url}")
if isinstance(image_url, str): # Handles String Types
pass
@@ -74,7 +100,7 @@ class RecipeDataService(BaseService):
try:
r = requests.get(url, stream=True, headers={"User-Agent": _FIREFOX_UA})
except Exception:
- self.logger.exception("Image {url} could not be requested")
+ self.logger.exception(f"Image {url} could not be requested")
continue
if r.status_code == 200:
all_image_requests.append((url, r))
@@ -100,9 +126,19 @@ class RecipeDataService(BaseService):
self.logger.exception("Fatal Image Request Exception")
return None
- if r.status_code == 200:
- r.raw.decode_content = True
- self.logger.info(f"File Name Suffix {file_path.suffix}")
- self.write_image(r.raw, file_path.suffix)
+ if r.status_code != 200:
+ # TODO: This should probably raise an exception as well, but the previous
+ # behavior was to return None on failure.
+ return None
- file_path.unlink(missing_ok=True)
+ content_type = r.headers.get("content-type", "")
+
+ if "image" not in content_type:
+ self.logger.error(f"Content-Type: {content_type} is not an image")
+ raise NotAnImageError(f"Content-Type {content_type} is not an image")
+
+ r.raw.decode_content = True
+ self.logger.info(f"File Name Suffix {file_path.suffix}")
+ self.write_image(r.raw, file_path.suffix)
+
+ file_path.unlink(missing_ok=True)
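
The domain guard above is a plain substring match, so any URL containing `127.0.0.1` or `localhost` is rejected before a request is made, and the content-type guard rejects non-image responses after the fetch. A quick sketch of how the new names behave (the example URLs are assumptions):

```python
from mealie.services.recipe.recipe_data_service import RecipeDataService

# _validate_image_url is a staticmethod doing a substring match on the URL
assert RecipeDataService._validate_image_url("https://example.com/dish.jpg")
assert not RecipeDataService._validate_image_url("http://localhost:9000/x.png")
assert not RecipeDataService._validate_image_url("http://127.0.0.1/admin")

# scrape_image raises InvalidDomainError for rejected URLs and NotAnImageError
# when the response's Content-Type lacks "image"; the route maps both to HTTP 400.
```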
diff --git a/mealie/services/scraper/scraper_strategies.py b/mealie/services/scraper/scraper_strategies.py
index 701367376734..074e760fa546 100644
--- a/mealie/services/scraper/scraper_strategies.py
+++ b/mealie/services/scraper/scraper_strategies.py
@@ -1,10 +1,11 @@
+import time
from abc import ABC, abstractmethod
from typing import Any, Callable
import extruct
import requests
from fastapi import HTTPException, status
-from recipe_scrapers import NoSchemaFoundInWildMode, SchemaScraperFactory, WebsiteNotImplementedError, scrape_me
+from recipe_scrapers import NoSchemaFoundInWildMode, SchemaScraperFactory, scrape_html
from slugify import slugify
from w3lib.html import get_base_url
@@ -14,6 +15,59 @@ from mealie.services.scraper.scraped_extras import ScrapedExtras
from . import cleaner
+SCRAPER_TIMEOUT = 15
+
+
+class ForceTimeoutException(Exception):
+ pass
+
+
+def safe_scrape_html(url: str) -> str:
+ """
+ Scrapes the HTML from a URL, cancelling the request if it takes longer than
+ SCRAPER_TIMEOUT seconds. This mitigates denial-of-service attacks from users
+ providing a URL that serves arbitrarily large content.
+ """
+ resp = requests.get(url, timeout=SCRAPER_TIMEOUT, stream=True)
+
+ html_bytes = b""
+
+ start_time = time.time()
+
+ for chunk in resp.iter_content(chunk_size=1024):
+ html_bytes += chunk
+
+ if time.time() - start_time > SCRAPER_TIMEOUT:
+ raise ForceTimeoutException()
+
+ # =====================================
+ # Copied from requests' text property
+
+ # Try charset from content-type
+ content = None
+ encoding = resp.encoding
+
+ if not html_bytes:
+ return ""
+
+ # Fallback to auto-detected encoding.
+ if encoding is None:
+ encoding = resp.apparent_encoding
+
+ # Decode unicode from given encoding.
+ try:
+ content = str(html_bytes, encoding, errors="replace")
+ except (LookupError, TypeError):
+ # A LookupError is raised if the encoding was not found which could
+ # indicate a misspelling or similar mistake.
+ #
+ # A TypeError can be raised if encoding is None
+ #
+ # So we try blindly encoding.
+ content = str(html_bytes, errors="replace")
+
+ return content
+
class ABCScraperStrategy(ABC):
"""
@@ -103,14 +157,13 @@ class RecipeScraperPackage(ABCScraperStrategy):
return recipe, extras
def scrape_url(self) -> SchemaScraperFactory.SchemaScraper | Any | None:
+ recipe_html = safe_scrape_html(self.url)
+
try:
- scraped_schema = scrape_me(self.url)
- except (WebsiteNotImplementedError, AttributeError):
- try:
- scraped_schema = scrape_me(self.url, wild_mode=True)
- except (NoSchemaFoundInWildMode, AttributeError):
- self.logger.error("Recipe Scraper was unable to extract a recipe.")
- return None
+ scraped_schema = scrape_html(recipe_html, org_url=self.url)
+ except (NoSchemaFoundInWildMode, AttributeError):
+ self.logger.error("Recipe Scraper was unable to extract a recipe.")
+ return None
except ConnectionError as e:
raise HTTPException(status.HTTP_400_BAD_REQUEST, {"details": "CONNECTION_ERROR"}) from e
@@ -150,7 +203,7 @@ class RecipeScraperOpenGraph(ABCScraperStrategy):
"""
def get_html(self) -> str:
- return requests.get(self.url).text
+ return safe_scrape_html(self.url)
def get_recipe_fields(self, html) -> dict | None:
"""
diff --git a/poetry.lock b/poetry.lock
index d9b40424a2ea..c5e566b9e388 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1190,7 +1190,7 @@ rdflib = ">=5.0.0"
[[package]]
name = "recipe-scrapers"
-version = "14.7.0"
+version = "14.11.0"
description = "Python package, scraping recipes from all over the internet"
category = "main"
optional = false
@@ -2288,7 +2288,10 @@ rdflib-jsonld = [
{file = "rdflib-jsonld-0.6.2.tar.gz", hash = "sha256:107cd3019d41354c31687e64af5e3fd3c3e3fa5052ce635f5ce595fd31853a63"},
{file = "rdflib_jsonld-0.6.2-py2.py3-none-any.whl", hash = "sha256:011afe67672353ca9978ab9a4bee964dff91f14042f2d8a28c22a573779d2f8b"},
]
-recipe-scrapers = []
+recipe-scrapers = [
+ {file = "recipe_scrapers-14.11.0-py3-none-any.whl", hash = "sha256:992b37ef2c29d66caaec82b2c5a1f9d901a74d2e267e60e505370c59ceadaeef"},
+ {file = "recipe_scrapers-14.11.0.tar.gz", hash = "sha256:85192e976388eeba9bb314c5cf75ac087ec1cfaf4b4aa1ffe580dae4099e2be9"},
+]
requests = []
requests-oauthlib = [
{file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"},
diff --git a/pyproject.toml b/pyproject.toml
index eb8ebb3d9954..57ae91fe0a78 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,7 +3,7 @@ name = "mealie"
version = "1.0.0b"
description = "A Recipe Manager"
authors = ["Hayden "]
-license = "MIT"
+license = "AGPL"
[tool.poetry.scripts]
start = "mealie.app:main"
diff --git a/tests/fixtures/fixture_recipe.py b/tests/fixtures/fixture_recipe.py
index a8eceff5178f..e84b2075e853 100644
--- a/tests/fixtures/fixture_recipe.py
+++ b/tests/fixtures/fixture_recipe.py
@@ -1,9 +1,12 @@
+import contextlib
+from collections.abc import Generator
+
import sqlalchemy
from pytest import fixture
from mealie.repos.repository_factory import AllRepositories
-from mealie.schema.recipe.recipe import Recipe, RecipeCategory
-from mealie.schema.recipe.recipe_category import CategorySave
+from mealie.schema.recipe.recipe import Recipe
+from mealie.schema.recipe.recipe_category import CategoryOut, CategorySave
from mealie.schema.recipe.recipe_ingredient import RecipeIngredient
from mealie.schema.recipe.recipe_step import RecipeStep
from tests.utils.factories import random_string
@@ -47,15 +50,13 @@ def recipe_ingredient_only(database: AllRepositories, unique_user: TestUser):
yield model
- try:
+ with contextlib.suppress(sqlalchemy.exc.NoResultFound):
database.recipes.delete(model.slug)
- except sqlalchemy.exc.NoResultFound: # Entry Deleted in Test
- pass
@fixture(scope="function")
-def recipe_categories(database: AllRepositories, unique_user: TestUser) -> list[RecipeCategory]:
- models: list[RecipeCategory] = []
+def recipe_categories(database: AllRepositories, unique_user: TestUser) -> Generator[list[CategoryOut], None, None]:
+ models: list[CategoryOut] = []
for _ in range(3):
category = CategorySave(
group_id=unique_user.group_id,
@@ -66,15 +67,13 @@ def recipe_categories(database: AllRepositories, unique_user: TestUser) -> list[
yield models
- for model in models:
- try:
- database.categories.delete(model.id)
- except sqlalchemy.exc.NoResultFound:
- pass
+ for m in models:
+ with contextlib.suppress(sqlalchemy.exc.NoResultFound):
+ database.categories.delete(m.id)
@fixture(scope="function")
-def random_recipe(database: AllRepositories, unique_user: TestUser) -> Recipe:
+def random_recipe(database: AllRepositories, unique_user: TestUser) -> Generator[Recipe, None, None]:
recipe = Recipe(
user_id=unique_user.user_id,
group_id=unique_user.group_id,
@@ -95,7 +94,5 @@ def random_recipe(database: AllRepositories, unique_user: TestUser) -> Recipe:
yield model
- try:
+ with contextlib.suppress(sqlalchemy.exc.NoResultFound):
database.recipes.delete(model.slug)
- except sqlalchemy.exc.NoResultFound:
- pass
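
The fixture teardowns above swap try/except-pass for `contextlib.suppress`, which is behaviorally identical; a self-contained illustration with a stdlib exception:

```python
import contextlib

cache: dict[str, str] = {}

# Before: explicit try/except boilerplate
try:
    del cache["missing"]
except KeyError:  # entry already removed
    pass

# After: same semantics, clearer intent
with contextlib.suppress(KeyError):
    del cache["missing"]
```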
diff --git a/tests/integration_tests/user_recipe_tests/test_recipe_image_assets.py b/tests/integration_tests/user_recipe_tests/test_recipe_image_assets.py
index b269a248f0a9..c6a2f57188eb 100644
--- a/tests/integration_tests/user_recipe_tests/test_recipe_image_assets.py
+++ b/tests/integration_tests/user_recipe_tests/test_recipe_image_assets.py
@@ -12,7 +12,6 @@ from tests.utils.fixture_schemas import TestUser
def test_recipe_assets_create(api_client: TestClient, unique_user: TestUser, recipe_ingredient_only: Recipe):
recipe = recipe_ingredient_only
payload = {
- "slug": recipe.slug,
"name": random_string(10),
"icon": random_string(10),
"extension": "jpg",
@@ -43,6 +42,51 @@ def test_recipe_assets_create(api_client: TestClient, unique_user: TestUser, rec
assert recipe_respons["assets"][0]["name"] == payload["name"]
+def test_recipe_asset_exploit(api_client: TestClient, unique_user: TestUser, recipe_ingredient_only: Recipe):
+ """
+ Ensure that users cannot escape the destination directory when uploading a file
+ as a recipe asset. This was reported via huntr and confirmed to be a severe security issue.
+
+ Mitigation checks that the destination file's parent directory is the recipe's
+ asset directory; otherwise an exception is raised and a 400 error is returned.
+
+ Report Details:
+ -------------------
+ Arbitrary template creation leading to Authenticated Remote Code Execution in hay-kot/mealie
+
+ An attacker able to exploit such a flaw can execute commands with the privileges
+ of the programming language or the web server. In this case, since the attacker is
+ root in a Docker container, they can execute system commands, read/modify databases,
+ and attack adjacent systems. This flaw leads to a complete compromise of the system.
+
+ https://huntr.dev/bounties/3ecd4a78-523e-4f84-a3fd-31a01a68f142/
+ """
+
+ recipe = recipe_ingredient_only
+ payload = {
+ "name": "$",
+ "icon": random_string(10),
+ "extension": "./test.txt",
+ }
+
+ file_payload = {
+ "file": data.images_test_image_1.read_bytes(),
+ }
+
+ response = api_client.post(
+ f"/api/recipes/{recipe.slug}/assets",
+ data=payload,
+ files=file_payload,
+ headers=unique_user.token,
+ )
+
+ assert response.status_code == 400
+
+ # Ensure File was not created
+ assert not (recipe.asset_dir.parent / "test.txt").exists()
+ assert not (recipe.asset_dir / "test.txt").exists()
+
+
def test_recipe_image_upload(api_client: TestClient, unique_user: TestUser, recipe_ingredient_only: Recipe):
data_payload = {"extension": "jpg"}
file_payload = {"image": data.images_test_image_1.read_bytes()}