Mirror of https://github.com/mealie-recipes/mealie.git (synced 2025-05-24 01:12:54 -04:00)
fixed tests
parent 408df286fd
commit 3a30b3216e
@@ -4,6 +4,7 @@ from pathlib import Path
 
 import pytest
 
+from mealie.lang.providers import local_provider
 from mealie.services.scraper import cleaner
 from mealie.services.scraper.scraper_strategies import RecipeScraperOpenGraph
 from tests import data as test_data
@@ -37,15 +38,17 @@ test_cleaner_data = [
 
 @pytest.mark.parametrize("json_file,num_steps", test_cleaner_data)
 def test_cleaner_clean(json_file: Path, num_steps):
-    recipe_data = cleaner.clean(json.loads(json_file.read_text()))
+    translator = local_provider()
+    recipe_data = cleaner.clean(json.loads(json_file.read_text()), translator)
     assert len(recipe_data["recipeInstructions"]) == num_steps
 
 
 def test_html_with_recipe_data():
     path = test_data.html_healthy_pasta_bake_60759
     url = "https://www.bbc.co.uk/food/recipes/healthy_pasta_bake_60759"
+    translator = local_provider()
 
-    open_graph_strategy = RecipeScraperOpenGraph(url)
+    open_graph_strategy = RecipeScraperOpenGraph(url, translator)
 
     recipe_data = open_graph_strategy.get_recipe_fields(path.read_text())
 
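For reference, a minimal sketch of how a call site reads after this change, assuming local_provider() returns the translator object these updated signatures expect; the "recipe.json" path is a hypothetical sample file, not something from the repository:

import json
from pathlib import Path

from mealie.lang.providers import local_provider
from mealie.services.scraper import cleaner

# Obtain the default (local) translation provider and pass it to the cleaner,
# mirroring the updated test above. "recipe.json" is a hypothetical sample of
# schema.org Recipe JSON.
translator = local_provider()
recipe_data = cleaner.clean(json.loads(Path("recipe.json").read_text()), translator)
print(len(recipe_data["recipeInstructions"]))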
@@ -4,6 +4,7 @@ from typing import Any
 
 import pytest
 
+from mealie.lang.providers import local_provider
 from mealie.services.scraper import cleaner
 
 
@@ -324,32 +325,32 @@ time_test_cases = (
     CleanerCase(
         test_id="timedelta",
         input=timedelta(minutes=30),
-        expected="30 Minutes",
+        expected="30 minutes",
     ),
     CleanerCase(
         test_id="timedelta string (1)",
         input="PT2H30M",
-        expected="2 Hours 30 Minutes",
+        expected="2 hours 30 minutes",
     ),
     CleanerCase(
         test_id="timedelta string (2)",
         input="PT30M",
-        expected="30 Minutes",
+        expected="30 minutes",
     ),
     CleanerCase(
         test_id="timedelta string (3)",
         input="PT2H",
-        expected="2 Hours",
+        expected="2 hours",
     ),
     CleanerCase(
         test_id="timedelta string (4)",
         input="P1DT1H1M1S",
-        expected="1 day 1 Hour 1 Minute 1 Second",
+        expected="1 day 1 hour 1 minute 1 second",
     ),
     CleanerCase(
         test_id="timedelta string (4)",
         input="P1DT1H1M1.53S",
-        expected="1 day 1 Hour 1 Minute 1 Second",
+        expected="1 day 1 hour 1 minute 1 second",
     ),
     CleanerCase(
         test_id="timedelta string (5) invalid",
@@ -366,7 +367,8 @@ time_test_cases = (
 
 @pytest.mark.parametrize("case", time_test_cases, ids=(x.test_id for x in time_test_cases))
 def test_cleaner_clean_time(case: CleanerCase):
-    result = cleaner.clean_time(case.input)
+    translator = local_provider()
+    result = cleaner.clean_time(case.input, translator)
     assert case.expected == result
 
 
@@ -536,10 +538,11 @@ def test_cleaner_clean_nutrition(case: CleanerCase):
 @pytest.mark.parametrize(
     "t,max_components,max_decimal_places,expected",
     [
-        (timedelta(days=2, seconds=17280), None, 2, "2 days 4 Hours 48 Minutes"),
+        (timedelta(days=2, seconds=17280), None, 2, "2 days 4 hours 48 minutes"),
         (timedelta(days=2, seconds=17280), 1, 2, "2.2 days"),
         (timedelta(days=365), None, 2, "1 year"),
     ],
 )
 def test_pretty_print_timedelta(t, max_components, max_decimal_places, expected):
-    assert cleaner.pretty_print_timedelta(t, max_components, max_decimal_places) == expected
+    translator = local_provider()
+    assert cleaner.pretty_print_timedelta(t, translator, max_components, max_decimal_places) == expected
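A short sketch of the duration helpers with the new translator argument, assuming the signatures used by the tests above; the expected output strings follow the updated lowercase unit names (e.g. "2 hours 30 minutes"):

from datetime import timedelta

from mealie.lang.providers import local_provider
from mealie.services.scraper import cleaner

translator = local_provider()

# ISO 8601 duration string -> localized text, per the updated expectations.
print(cleaner.clean_time("PT2H30M", translator))  # "2 hours 30 minutes"

# timedelta -> localized text; the positional arguments mirror the test call
# (max_components=None, max_decimal_places=2).
print(cleaner.pretty_print_timedelta(timedelta(days=2, seconds=17280), translator, None, 2))  # "2 days 4 hours 48 minutes"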
@@ -1,5 +1,6 @@
 import pytest
 
+from mealie.lang.providers import local_provider
 from mealie.services.scraper import scraper
 from tests.utils.recipe_data import RecipeSiteTestCase, get_recipe_test_cases
 
@@ -18,9 +19,10 @@ and then use this test case by removing the `@pytest.mark.skip` and than testing
 @pytest.mark.parametrize("recipe_test_data", test_cases)
 @pytest.mark.asyncio
 async def test_recipe_parser(recipe_test_data: RecipeSiteTestCase):
-    recipe, _ = await scraper.create_from_url(recipe_test_data.url)
+    translator = local_provider()
+    recipe, _ = await scraper.create_from_url(recipe_test_data.url, translator)
 
     assert recipe.slug == recipe_test_data.expected_slug
-    assert len(recipe.recipe_instructions) == recipe_test_data.num_steps
+    assert len(recipe.recipe_instructions or []) == recipe_test_data.num_steps
     assert len(recipe.recipe_ingredient) == recipe_test_data.num_ingredients
     assert recipe.org_url == recipe_test_data.url
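And a sketch of the scraper entry point with the translator threaded through, assuming create_from_url keeps the (recipe, extras) return shape used in the test; the example URL is a placeholder:

import asyncio

from mealie.lang.providers import local_provider
from mealie.services.scraper import scraper


async def main() -> None:
    translator = local_provider()
    # Placeholder URL; the test parametrizes real recipe sites.
    recipe, _ = await scraper.create_from_url("https://example.com/some-recipe", translator)
    # recipe_instructions can be None, hence the `or []` guard added in the test.
    print(recipe.slug, len(recipe.recipe_instructions or []))


asyncio.run(main())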